diff --git a/.github/workflows/build_push_dev.yml b/.github/workflows/build_push_dev.yml index 1edfd76ea..20e8fcd69 100644 --- a/.github/workflows/build_push_dev.yml +++ b/.github/workflows/build_push_dev.yml @@ -7,33 +7,40 @@ permissions: read-all jobs: docker_backend_dev: runs-on: ubuntu-latest + permissions: + contents: read + packages: write + attestations: write + id-token: write steps: - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Set up QEMU - uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0 + uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0 + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 - - name: Login to Docker Hub - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + name: Login to GitHub Container Registry + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0 with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} - name: Set current date as env variable run: echo "CREATED=$(date +'%Y-%m-%dT%H:%M:%S')" >> $GITHUB_ENV - name: Build and push backend - uses: docker/build-push-action@b32b51a8eda65d6793cd0494a773d4f6bcef32dc # v6.11.0 + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2 with: context: . file: ./docker/backend/prod/django/Dockerfile + platforms: linux/amd64,linux/arm64 push: true - tags: maibornwolff/secobserve-backend:dev + tags: ghcr.io/secobserve/secobserve-backend:dev build-args: | CREATED=${{ env.CREATED }} REVISION=${{ github.sha }} @@ -41,48 +48,41 @@ jobs: docker_frontend_dev: runs-on: ubuntu-latest + permissions: + contents: read + packages: write + attestations: write + id-token: write steps: - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Set up QEMU - uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0 + uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0 + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 - - name: Login to Docker Hub - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + name: Login to GitHub Container Registry + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0 with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} - name: Set current date as env variable run: echo "CREATED=$(date +'%Y-%m-%dT%H:%M:%S')" >> $GITHUB_ENV - name: Build and push frontend - uses: docker/build-push-action@b32b51a8eda65d6793cd0494a773d4f6bcef32dc # v6.11.0 + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2 with: context: . 
file: ./docker/frontend/prod/Dockerfile + platforms: linux/amd64,linux/arm64 push: true - tags: maibornwolff/secobserve-frontend:dev + tags: ghcr.io/secobserve/secobserve-frontend:dev build-args: | CREATED=${{ env.CREATED }} REVISION=${{ github.sha }} VERSION=dev - - sca_dev: - runs-on: ubuntu-latest - needs: [docker_backend_dev, docker_frontend_dev] - steps: - - - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - - name: Run SCA vulnerability scanners - uses: MaibornWolff/secobserve_actions_templates/actions/vulnerability_scanner@a8344daa56598a80c2c80081974a0468dd29d086 # main - with: - so_configuration: 'so_configuration_sca_dev.yml' - SO_API_TOKEN: ${{ secrets.SO_API_TOKEN }} diff --git a/.github/workflows/build_push_release.yml b/.github/workflows/build_push_release.yml index 217018b53..bd663526d 100644 --- a/.github/workflows/build_push_release.yml +++ b/.github/workflows/build_push_release.yml @@ -13,37 +13,44 @@ permissions: read-all jobs: docker_backend_release: runs-on: ubuntu-latest + permissions: + contents: read + packages: write + attestations: write + id-token: write steps: - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: ref: 'v${{ github.event.inputs.release }}' - name: Set up QEMU - uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0 + uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0 + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 - - name: Login to Docker Hub - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + name: Login to GitHub Container Registry + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0 with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} - name: Set current date as env variable run: echo "CREATED=$(date +'%Y-%m-%dT%H:%M:%S')" >> $GITHUB_ENV - name: Build and push backend - uses: docker/build-push-action@b32b51a8eda65d6793cd0494a773d4f6bcef32dc # v6.11.0 + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2 with: context: . 
file: ./docker/backend/prod/django/Dockerfile + platforms: linux/amd64,linux/arm64 push: true tags: | - maibornwolff/secobserve-backend:${{ github.event.inputs.release }} - maibornwolff/secobserve-backend:latest + ghcr.io/secobserve/secobserve-backend:${{ github.event.inputs.release }} + ghcr.io/secobserve/secobserve-backend:latest build-args: | CREATED=${{ env.CREATED }} REVISION=${{ github.sha }} @@ -51,84 +58,70 @@ jobs: docker_frontend_release: runs-on: ubuntu-latest + permissions: + contents: read + packages: write + attestations: write + id-token: write steps: - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: ref: 'v${{ github.event.inputs.release }}' - name: Set up QEMU - uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0 + uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0 + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 - - name: Login to Docker Hub - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + name: Login to GitHub Container Registry + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0 with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} - name: Set current date as env variable run: echo "CREATED=$(date +'%Y-%m-%dT%H:%M:%S')" >> $GITHUB_ENV - name: Build and push frontend - uses: docker/build-push-action@b32b51a8eda65d6793cd0494a773d4f6bcef32dc # v6.11.0 + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2 with: context: . 
file: ./docker/frontend/prod/Dockerfile + platforms: linux/amd64,linux/arm64 push: true tags: | - maibornwolff/secobserve-frontend:${{ github.event.inputs.release }} - maibornwolff/secobserve-frontend:latest + ghcr.io/secobserve/secobserve-frontend:${{ github.event.inputs.release }} + ghcr.io/secobserve/secobserve-frontend:latest build-args: | CREATED=${{ env.CREATED }} REVISION=${{ github.sha }} VERSION=${{ github.event.inputs.release }} - vulnerability_scans_release: - runs-on: ubuntu-latest - needs: [docker_backend_release, docker_frontend_release] - steps: - - - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - ref: 'v${{ github.event.inputs.release }}' - - - name: Run vulnerability scanners for images - uses: MaibornWolff/secobserve_actions_templates/actions/vulnerability_scanner@a8344daa56598a80c2c80081974a0468dd29d086 # main - with: - so_configuration: 'so_configuration_sca_current.yml' - SO_API_TOKEN: ${{ secrets.SO_API_TOKEN }} - - - name: Run vulnerability scanners for endpoints - uses: MaibornWolff/secobserve_actions_templates/actions/vulnerability_scanner@a8344daa56598a80c2c80081974a0468dd29d086 # main - with: - so_configuration: 'so_configuration_endpoints.yml' - SO_API_TOKEN: ${{ secrets.SO_API_TOKEN }} - generate_sboms: runs-on: ubuntu-latest needs: [docker_backend_release, docker_frontend_release] permissions: contents: write steps: - - uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0 + - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 with: - node-version: 20 + node-version: 24 - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: ref: 'v${{ github.event.inputs.release }}' - name: Install programs env: - CDXGEN_VERSION: 10.9.4 - SBOM_UTILITY_VERSION: 0.16.0 - CYCLONE_DX_CLI_VERSION: 0.25.1 + CDXGEN_VERSION: 11.9.0 + TRIVY_VERSION: 0.67.0 + SBOM_UTILITY_VERSION: 0.18.1 + CYCLONE_DX_CLI_VERSION: 0.29.1 run: | npm install -g @cyclonedx/cdxgen@"$CDXGEN_VERSION" cd /usr/local/bin @@ -136,6 +129,7 @@ jobs: wget --no-verbose https://github.com/CycloneDX/cyclonedx-cli/releases/download/v"$CYCLONE_DX_CLI_VERSION"/cyclonedx-linux-x64 cp cyclonedx-linux-x64 /usr/local/bin/cyclonedx chmod +x /usr/local/bin/cyclonedx + curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sudo sh -s -- -b /usr/local/bin v"$TRIVY_VERSION" - name: Generate SBOM for backend application env: @@ -145,7 +139,7 @@ jobs: run: | mv ../backend/poetry_requirements.txt ../backend/poetry_requirements.sic sed -i "s|REPLACE_VERSION|$VERSION|g" ./configuration/patch_backend_application.json - cdxgen ../backend --type python --required-only --profile license-compliance --no-auto-compositions --output sbom_backend_application.json + cdxgen ../backend --type python --required-only --profile license-compliance --no-auto-compositions --output sbom_backend_application.json --spec-version 1.6 sbom-utility trim --keys=externalReferences,properties,evidence,authors,lifecycles --quiet --input-file sbom_backend_application.json \ | sbom-utility patch --patch-file ./configuration/patch_supplier.json --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_backend_application.json --quiet --input-file - --output-file sbom_backend_application_"$VERSION".json @@ -159,7 +153,7 @@ jobs: working-directory: ./sbom run: | sed -i "s|REPLACE_VERSION|$VERSION|g" 
./configuration/patch_frontend_application.json - cdxgen ../frontend --type npm --no-babel --required-only --profile license-compliance --no-auto-compositions --project-name secobserve --output sbom_frontend_application.json + cdxgen ../frontend --type npm --no-babel --required-only --profile license-compliance --no-auto-compositions --project-name secobserve --output sbom_frontend_application.json --spec-version 1.6 sbom-utility trim --keys=externalReferences,properties,evidence,authors,lifecycles --quiet --input-file sbom_frontend_application.json \ | sbom-utility patch --patch-file ./configuration/patch_supplier.json --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_frontend_application.json --quiet --input-file - --output-file sbom_frontend_application_"$VERSION".json @@ -172,7 +166,7 @@ jobs: working-directory: ./sbom run: | sed -i "s|REPLACE_VERSION|$VERSION|g" ./configuration/patch_backend_container.json - cdxgen maibornwolff/secobserve-backend:$VERSION --type container --exclude-type python --exclude-type ruby --profile license-compliance --no-auto-compositions --output sbom_backend_container.json + trivy image --scanners license --pkg-types os --format cyclonedx --output sbom_backend_container.json ghcr.io/secobserve/secobserve-backend:$VERSION sbom-utility trim --keys=externalReferences,properties,evidence,authors,lifecycles,services --quiet --input-file sbom_backend_container.json \ | sbom-utility patch --patch-file ./configuration/patch_supplier.json --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_backend_container.json --quiet --input-file - --output-file sbom_backend_container_"$VERSION".json @@ -185,7 +179,7 @@ jobs: working-directory: ./sbom run: | sed -i "s|REPLACE_VERSION|$VERSION|g" ./configuration/patch_frontend_container.json - cdxgen maibornwolff/secobserve-frontend:$VERSION --type container --exclude-type npm --exclude-type ruby --profile license-compliance --no-auto-compositions --output sbom_frontend_container.json + trivy image --scanners license --pkg-types os --format cyclonedx --output sbom_frontend_container.json ghcr.io/secobserve/secobserve-frontend:$VERSION sbom-utility trim --keys=externalReferences,properties,evidence,authors,lifecycles,services --quiet --input-file sbom_frontend_container.json \ | sbom-utility patch --patch-file ./configuration/patch_supplier.json --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_frontend_container.json --quiet --input-file - --output-file sbom_frontend_container_"$VERSION".json @@ -199,15 +193,108 @@ jobs: run: | sed -i "s|REPLACE_VERSION|$VERSION|g" ./configuration/patch_complete.json cyclonedx merge --hierarchical --name "SecObserve" --version "$VERSION" --input-files sbom_backend_application_"$VERSION".json sbom_frontend_application_"$VERSION".json sbom_backend_container_"$VERSION".json sbom_frontend_container_"$VERSION".json --output-format json \ + | sbom-utility trim --keys=declarations,definitions --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_supplier.json --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_complete.json --quiet --input-file - --output-file sbom_"$VERSION".json sbom-utility validate --input-file sbom_"$VERSION".json - - + - name: Commit SBOMs - uses: stefanzweifel/git-auto-commit-action@e348103e9026cc0eee72ae06630dbe30c8bf7a79 # v5 + uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7 with: skip_fetch: 
true create_branch: true commit_message: "chore: generate SBOMs for release ${{ github.event.inputs.release }}" branch: "chore/sboms_release_${{ github.event.inputs.release }}" file_pattern: "sbom/sbom*.json" + # - + # name: Merge SBOM branch into main and delete branch + # uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + # env: + # VERSION: ${{ github.event.inputs.release }} + # with: + # github-token: ${{ secrets.GITHUB_TOKEN }} + # script: | + # const sbomBranch = `chore/sboms_release_${process.env.VERSION}`; + # const targetBranch = 'main'; + + # console.log(`Merging branch ${sbomBranch} into ${targetBranch}`); + + # try { + # // Merge the SBOM branch into main + # await github.rest.repos.merge({ + # owner: context.repo.owner, + # repo: context.repo.repo, + # base: targetBranch, + # head: sbomBranch, + # commit_message: `chore: merge SBOM files for release ${process.env.VERSION}` + # }); + + # console.log(`Successfully merged ${sbomBranch} into ${targetBranch}`); + + # // Delete the SBOM branch after successful merge + # console.log(`Deleting branch ${sbomBranch}`); + # await github.rest.git.deleteRef({ + # owner: context.repo.owner, + # repo: context.repo.repo, + # ref: `heads/${sbomBranch}` + # }); + + # console.log(`Successfully deleted branch ${sbomBranch}`); + # } catch (error) { + # console.error(`Error during merge or branch deletion: ${error.message}`); + # core.setFailed(error.message); + # } + - + name: Add SBOMs to GitHub Release + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + env: + VERSION: ${{ github.event.inputs.release }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const fs = require('fs'); + const path = require('path'); + const version = process.env.VERSION; + const releaseTag = `v${version}`; + + console.log(`Adding SBOMs to GitHub release ${releaseTag}`); + + try { + // Get the release by tag + const { data: release } = await github.rest.repos.getReleaseByTag({ + owner: context.repo.owner, + repo: context.repo.repo, + tag: releaseTag + }); + + // SBOM files to upload + const sbomFiles = [ + `sbom_backend_application_${version}.json`, + `sbom_frontend_application_${version}.json`, + `sbom_backend_container_${version}.json`, + `sbom_frontend_container_${version}.json`, + `sbom_${version}.json` + ]; + + // Upload each SBOM file to the release + for (const file of sbomFiles) { + const filePath = path.join('./sbom', file); + + console.log(`Uploading ${filePath} to release ${releaseTag}`); + + const fileContent = fs.readFileSync(filePath); + + await github.rest.repos.uploadReleaseAsset({ + owner: context.repo.owner, + repo: context.repo.repo, + release_id: release.id, + name: file, + data: fileContent + }); + + console.log(`Successfully uploaded ${file} to release ${releaseTag}`); + } + } catch (error) { + console.error(`Error adding SBOMs to release: ${error.message}`); + core.setFailed(error.message); + } diff --git a/.github/workflows/check_backend.yml b/.github/workflows/check_backend.yml index 28f5e3219..6fbfe2a05 100644 --- a/.github/workflows/check_backend.yml +++ b/.github/workflows/check_backend.yml @@ -1,10 +1,15 @@ name: Check backend -on: [push, pull_request] +on: + push: + paths-ignore: + - charts/** + pull_request: + paths-ignore: + - charts/** permissions: read-all - env: POETRY_NO_INTERACTION: 1 @@ -12,11 +17,11 @@ jobs: code_quality: runs-on: ubuntu-latest steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - name: Set up Python 3.12 - 
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - name: Set up Python + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: - python-version: 3.12 + python-version: 3.14 - name: Install dependencies working-directory: ./backend @@ -58,8 +63,38 @@ jobs: unittests: runs-on: ubuntu-latest steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Unittests run: | docker build -f docker/backend/unittests/django/Dockerfile -t secobserve_backend_unittests:latest . - docker run --rm --env-file docker/backend/unittests/envs/django --env-file docker/backend/unittests/envs/sqlite secobserve_backend_unittests:latest /start + docker run --rm \ + --volume ./backend:/home \ + --env-file docker/backend/unittests/envs/django \ + --env-file docker/backend/unittests/envs/sqlite \ + secobserve_backend_unittests:latest + - name: "Upload coverage report" + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: coverage-report + path: backend/coverage.xml + retention-days: 1 + + check_code_sonarqube_backend: + if: github.repository == 'SecObserve/SecObserve' && (github.ref == 'refs/heads/dev' || github.event_name == 'pull_request') + needs: [unittests] + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + fetch-depth: 0 + - name: Download a single artifact + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + name: coverage-report + - name: Run SonarQube scan for backend + uses: SonarSource/sonarqube-scan-action@a31c9398be7ace6bbfaf30c0bd5d415f843d45e9 # v7.0.0 + env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN_BACKEND }} + with: + projectBaseDir: backend diff --git a/.github/workflows/check_frontend.yml b/.github/workflows/check_frontend.yml index 4e2e350ba..ca4ca9196 100644 --- a/.github/workflows/check_frontend.yml +++ b/.github/workflows/check_frontend.yml @@ -1,6 +1,12 @@ name: Check frontend -on: [push, pull_request] +on: + push: + paths-ignore: + - charts/** + pull_request: + paths-ignore: + - charts/** permissions: read-all @@ -8,10 +14,10 @@ jobs: code_quality: runs-on: ubuntu-latest steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 with: - node-version: 20 + node-version: 24 - name: Install dependencies working-directory: ./frontend @@ -31,7 +37,7 @@ jobs: end_to_end_tests: runs-on: ubuntu-latest steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: End-to-end tests working-directory: . run: | @@ -40,3 +46,20 @@ jobs: cd .. 
docker compose -f docker-compose-playwright.yml build docker compose -f docker-compose-playwright.yml up --abort-on-container-exit --exit-code-from playwright + + check_code_sonarqube_frontend: + if: github.repository == 'SecObserve/SecObserve' && (github.ref == 'refs/heads/dev' || github.event_name == 'pull_request') + runs-on: ubuntu-latest + steps: + - + name: Checkout code + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + fetch-depth: 0 + - + name: Run SonarQube scan for frontend + uses: SonarSource/sonarqube-scan-action@a31c9398be7ace6bbfaf30c0bd5d415f843d45e9 # v7.0.0 + env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN_FRONTEND }} + with: + projectBaseDir: frontend diff --git a/.github/workflows/check_licenses_dev.yml b/.github/workflows/check_licenses_dev.yml deleted file mode 100644 index eb87e37d4..000000000 --- a/.github/workflows/check_licenses_dev.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Check application licenses for dev - -on: - push: - branches: - - dev - -permissions: read-all - -jobs: - scan_licenses: - runs-on: ubuntu-latest - steps: - - - uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0 - with: - node-version: 20 - - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - - name: Install programs - env: - CDXGEN_VERSION: 10.10.6 - run: | - npm install -g @cyclonedx/cdxgen@"$CDXGEN_VERSION" - - - name: Generate SBOM for backend application - env: - FETCH_LICENSE: 1 - run: | - cdxgen ./backend --type python --required-only --profile license-compliance --no-auto-compositions --output sbom_backend_application.json - - - name: Generate SBOM for frontend application - run: | - cdxgen ./frontend --type npm --no-babel --required-only --profile license-compliance --no-auto-compositions --project-name secobserve --output sbom_frontend_application.json - - - name: Import backend SBOM - uses: MaibornWolff/secobserve_actions_templates/actions/importer@a8344daa56598a80c2c80081974a0468dd29d086 # main - with: - so_product_name: 'SecObserve' - so_file_name: 'sbom_backend_application.json' - so_parser_name: 'CycloneDX' - so_branch_name: 'dev' - so_api_base_url: "https://secobserve-backend.maibornwolff.de" - so_api_token: ${{ secrets.SO_API_TOKEN }} - - - name: Import frontend SBOM - uses: MaibornWolff/secobserve_actions_templates/actions/importer@a8344daa56598a80c2c80081974a0468dd29d086 # main - with: - so_product_name: 'SecObserve' - so_file_name: 'sbom_frontend_application.json' - so_parser_name: 'CycloneDX' - so_branch_name: 'dev' - so_api_base_url: "https://secobserve-backend.maibornwolff.de" - so_api_token: ${{ secrets.SO_API_TOKEN }} diff --git a/.github/workflows/check_vulnerabilities.yml b/.github/workflows/check_vulnerabilities.yml deleted file mode 100644 index 1a9d23c05..000000000 --- a/.github/workflows/check_vulnerabilities.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: Check for vulnerabilities in the code - -on: [push] - -permissions: read-all - -jobs: - check_code_vulnerabilities: - if: github.event.repository.url == 'https://github.com/MaibornWolff/SecObserve' - runs-on: ubuntu-latest - steps: - - - name: Checkout code - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - - name: Run vulnerability scanners for code - uses: MaibornWolff/secobserve_actions_templates/actions/vulnerability_scanner@a8344daa56598a80c2c80081974a0468dd29d086 # main - with: - so_configuration: 'so_configuration_code.yml' - SO_API_TOKEN: ${{ secrets.SO_API_TOKEN }} diff --git 
a/.github/workflows/generate_sboms.yml b/.github/workflows/generate_sboms.yml index b235fc077..9fe610a99 100644 --- a/.github/workflows/generate_sboms.yml +++ b/.github/workflows/generate_sboms.yml @@ -16,20 +16,21 @@ jobs: permissions: contents: write steps: - - uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0 + - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 with: - node-version: 20 + node-version: 24 - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: ref: 'v${{ github.event.inputs.release }}' - name: Install programs env: - CDXGEN_VERSION: 10.9.4 - SBOM_UTILITY_VERSION: 0.16.0 - CYCLONE_DX_CLI_VERSION: 0.25.1 + CDXGEN_VERSION: 11.9.0 + TRIVY_VERSION: 0.67.0 + SBOM_UTILITY_VERSION: 0.18.1 + CYCLONE_DX_CLI_VERSION: 0.29.1 run: | npm install -g @cyclonedx/cdxgen@"$CDXGEN_VERSION" cd /usr/local/bin @@ -37,6 +38,7 @@ jobs: wget --no-verbose https://github.com/CycloneDX/cyclonedx-cli/releases/download/v"$CYCLONE_DX_CLI_VERSION"/cyclonedx-linux-x64 cp cyclonedx-linux-x64 /usr/local/bin/cyclonedx chmod +x /usr/local/bin/cyclonedx + curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sudo sh -s -- -b /usr/local/bin v"$TRIVY_VERSION" - name: Generate SBOM for backend application env: @@ -46,7 +48,7 @@ jobs: run: | mv ../backend/poetry_requirements.txt ../backend/poetry_requirements.sic sed -i "s|REPLACE_VERSION|$VERSION|g" ./configuration/patch_backend_application.json - cdxgen ../backend --type python --required-only --profile license-compliance --no-auto-compositions --output sbom_backend_application.json + cdxgen ../backend --type python --required-only --profile license-compliance --no-auto-compositions --output sbom_backend_application.json --spec-version 1.6 sbom-utility trim --keys=externalReferences,properties,evidence,authors,lifecycles --quiet --input-file sbom_backend_application.json \ | sbom-utility patch --patch-file ./configuration/patch_supplier.json --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_backend_application.json --quiet --input-file - --output-file sbom_backend_application_"$VERSION".json @@ -60,7 +62,7 @@ jobs: working-directory: ./sbom run: | sed -i "s|REPLACE_VERSION|$VERSION|g" ./configuration/patch_frontend_application.json - cdxgen ../frontend --type npm --no-babel --required-only --profile license-compliance --no-auto-compositions --project-name secobserve --output sbom_frontend_application.json + cdxgen ../frontend --type npm --no-babel --required-only --profile license-compliance --no-auto-compositions --project-name secobserve --output sbom_frontend_application.json --spec-version 1.6 sbom-utility trim --keys=externalReferences,properties,evidence,authors,lifecycles --quiet --input-file sbom_frontend_application.json \ | sbom-utility patch --patch-file ./configuration/patch_supplier.json --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_frontend_application.json --quiet --input-file - --output-file sbom_frontend_application_"$VERSION".json @@ -73,7 +75,7 @@ jobs: working-directory: ./sbom run: | sed -i "s|REPLACE_VERSION|$VERSION|g" ./configuration/patch_backend_container.json - cdxgen maibornwolff/secobserve-backend:$VERSION --type container --exclude-type python --exclude-type ruby --profile license-compliance --no-auto-compositions --output sbom_backend_container.json + trivy image 
--scanners license --pkg-types os --format cyclonedx --output sbom_backend_container.json ghcr.io/secobserve/secobserve-backend:$VERSION sbom-utility trim --keys=externalReferences,properties,evidence,authors,lifecycles,services --quiet --input-file sbom_backend_container.json \ | sbom-utility patch --patch-file ./configuration/patch_supplier.json --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_backend_container.json --quiet --input-file - --output-file sbom_backend_container_"$VERSION".json @@ -86,7 +88,7 @@ jobs: working-directory: ./sbom run: | sed -i "s|REPLACE_VERSION|$VERSION|g" ./configuration/patch_frontend_container.json - cdxgen maibornwolff/secobserve-frontend:$VERSION --type container --exclude-type npm --exclude-type ruby --profile license-compliance --no-auto-compositions --output sbom_frontend_container.json + trivy image --scanners license --pkg-types os --format cyclonedx --output sbom_frontend_container.json ghcr.io/secobserve/secobserve-frontend:$VERSION sbom-utility trim --keys=externalReferences,properties,evidence,authors,lifecycles,services --quiet --input-file sbom_frontend_container.json \ | sbom-utility patch --patch-file ./configuration/patch_supplier.json --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_frontend_container.json --quiet --input-file - --output-file sbom_frontend_container_"$VERSION".json @@ -100,12 +102,13 @@ jobs: run: | sed -i "s|REPLACE_VERSION|$VERSION|g" ./configuration/patch_complete.json cyclonedx merge --hierarchical --name "SecObserve" --version "$VERSION" --input-files sbom_backend_application_"$VERSION".json sbom_frontend_application_"$VERSION".json sbom_backend_container_"$VERSION".json sbom_frontend_container_"$VERSION".json --output-format json \ + | sbom-utility trim --keys=declarations,definitions --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_supplier.json --quiet --input-file - \ | sbom-utility patch --patch-file ./configuration/patch_complete.json --quiet --input-file - --output-file sbom_"$VERSION".json sbom-utility validate --input-file sbom_"$VERSION".json - name: Commit SBOMs - uses: stefanzweifel/git-auto-commit-action@e348103e9026cc0eee72ae06630dbe30c8bf7a79 # v5 + uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7 with: skip_fetch: true create_branch: true diff --git a/.github/workflows/publish_docs.yml b/.github/workflows/publish_docs.yml index 005ffe9b6..1ecd74ad4 100644 --- a/.github/workflows/publish_docs.yml +++ b/.github/workflows/publish_docs.yml @@ -4,7 +4,7 @@ on: push: branches: - main - - chore/documentation_check_security_gate + - chore/docs_copyright permissions: read-all @@ -14,13 +14,17 @@ jobs: permissions: contents: write steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: python-version: 3.x - - uses: actions/cache@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0 + - uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: ${{ github.ref }} path: .cache - run: pip install -r mkdocs_requirements.txt - - run: mkdocs gh-deploy --force + # MkDocs does not support adding non-installed (local) plugins via configuration alone. 
+ # By setting PYTHONPATH to mkdocs_plugins, we ensure MkDocs can import custom plugins from this directory. + - env: + PYTHONPATH: docs/mkdocs_plugins + run: mkdocs gh-deploy --force diff --git a/.github/workflows/release_helm_chart.yaml b/.github/workflows/release_helm_chart.yaml new file mode 100644 index 000000000..34765a59f --- /dev/null +++ b/.github/workflows/release_helm_chart.yaml @@ -0,0 +1,39 @@ +name: Publish released chart + +on: + workflow_dispatch: + push: + branches: + - main + paths: + - charts/** + +permissions: read-all + +jobs: + release: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + fetch-depth: 0 + + - name: Configure Git + run: | + git config user.name "${{ github.actor }}" + git config user.email "${{ github.actor }}@users.noreply.github.com" + + - name: Downcase repo owner for image + run: echo "REPO_OWNER=$(echo ${{ github.repository_owner }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV + - name: Run chart-releaser + uses: bitdeps/helm-oci-charts-releaser@caedceea2a5ab997c7e5469a999811dbb3d5b070 # v0.1.5 + with: + oci_registry: ghcr.io/${{ env.REPO_OWNER }}/charts + oci_username: ${{ github.actor }} + oci_password: ${{ secrets.GITHUB_TOKEN }} + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/scan_sca_current.yml b/.github/workflows/scan_sca_current.yml deleted file mode 100644 index 4a7b64792..000000000 --- a/.github/workflows/scan_sca_current.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: SCA scan current release - -on: - workflow_dispatch: - schedule: - - cron: '30 2 * * *' - -permissions: read-all - -jobs: - docker: - runs-on: ubuntu-latest - steps: - - - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - ref: 'v1.26.0' - - - name: Run SCA vulnerability scanners - uses: MaibornWolff/secobserve_actions_templates/actions/vulnerability_scanner@a8344daa56598a80c2c80081974a0468dd29d086 # main - with: - so_configuration: 'so_configuration_sca_current.yml' - SO_API_TOKEN: ${{ secrets.SO_API_TOKEN }} - - - name: Run endpoint vulnerability scanners - uses: MaibornWolff/secobserve_actions_templates/actions/vulnerability_scanner@a8344daa56598a80c2c80081974a0468dd29d086 # main - with: - so_configuration: 'so_configuration_endpoints.yml' - SO_API_TOKEN: ${{ secrets.SO_API_TOKEN }} diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index eaba276b0..5e134535c 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -32,12 +32,12 @@ jobs: steps: - name: "Checkout code" - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 + uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3 with: results_file: results.sarif results_format: sarif @@ -59,7 +59,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. - name: "Upload artifact" - uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: SARIF file path: results.sarif @@ -67,6 +67,6 @@ # Upload the results to GitHub's code scanning dashboard. 
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@b6a472f63d85b9c78a3ac5e89422239fc15e9b3c # v3.28.1 + uses: github/codeql-action/upload-sarif@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3 with: sarif_file: results.sarif diff --git a/.gitignore b/.gitignore index 2c56b89e2..096b0f14c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ .vscode/settings.json +.continue node_modules docs/assets/images/.$secobserve_architecture.drawio.bkp docs/assets/images/.$secobserve_process.drawio.bkp @@ -15,3 +16,14 @@ keycloak/h2/keycloakdb.trace.db keycloak/h2/keycloakdb.lock.db keycloak/h2/keycloakdb.mv.db backend/application/import_observations/parsers/trivy_operator_prometheus_file +coverage.xml +docker-compose-dev-metabase.yml +__pycache__ +# PyCharm +/.idea/ +# Virtual environment +/.venv/ +# Generated content +/site/ +charts/secobserve/charts/postgresql-16.7.27.tgz +charts/secobserve/my_values.yaml diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9fb526fe7..7cc393c11 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -37,12 +37,12 @@ A well-written bug report helps the developers understand and reproduce the prob ## Security vulnerabilities -If you find a security vulnerability, please act responsibly and report it to us. Please do not create a public issue. Instead, use the ["Report a vulnerability"](https://github.com/MaibornWolff/SecObserve/security/advisories/new) button in the GitHub repository (under the "Security" tab) to report the vulnerability. +If you find a security vulnerability, please act responsibly and report it to us. Please do not create a public issue. Instead, use the ["Report a vulnerability"](https://github.com/SecObserve/SecObserve/security/advisories/new) button in the GitHub repository (under the "Security" tab) to report the vulnerability. ## Code contributions -Before you start working on a new feature, please have a discussion with the maintainers on the [GitHub discussions page](https://github.com/MaibornWolff/SecObserve/discussions). This helps to ensure that your work is aligned with the project's goals and that you are not duplicating efforts. It also gives you the opportunity to get feedback and guidance from the maintainers. +Before you start working on a new feature, please have a discussion with the maintainers on the [GitHub discussions page](https://github.com/SecObserve/SecObserve/discussions). This helps to ensure that your work is aligned with the project's goals and that you are not duplicating efforts. It also gives you the opportunity to get feedback and guidance from the maintainers. ### Development process diff --git a/LICENSE.txt b/LICENSE.txt index a724a4452..17cbb44a4 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,4 +1,5 @@ -Copyright 2023 MaibornWolff GmbH / Stefan Fleckenstein +Copyright 2023 - November 2025 MaibornWolff GmbH / Stefan Fleckenstein + since December 2025 Stefan Fleckenstein Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/README.md b/README.md index 25745ea32..724c0d92d 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,18 @@ +> [!IMPORTANT] +> The SecObserve repositories have been moved from the MaibornWolff organization to their own SecObserve organization. Even though all links to the previous repository location are automatically redirected to the new location, we strongly recommend updating any existing links to the new repository URL. 
+> +> The location of the Docker images has been changed with release 1.42.0; they are now stored in the GitHub Container Registry: +> +> * **ghcr.io/secobserve/secobserve-backend** (see https://github.com/SecObserve/SecObserve/pkgs/container/secobserve-backend) +> * **ghcr.io/secobserve/secobserve-frontend** (see https://github.com/SecObserve/SecObserve/pkgs/container/secobserve-frontend) +> +> Please adjust your pull statements accordingly. + ![SecObserve](frontend/public/secobserve.svg) # SecObserve -[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/8388/badge)](https://www.bestpractices.dev/projects/8388) [![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/MaibornWolff/SecObserve/badge)](https://securityscorecards.dev/viewer/?uri=github.com/MaibornWolff/SecObserve) +[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/8388/badge)](https://www.bestpractices.dev/projects/8388) [![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/SecObserve/SecObserve/badge)](https://securityscorecards.dev/viewer/?uri=github.com/SecObserve/SecObserve) SecObserve is an open source vulnerability and license management system for software development teams and cloud environments. It supports a variety of open source vulnerability scanners and integrates easily into CI/CD pipelines. Results about potential security flaws from various vulnerability scanning tools are made available for assessment and reporting. @@ -21,7 +31,7 @@ The aim of SecObserve is to make vulnerability scanning and vulnerability manage All templates run the scanner, upload the results into SecObserve and make the results of the scans available for download as artefacts in JSON format. - The sources of the GitHub actions and GitLab CI templates can be found in [https://github.com/MaibornWolff/secobserve_actions_templates](https://github.com/MaibornWolff/secobserve_actions_templates). + The sources of the GitHub actions and GitLab CI templates can be found in [https://github.com/SecObserve/secobserve_actions_templates](https://github.com/SecObserve/secobserve_actions_templates). ![Overview](docs/assets/images/secobserve_process.svg) @@ -31,7 +41,7 @@ The aim of SecObserve is to make vulnerability scanning and vulnerability manage ## Documentation -The full documentation how to install and use Secobserve can be found here: [https://maibornwolff.github.io/SecObserve/](https://maibornwolff.github.io/SecObserve/) +The full documentation on how to install and use SecObserve can be found here: [https://secobserve.github.io/SecObserve/](https://secobserve.github.io/SecObserve/) ## Code of Conduct diff --git a/SECURITY.md b/SECURITY.md index 89640d319..c3fbc2ed1 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -2,7 +2,7 @@ ## Reporting a Vulnerability -Please use the ["Report a vulnerability"](https://github.com/MaibornWolff/SecObserve/security/advisories/new) button in the GitHub repository (under the "Security" tab) to report a vulnerability. +Please use the ["Report a vulnerability"](https://github.com/SecObserve/SecObserve/security/advisories/new) button in the GitHub repository (under the "Security" tab) to report a vulnerability.
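The README note above asks users to adjust their pull statements for the relocated images. As a minimal sketch of that adjustment, assuming a user-side Compose file with service names `backend` and `frontend` and the `latest` tag (all of which are illustrative and not part of this change; only the image references come from this diff):

```yaml
# Hypothetical excerpt from a user's docker-compose.yml.
# Service names and tag are assumptions; the registries/repositories
# reflect the image relocation described in the README note above.
services:
  backend:
    # previously: maibornwolff/secobserve-backend:latest
    image: ghcr.io/secobserve/secobserve-backend:latest
  frontend:
    # previously: maibornwolff/secobserve-frontend:latest
    image: ghcr.io/secobserve/secobserve-frontend:latest
```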
**Please do not report security vulnerabilities through public GitHub issues, discussions, or pull requests.** diff --git a/backend/application/__init__.py b/backend/application/__init__.py index 088f4aa9c..3ff95f334 100644 --- a/backend/application/__init__.py +++ b/backend/application/__init__.py @@ -1,4 +1,4 @@ -__version__ = "1.26.0" +__version__ = "1.48.0" import pymysql diff --git a/backend/application/access_control/api/filters.py b/backend/application/access_control/api/filters.py index babc20719..935b269a0 100644 --- a/backend/application/access_control/api/filters.py +++ b/backend/application/access_control/api/filters.py @@ -1,8 +1,11 @@ -from django.db.models import Exists +from typing import Any, Optional + +from django.db.models import Exists, QuerySet from django_filters import CharFilter, FilterSet, NumberFilter, OrderingFilter +from rest_framework.request import Request from application.access_control.models import ( - API_Token, + API_Token_Multiple, Authorization_Group, Authorization_Group_Member, User, @@ -17,40 +20,46 @@ class UserFilter(FilterSet): field_name="exclude_authorization_group", method="get_exclude_authorization_group", ) - exclude_license_group = NumberFilter( - field_name="exclude_license_group", method="get_exclude_license_group" - ) - exclude_license_policy = NumberFilter( - field_name="exclude_license_policy", method="get_exclude_license_policy" - ) - exclude_product = NumberFilter( - field_name="exclude_product", method="get_exclude_product" - ) + exclude_license_group = NumberFilter(field_name="exclude_license_group", method="get_exclude_license_group") + exclude_license_policy = NumberFilter(field_name="exclude_license_policy", method="get_exclude_license_policy") + exclude_product = NumberFilter(field_name="exclude_product", method="get_exclude_product") def get_exclude_authorization_group( - self, queryset, field_name, value - ): # pylint: disable=unused-argument + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: if value is not None: return queryset.exclude(authorization_groups__id=value) return queryset def get_exclude_license_group( - self, queryset, field_name, value - ): # pylint: disable=unused-argument + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: if value is not None: return queryset.exclude(license_groups__id=value) return queryset def get_exclude_license_policy( - self, queryset, field_name, value - ): # pylint: disable=unused-argument + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: if value is not None: return queryset.exclude(license_policies__id=value) return queryset def get_exclude_product( - self, queryset, field_name, value - ): # pylint: disable=unused-argument + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: if value is not None: return queryset.exclude(product_members__id=value) return queryset @@ -78,9 +87,16 @@ class Meta: "is_external", ] - def __init__(self, data=None, queryset=None, *, request=None, prefix=None): + def __init__( + self, + data: Optional[Any] = None, + queryset: Optional[QuerySet] = None, + *, + request: Optional[Request] = None, + prefix: Optional[Any] = None, + ): super().__init__(data, queryset, request=request, prefix=prefix) - if not request.user.is_superuser: + if request and not request.user.is_superuser: self.filters.pop("is_oidc_user") self.filters.pop("is_active") 
self.filters.pop("is_superuser") @@ -97,33 +113,36 @@ class AuthorizationGroupFilter(FilterSet): name = CharFilter(field_name="name", lookup_expr="icontains") oidc_group = CharFilter(field_name="oidc_group", lookup_expr="icontains") user = NumberFilter(field_name="users") - exclude_license_group = NumberFilter( - field_name="exclude_license_group", method="get_exclude_license_group" - ) - exclude_license_policy = NumberFilter( - field_name="exclude_license_policy", method="get_exclude_license_policy" - ) - exclude_product = NumberFilter( - field_name="exclude_product", method="get_exclude_product" - ) + exclude_license_group = NumberFilter(field_name="exclude_license_group", method="get_exclude_license_group") + exclude_license_policy = NumberFilter(field_name="exclude_license_policy", method="get_exclude_license_policy") + exclude_product = NumberFilter(field_name="exclude_product", method="get_exclude_product") def get_exclude_license_group( - self, queryset, field_name, value - ): # pylint: disable=unused-argument + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: if value is not None: return queryset.exclude(license_groups__id=value) return queryset def get_exclude_license_policy( - self, queryset, field_name, value - ): # pylint: disable=unused-argument + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: if value is not None: return queryset.exclude(license_policies__id=value) return queryset def get_exclude_product( - self, queryset, field_name, value - ): # pylint: disable=unused-argument + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: if value is not None: return queryset.exclude(authorization_groups__id=value) return queryset @@ -137,12 +156,15 @@ class Meta: model = Authorization_Group fields = ["name", "oidc_group"] - def get_user(self, queryset, name, value): # pylint: disable=unused-argument + def get_user( + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: # field_name is used as a positional argument - authorization_group_members = Authorization_Group_Member.objects.filter( - user__id=value - ) + authorization_group_members = Authorization_Group_Member.objects.filter(user__id=value) queryset = queryset.annotate( member=Exists(authorization_group_members), ) @@ -169,13 +191,18 @@ class Meta: class ApiTokenFilter(FilterSet): - name = CharFilter(field_name="user__username", lookup_expr="icontains") + username = CharFilter(field_name="user__username", lookup_expr="icontains") ordering = OrderingFilter( # tuple-mapping retains order - fields=(("user__username", "name"),), + fields=( + ("user__username", "username"), + ("user", "user"), + ("name", "name"), + ("expiration_date", "expiration_date"), + ) ) class Meta: - model = API_Token - fields = ["name"] + model = API_Token_Multiple + fields = ["username", "user"] diff --git a/backend/application/access_control/api/permissions.py b/backend/application/access_control/api/permissions.py index 580a9ae93..1f141b1c7 100644 --- a/backend/application/access_control/api/permissions.py +++ b/backend/application/access_control/api/permissions.py @@ -1,5 +1,10 @@ +from typing import Any + +from django.contrib.auth.models import AnonymousUser from django.shortcuts import get_object_or_404 from rest_framework.permissions import BasePermission +from rest_framework.request import Request +from rest_framework.views 
import APIView from application.access_control.models import ( Authorization_Group, @@ -8,7 +13,7 @@ class UserHasSuperuserPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if ( request.method != "GET" and request.path != "/api/users/my_settings/" @@ -18,24 +23,23 @@ def has_permission(self, request, view): return True - def has_object_permission(self, request, view, obj): - if ( - request.method != "GET" - and request.path != f"/api/users/{obj.pk}/change_password/" - ): + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: + if request.method != "GET" and request.path != f"/api/users/{obj.pk}/change_password/": return request.user.is_superuser return True class UserHasAuthorizationGroupPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "POST": + if isinstance(request.user, AnonymousUser): + return False return not request.user.is_external return True - def has_object_permission(self, request, view, obj: Authorization_Group): + def has_object_permission(self, request: Request, view: APIView, obj: Authorization_Group) -> bool: if request.method != "GET": return _has_manage_permission(request, obj) @@ -43,24 +47,26 @@ def has_object_permission(self, request, view, obj: Authorization_Group): class UserHasAuthorizationGroupMemberPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "POST": - authorization_group = get_object_or_404( - Authorization_Group, pk=request.data.get("authorization_group") - ) + authorization_group = get_object_or_404(Authorization_Group, pk=request.data.get("authorization_group")) return _has_manage_permission(request, authorization_group) return True - def has_object_permission(self, request, view, obj: Authorization_Group_Member): + def has_object_permission(self, request: Request, view: APIView, obj: Authorization_Group_Member) -> bool: if request.method != "GET": return _has_manage_permission(request, obj.authorization_group) return True -def _has_manage_permission(request, authorization_group: Authorization_Group) -> bool: +def _has_manage_permission(request: Request, authorization_group: Authorization_Group) -> bool: user = request.user + + if isinstance(user, AnonymousUser): + return False + if user and user.is_superuser: return True diff --git a/backend/application/access_control/api/permissions_base.py b/backend/application/access_control/api/permissions_base.py deleted file mode 100644 index 1ba7135a2..000000000 --- a/backend/application/access_control/api/permissions_base.py +++ /dev/null @@ -1,42 +0,0 @@ -from django.shortcuts import get_object_or_404 -from rest_framework.exceptions import ParseError - -from application.access_control.services.authorization import user_has_permission - - -def check_post_permission(request, post_model, post_foreign_key, post_permission): - if request.method == "POST": - if request.data.get(post_foreign_key) is None: - raise ParseError( - f"Unable to check for permissions: Attribute '{post_foreign_key}' is required" - ) - object_to_check = get_object_or_404( - post_model, pk=request.data.get(post_foreign_key) - ) - return user_has_permission(object_to_check, post_permission) - - return True - - -def check_object_permission( - *, - request, - object_to_check, - get_permission, - put_permission, 
- delete_permission, - post_permission=None, -): - if request.method == "GET": - return user_has_permission(object_to_check, get_permission) - - if request.method in ("PUT", "PATCH"): - return user_has_permission(object_to_check, put_permission) - - if request.method == "DELETE": - return user_has_permission(object_to_check, delete_permission) - - if request.method == "POST": - return user_has_permission(object_to_check, post_permission) - - return False diff --git a/backend/application/access_control/api/serializers.py b/backend/application/access_control/api/serializers.py index 277857174..505ac0966 100644 --- a/backend/application/access_control/api/serializers.py +++ b/backend/application/access_control/api/serializers.py @@ -1,9 +1,10 @@ -from typing import Optional +import re +from datetime import date +from typing import Any, Optional -from django.core.validators import MaxValueValidator, MinValueValidator from rest_framework.serializers import ( CharField, - IntegerField, + DateField, ModelSerializer, Serializer, SerializerMethodField, @@ -11,17 +12,17 @@ ) from application.access_control.models import ( - API_Token, + API_Token_Multiple, Authorization_Group, Authorization_Group_Member, User, ) +from application.access_control.queries.api_token import get_api_tokens_for_user from application.access_control.queries.authorization_group_member import ( get_authorization_group_member, ) -from application.access_control.services.authorization import get_user_permissions -from application.access_control.services.roles_permissions import Permissions -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user +from application.authorization.services.roles_permissions import Permissions from application.core.models import Product_Authorization_Group_Member, Product_Member @@ -50,19 +51,20 @@ class Meta: "is_external", "setting_theme", "setting_list_size", + "setting_package_info_preference", + "setting_metrics_timespan", "permissions", - "setting_list_properties", "oidc_groups_hash", "is_oidc_user", "date_joined", "has_password", ] - def to_representation(self, instance: User): + def to_representation(self, instance: User) -> dict[str, Any]: data = super().to_representation(instance) user = get_current_user() - if user and not user.is_superuser and not user.pk == instance.pk: + if user and not user.is_superuser and user.pk != instance.pk: data.pop("email") data.pop("first_name") data.pop("last_name") @@ -71,7 +73,8 @@ def to_representation(self, instance: User): data.pop("is_external") data.pop("setting_theme") data.pop("setting_list_size") - data.pop("setting_list_properties") + data.pop("setting_package_info_preference") + data.pop("setting_metrics_timespan") data.pop("permissions") data.pop("oidc_groups_hash") data.pop("is_oidc_user") @@ -87,22 +90,32 @@ def get_full_name(self, obj: User) -> str: return obj.full_name def get_permissions(self, obj: User) -> list[Permissions]: - return get_user_permissions(obj) + return _get_user_permissions(obj) def get_has_password(self, obj: User) -> bool: - return bool( - obj.password - and obj.password != "" # nosec B105 - and obj.has_usable_password() - ) + return bool(obj.password and obj.password != "" and obj.has_usable_password()) # nosec B105 # eliminate false positive, password is not hardcoded +def _get_user_permissions(user: User = None) -> list[Permissions]: + if not user: + user = get_current_user() + + permissions = [] + + if user and not 
user.is_external: + permissions.append(Permissions.Product_Create) + permissions.append(Permissions.Product_Group_Create) + + return permissions + + class UserSerializer(UserListSerializer): full_name = SerializerMethodField() has_authorization_groups = SerializerMethodField() has_product_group_members = SerializerMethodField() has_product_members = SerializerMethodField() + has_api_tokens = SerializerMethodField() class Meta: model = User @@ -118,8 +131,9 @@ class Meta: "is_external", "setting_theme", "setting_list_size", + "setting_package_info_preference", + "setting_metrics_timespan", "permissions", - "setting_list_properties", "oidc_groups_hash", "is_oidc_user", "date_joined", @@ -127,16 +141,18 @@ class Meta: "has_authorization_groups", "has_product_group_members", "has_product_members", + "has_api_tokens", ] - def to_representation(self, instance: User): + def to_representation(self, instance: User) -> dict[str, Any]: data = super().to_representation(instance) user = get_current_user() - if user and not user.is_superuser and not user.pk == instance.pk: + if user and not user.is_superuser and user.pk != instance.pk: data.pop("has_authorization_groups") data.pop("has_product_group_members") data.pop("has_product_members") + data.pop("has_api_tokens") return data @@ -147,14 +163,13 @@ def get_has_authorization_groups(self, obj: User) -> bool: return Authorization_Group_Member.objects.filter(user=obj).exists() def get_has_product_group_members(self, obj: User) -> bool: - return Product_Member.objects.filter( - user=obj, product__is_product_group=True - ).exists() + return Product_Member.objects.filter(user=obj, product__is_product_group=True).exists() def get_has_product_members(self, obj: User) -> bool: - return Product_Member.objects.filter( - user=obj, product__is_product_group=False - ).exists() + return Product_Member.objects.filter(user=obj, product__is_product_group=False).exists() + + def get_has_api_tokens(self, obj: User) -> bool: + return get_api_tokens_for_user(obj).exists() class UserUpdateSerializer(ModelSerializer): @@ -204,15 +219,11 @@ def get_has_product_members(self, obj: Authorization_Group) -> bool: ).exists() def get_has_users(self, obj: Authorization_Group) -> bool: - return Authorization_Group_Member.objects.filter( - authorization_group=obj - ).exists() + return Authorization_Group_Member.objects.filter(authorization_group=obj).exists() def get_is_manager(self, obj: Authorization_Group) -> bool: user = get_current_user() - return Authorization_Group_Member.objects.filter( - authorization_group=obj, user=user, is_manager=True - ).exists() + return Authorization_Group_Member.objects.filter(authorization_group=obj, user=user, is_manager=True).exists() class AuthorizationGroupListSerializer(ModelSerializer): @@ -232,26 +243,24 @@ class Meta: model = Authorization_Group_Member fields = "__all__" - def validate(self, attrs: dict): + def validate(self, attrs: dict) -> dict: self.instance: Authorization_Group_Member - data_authorization_group: Optional[Authorization_Group] = attrs.get( - "authorization_group" - ) + data_authorization_group: Optional[Authorization_Group] = attrs.get("authorization_group") data_user = attrs.get("user") if self.instance is not None and ( - ( - data_authorization_group - and data_authorization_group != self.instance.authorization_group - ) + (data_authorization_group and data_authorization_group != self.instance.authorization_group) or (data_user and data_user != self.instance.user) ): raise ValidationError("Authorization group and user 
cannot be changed") if self.instance is None: - authorization_group_member = get_authorization_group_member( - data_authorization_group, data_user - ) + if data_authorization_group is None: + raise ValidationError("Authorization group is required") + if data_user is None: + raise ValidationError("User is required") + + authorization_group_member = get_authorization_group_member(data_authorization_group, data_user) if authorization_group_member: raise ValidationError( f"Authorization group member {data_authorization_group} / {data_user} already exists" @@ -266,7 +275,8 @@ class Meta: fields = [ "setting_theme", "setting_list_size", - "setting_list_properties", + "setting_package_info_preference", + "setting_metrics_timespan", ] @@ -280,43 +290,50 @@ class AuthenticationResponseSerializer(Serializer): user = UserSerializer() -class ProductApiTokenSerializer(Serializer): - id = IntegerField(validators=[MinValueValidator(0)]) - role = IntegerField(validators=[MinValueValidator(1), MaxValueValidator(5)]) - - class ApiTokenSerializer(ModelSerializer): - id = SerializerMethodField() - name = SerializerMethodField() + username = SerializerMethodField() product = SerializerMethodField() product_group = SerializerMethodField() class Meta: - model = API_Token - fields = ["id", "name", "product", "product_group"] - - def get_id(self, obj: API_Token) -> int: - return obj.pk + model = API_Token_Multiple + fields = ["id", "name", "user", "username", "product", "product_group", "expiration_date"] - def get_name(self, obj: API_Token) -> str: + def get_username(self, obj: API_Token_Multiple) -> str: return obj.user.username - def get_product(self, obj: API_Token) -> Optional[int]: - product_member = Product_Member.objects.filter( - user=obj.user, product__is_product_group=False - ).first() - if product_member: - return product_member.product.pk + def get_product(self, obj: API_Token_Multiple) -> Optional[int]: + if re.match("-product-(\\d)*(-.*)?-api_token-", obj.user.username): + product_member = Product_Member.objects.filter(user=obj.user, product__is_product_group=False).first() + if product_member: + return product_member.product.pk return None - def get_product_group(self, obj: API_Token) -> Optional[int]: - product_member = Product_Member.objects.filter( - user=obj.user, product__is_product_group=True - ).first() - if product_member: - return product_member.product.pk + def get_product_group(self, obj: API_Token_Multiple) -> Optional[int]: + if re.match("-product-(\\d)*(-.*)?-api_token-", obj.user.username): + product_member = Product_Member.objects.filter(user=obj.user, product__is_product_group=True).first() + if product_member: + return product_member.product.pk return None -class CreateApiTokenResponseSerializer(Serializer): +class ApiTokenCreateRequestSerializer(Serializer): + username = CharField(max_length=150, required=True) + password = CharField(max_length=128, required=True) + name = CharField(max_length=255, required=True) + expiration_date = DateField(required=False, allow_null=True) + + def validate_expiration_date(self, expiration_date: Optional[date]) -> Optional[date]: + if expiration_date and expiration_date < date.today(): + raise ValidationError("Expiration date cannot be in the past") + return expiration_date + + +class ApiTokenCreateResponseSerializer(Serializer): token = CharField() + + +class ApiTokenRevokeRequestSerializer(Serializer): + username = CharField(max_length=150, required=True) + password = CharField(max_length=128, required=True) + name = CharField(max_length=255, 
required=True) diff --git a/backend/application/access_control/api/views.py b/backend/application/access_control/api/views.py index 9029696ba..ec0380beb 100644 --- a/backend/application/access_control/api/views.py +++ b/backend/application/access_control/api/views.py @@ -3,6 +3,7 @@ from typing import Any from django.contrib.auth import authenticate as django_authenticate +from django.contrib.auth.models import AnonymousUser from django.contrib.auth.password_validation import ( CommonPasswordValidator, MinimumLengthValidator, @@ -12,17 +13,20 @@ validate_password, ) from django.core.exceptions import ValidationError as DjangoValidationError +from django.db.models import QuerySet from django_filters.rest_framework import DjangoFilterBackend -from drf_spectacular.utils import OpenApiParameter, extend_schema +from drf_spectacular.utils import extend_schema from rest_framework import status from rest_framework.decorators import action from rest_framework.exceptions import PermissionDenied, ValidationError from rest_framework.filters import SearchFilter from rest_framework.mixins import ListModelMixin -from rest_framework.permissions import IsAdminUser, IsAuthenticated +from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request from rest_framework.response import Response +from rest_framework.serializers import BaseSerializer from rest_framework.views import APIView -from rest_framework.viewsets import GenericViewSet, ModelViewSet, ViewSet +from rest_framework.viewsets import GenericViewSet, ModelViewSet from application.access_control.api.filters import ( ApiTokenFilter, @@ -36,13 +40,14 @@ UserHasSuperuserPermission, ) from application.access_control.api.serializers import ( + ApiTokenCreateRequestSerializer, + ApiTokenCreateResponseSerializer, + ApiTokenRevokeRequestSerializer, ApiTokenSerializer, AuthenticationRequestSerializer, AuthenticationResponseSerializer, AuthorizationGroupMemberSerializer, AuthorizationGroupSerializer, - CreateApiTokenResponseSerializer, - ProductApiTokenSerializer, UserListSerializer, UserPasswordSerializer, UserPasswortRulesSerializer, @@ -51,12 +56,13 @@ UserUpdateSerializer, ) from application.access_control.models import ( - API_Token, + API_Token_Multiple, Authorization_Group, Authorization_Group_Member, JWT_Secret, User, ) +from application.access_control.queries.api_token import get_api_tokens from application.access_control.queries.authorization_group import ( get_authorization_groups, ) @@ -67,23 +73,14 @@ get_users, get_users_without_api_tokens, ) -from application.access_control.services.authorization import user_has_permission_or_403 from application.access_control.services.jwt_authentication import create_jwt from application.access_control.services.jwt_secret import create_secret -from application.access_control.services.product_api_token import ( - create_product_api_token, - get_product_api_tokens, - revoke_product_api_token, -) -from application.access_control.services.roles_permissions import Permissions from application.access_control.services.user_api_token import ( create_user_api_token, revoke_user_api_token, ) from application.commons.models import Settings from application.commons.services.log_message import format_log_message -from application.core.models import Product -from application.core.queries.product import get_product_by_id logger = logging.getLogger("secobserve.access_control") @@ -96,13 +93,13 @@ class UserViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] 
search_fields = ["full_name"] - def get_queryset(self): + def get_queryset(self) -> QuerySet[User]: if self.action == "list": return get_users_without_api_tokens() return get_users() - def get_serializer_class(self): + def get_serializer_class(self) -> type[BaseSerializer[Any]]: if self.action == "list": return UserListSerializer if self.action in ["create", "update", "partial_update"]: @@ -110,7 +107,7 @@ def get_serializer_class(self): return super().get_serializer_class() - def destroy(self, request, *args, **kwargs) -> Response: + def destroy(self, request: Request, *args: Any, **kwargs: Any) -> Response: instance: User = self.get_object() if instance == request.user: raise ValidationError("You cannot delete yourself") @@ -119,7 +116,7 @@ def destroy(self, request, *args, **kwargs) -> Response: @extend_schema(methods=["GET"], responses={status.HTTP_200_OK: UserSerializer}) @action(detail=False, methods=["get"]) - def me(self, request): + def me(self, request: Request) -> Response: serializer = UserSerializer(request.user) return Response(serializer.data, status=status.HTTP_200_OK) @@ -129,23 +126,27 @@ def me(self, request): responses={status.HTTP_200_OK: UserSerializer}, ) @action(detail=False, methods=["patch"]) - def my_settings(self, request): + def my_settings(self, request: Request) -> Response: request_serializer = UserSettingsSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) setting_theme = request_serializer.validated_data.get("setting_theme") setting_list_size = request_serializer.validated_data.get("setting_list_size") - setting_list_properties = request_serializer.validated_data.get( - "setting_list_properties" - ) - user: User = request.user + setting_package_info_preference = request_serializer.validated_data.get("setting_package_info_preference") + setting_metrics_timespan = request_serializer.validated_data.get("setting_metrics_timespan") + user = request.user + if isinstance(user, AnonymousUser): + raise PermissionDenied("You must be authenticated to change settings") + if setting_theme: user.setting_theme = setting_theme if setting_list_size: user.setting_list_size = setting_list_size - if setting_list_properties: - user.setting_list_properties = setting_list_properties + if setting_package_info_preference: + user.setting_package_info_preference = setting_package_info_preference + if setting_metrics_timespan: + user.setting_metrics_timespan = setting_metrics_timespan user.save() response_serializer = UserSerializer(request.user) @@ -157,7 +158,7 @@ def my_settings(self, request): responses={status.HTTP_204_NO_CONTENT: None}, ) @action(detail=True, methods=["patch"]) - def change_password(self, request, pk=None): # pylint: disable=unused-argument + def change_password(self, request: Request, pk: int = None) -> Response: # pylint: disable=unused-argument # pk is not used, but it is required to match the action signature request_serializer = UserPasswordSerializer(data=request.data) if not request_serializer.is_valid(): @@ -169,9 +170,7 @@ def change_password(self, request, pk=None): # pylint: disable=unused-argument new_password_2 = request_serializer.validated_data.get("new_password_2") if not request.user.is_superuser and request.user.pk != instance.pk: - raise PermissionDenied( - "You are not allowed to change other users' passwords" - ) + raise PermissionDenied("You are not allowed to change other users' passwords") if not instance.has_usable_password() or instance.is_oidc_user: raise 
ValidationError("User's password cannot be changed") @@ -202,27 +201,22 @@ def change_password(self, request, pk=None): # pylint: disable=unused-argument responses={status.HTTP_200_OK: UserPasswortRulesSerializer}, ) @action(detail=False, methods=["get"]) - def password_rules(self, request): + def password_rules(self, request: Request) -> Response: @dataclass class PasswordRules: password_rules: str - password_rules_text = password_validators_help_texts( - self._get_password_validators() - ) - password_rules = PasswordRules("- " + "\n- ".join(password_rules_text)) + password_rules_list = password_validators_help_texts(self._get_password_validators()) + password_rules_list = [s.replace("Your password", "The password") for s in password_rules_list] + password_rules = PasswordRules("- " + "\n- ".join(password_rules_list)) response_serializer = UserPasswortRulesSerializer(password_rules) return Response(response_serializer.data, status=status.HTTP_200_OK) def _get_password_validators(self) -> list[Any]: validators: list[Any] = [] settings = Settings.load() - validators.append( - MinimumLengthValidator( - min_length=settings.password_validator_minimum_length - ) - ) + validators.append(MinimumLengthValidator(min_length=settings.password_validator_minimum_length)) if settings.password_validator_common_passwords: validators.append(CommonPasswordValidator()) if settings.password_validator_attribute_similarity: @@ -241,7 +235,7 @@ class AuthorizationGroupViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] - def get_queryset(self): + def get_queryset(self) -> QuerySet[Authorization_Group]: return get_authorization_groups() @@ -251,119 +245,69 @@ class AuthorizationGroupMemberViewSet(ModelViewSet): queryset = Authorization_Group_Member.objects.none() permission_classes = (IsAuthenticated, UserHasAuthorizationGroupMemberPermission) - def get_queryset(self): + def get_queryset(self) -> QuerySet[Authorization_Group_Member]: return get_authorization_group_members() class ApiTokenViewSet(ListModelMixin, GenericViewSet): serializer_class = ApiTokenSerializer filterset_class = ApiTokenFilter - permission_classes = (IsAuthenticated, IsAdminUser) - queryset = API_Token.objects.all() + permission_classes = (IsAuthenticated,) + queryset = API_Token_Multiple.objects.none() + + def get_queryset(self) -> QuerySet[API_Token_Multiple]: + return get_api_tokens().select_related("user") -class CreateUserAPITokenView(APIView): +class UserAPITokenCreateView(APIView): authentication_classes = [] permission_classes = [] @extend_schema( - request=AuthenticationRequestSerializer, - responses={status.HTTP_201_CREATED: CreateApiTokenResponseSerializer}, + request=ApiTokenCreateRequestSerializer, + responses={status.HTTP_201_CREATED: ApiTokenCreateResponseSerializer}, ) - def post(self, request): - user = _get_authenticated_user(request.data) + def post(self, request: Request) -> Response: + request_serializer = ApiTokenCreateRequestSerializer(data=request.data) + if not request_serializer.is_valid(): + raise ValidationError(request_serializer.errors) + + user = _get_authenticated_user(request_serializer.validated_data) + name = request_serializer.validated_data.get("name") + expiration_date = request_serializer.validated_data.get("expiration_date") + try: - token = create_user_api_token(user) + token = create_user_api_token(user, name, expiration_date) except ValidationError as e: response = Response(status=status.HTTP_400_BAD_REQUEST) - logger.warning( - 
format_log_message(message=str(e), user=user, response=response) - ) + logger.warning(format_log_message(message=str(e), username=user.username, response=response)) raise response = Response({"token": token}, status=status.HTTP_201_CREATED) - logger.info( - format_log_message( - message="API token created", user=user, response=response - ) - ) + logger.info(format_log_message(message="API token created", username=user.username, response=response)) return response -class RevokeUserAPITokenView(APIView): +class UserAPITokenRevokeView(APIView): authentication_classes = [] permission_classes = [] @extend_schema( - request=AuthenticationRequestSerializer, + request=ApiTokenRevokeRequestSerializer, responses={status.HTTP_204_NO_CONTENT: None}, ) - def post(self, request): - user = _get_authenticated_user(request.data) - revoke_user_api_token(user) - response = Response(status=status.HTTP_204_NO_CONTENT) - logger.info( - format_log_message( - message="API token revoked", user=user, response=response - ) - ) - return response - - -class ProductApiTokenViewset(ViewSet): - serializer_class = ProductApiTokenSerializer - - @extend_schema( - parameters=[ - OpenApiParameter( - name="product", location=OpenApiParameter.QUERY, required=True, type=int - ), - ], - ) - def list(self, request): - product = _get_product(request.query_params.get("product")) - user_has_permission_or_403(product, Permissions.Product_View) - tokens = get_product_api_tokens(product) - serializer = ProductApiTokenSerializer(tokens, many=True) - response_data = {"results": serializer.data} - return Response(response_data) - - @extend_schema( - request=ProductApiTokenSerializer, - responses={status.HTTP_200_OK: CreateApiTokenResponseSerializer}, - ) - def create(self, request): - request_serializer = ProductApiTokenSerializer(data=request.data) + def post(self, request: Request) -> Response: + request_serializer = ApiTokenRevokeRequestSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) - product = _get_product(request_serializer.validated_data.get("id")) - - user_has_permission_or_403(product, Permissions.Product_Api_Token_Create) + user = _get_authenticated_user(request_serializer.validated_data) + name = request_serializer.validated_data.get("name") - token = create_product_api_token( - product, request_serializer.validated_data.get("role") - ) - - response = Response({"token": token}, status=status.HTTP_201_CREATED) - logger.info( - format_log_message(message="Product API token created", response=response) - ) - return response - - @extend_schema( - responses={status.HTTP_204_NO_CONTENT: None}, - ) - def destroy(self, request, pk=None): - product = _get_product(pk) - user_has_permission_or_403(product, Permissions.Product_Api_Token_Revoke) - - revoke_product_api_token(product) + revoke_user_api_token(user, name) response = Response(status=status.HTTP_204_NO_CONTENT) - logger.info( - format_log_message(message="Product API token revoked", response=response) - ) + logger.info(format_log_message(message="API token revoked", username=user.username, response=response)) return response @@ -375,18 +319,14 @@ class AuthenticateView(APIView): request=AuthenticationRequestSerializer, responses={status.HTTP_200_OK: AuthenticationResponseSerializer}, ) - def post(self, request): + def post(self, request: Request) -> Response: user = _get_authenticated_user(request.data) jwt = create_jwt(user) user_serializer = UserSerializer(user) response = Response({"jwt": jwt, "user": 
user_serializer.data}) - logger.info( - format_log_message( - message="User authenticated", user=user, response=response - ) - ) + logger.info(format_log_message(message="User authenticated", username=user.username, response=response)) return response @@ -397,19 +337,15 @@ class JWTSecretResetView(APIView): request=None, responses={status.HTTP_204_NO_CONTENT: None}, ) - def post(self, request): + def post(self, request: Request) -> Response: jwt_secret = JWT_Secret(secret=create_secret()) jwt_secret.save() return Response(status=status.HTTP_204_NO_CONTENT) def _get_authenticated_user(data: dict) -> User: - request_serializer = AuthenticationRequestSerializer(data=data) - if not request_serializer.is_valid(): - raise ValidationError(request_serializer.errors) - - username = request_serializer.validated_data.get("username") - password = request_serializer.validated_data.get("password") + username = data.get("username") + password = data.get("password") user: User = django_authenticate(username=username, password=password) # type: ignore[assignment] # We always get a User from our model @@ -417,11 +353,3 @@ def _get_authenticated_user(data: dict) -> User: raise PermissionDenied("Invalid credentials") return user - - -def _get_product(product_id: int) -> Product: - product = get_product_by_id(product_id) - if not product: - raise ValidationError(f"Product {product_id} does not exist") - - return product diff --git a/backend/application/access_control/apps.py b/backend/application/access_control/apps.py index a63354851..ca0b2d7b6 100644 --- a/backend/application/access_control/apps.py +++ b/backend/application/access_control/apps.py @@ -6,7 +6,7 @@ class AccessControlConfig(AppConfig): name = "application.access_control" verbose_name = _("Access Control") - def ready(self): + def ready(self) -> None: try: import application.access_control.signals # noqa F401 pylint: disable=import-outside-toplevel, unused-import except ImportError: diff --git a/backend/application/access_control/migrations/0001_initial.py b/backend/application/access_control/migrations/0001_initial.py index 4c983db76..4513207b2 100644 --- a/backend/application/access_control/migrations/0001_initial.py +++ b/backend/application/access_control/migrations/0001_initial.py @@ -28,9 +28,7 @@ class Migration(migrations.Migration): ("password", models.CharField(max_length=128, verbose_name="password")), ( "last_login", - models.DateTimeField( - blank=True, null=True, verbose_name="last login" - ), + models.DateTimeField(blank=True, null=True, verbose_name="last login"), ), ( "is_superuser", @@ -43,23 +41,17 @@ class Migration(migrations.Migration): ( "username", models.CharField( - error_messages={ - "unique": "A user with that username already exists." - }, + error_messages={"unique": "A user with that username already exists."}, help_text="Required. 150 characters or fewer. 
Letters, digits and @/./+/-/_ only.", max_length=150, unique=True, - validators=[ - django.contrib.auth.validators.UnicodeUsernameValidator() - ], + validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name="username", ), ), ( "email", - models.EmailField( - blank=True, max_length=254, verbose_name="email address" - ), + models.EmailField(blank=True, max_length=254, verbose_name="email address"), ), ( "is_staff", @@ -79,15 +71,11 @@ class Migration(migrations.Migration): ), ( "date_joined", - models.DateTimeField( - default=django.utils.timezone.now, verbose_name="date joined" - ), + models.DateTimeField(default=django.utils.timezone.now, verbose_name="date joined"), ), ( "full_name", - models.CharField( - blank=True, max_length=255, verbose_name="Name of user" - ), + models.CharField(blank=True, max_length=255, verbose_name="Name of user"), ), ( "groups", diff --git a/backend/application/access_control/migrations/0002_user_additional_fields.py b/backend/application/access_control/migrations/0002_user_additional_fields.py index 3c9781488..1a6935677 100644 --- a/backend/application/access_control/migrations/0002_user_additional_fields.py +++ b/backend/application/access_control/migrations/0002_user_additional_fields.py @@ -12,9 +12,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name="user", name="first_name", - field=models.CharField( - blank=True, max_length=150, verbose_name="first name" - ), + field=models.CharField(blank=True, max_length=150, verbose_name="first name"), ), migrations.AddField( model_name="user", @@ -24,9 +22,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name="user", name="last_name", - field=models.CharField( - blank=True, max_length=150, verbose_name="last name" - ), + field=models.CharField(blank=True, max_length=150, verbose_name="last name"), ), migrations.AlterField( model_name="user", diff --git a/backend/application/access_control/migrations/0008_user_oidc_groups_hash_authorization_group.py b/backend/application/access_control/migrations/0008_user_oidc_groups_hash_authorization_group.py index 17430ab17..4d36a92a2 100644 --- a/backend/application/access_control/migrations/0008_user_oidc_groups_hash_authorization_group.py +++ b/backend/application/access_control/migrations/0008_user_oidc_groups_hash_authorization_group.py @@ -42,11 +42,7 @@ class Migration(migrations.Migration): options={ "verbose_name": "Authorization Group", "verbose_name_plural": "Authorization Groups", - "indexes": [ - models.Index( - fields=["oidc_group"], name="access_cont_oidc_gr_705c33_idx" - ) - ], + "indexes": [models.Index(fields=["oidc_group"], name="access_cont_oidc_gr_705c33_idx")], }, ), ] diff --git a/backend/application/access_control/migrations/0011_user_setting_package_info_preference.py b/backend/application/access_control/migrations/0011_user_setting_package_info_preference.py new file mode 100644 index 000000000..599f8516c --- /dev/null +++ b/backend/application/access_control/migrations/0011_user_setting_package_info_preference.py @@ -0,0 +1,22 @@ +# Generated by Django 5.2.4 on 2025-07-04 14:36 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("access_control", "0010_authorization_group_member_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="user", + name="setting_package_info_preference", + field=models.CharField( + choices=[("open/source/insights", "open/source/insights"), ("ecosyste.ms", "ecosyste.ms")], + 
default="open/source/insights", + max_length=20, + ), + ), + ] diff --git a/backend/application/access_control/migrations/0012_api_token_expiration_date_api_token_id_and_more.py b/backend/application/access_control/migrations/0012_api_token_expiration_date_api_token_id_and_more.py new file mode 100644 index 000000000..1e8ed8ea6 --- /dev/null +++ b/backend/application/access_control/migrations/0012_api_token_expiration_date_api_token_id_and_more.py @@ -0,0 +1,56 @@ +# Generated by Django 5.2.8 on 2025-11-13 14:01 + +import django.db.models.deletion +from django.conf import settings +from django.db import connection, migrations, models + + +def _get_operations() -> list: + # For some installations using MySQL the database migration of release 1.42.0 caused problems. + # Now there are installations where the migration has already run, and for others it hasn't run. + # This is why we need to deal with both situations here. + if "django_migrations" in connection.introspection.table_names(): + with connection.cursor() as cursor: + cursor.execute( + "SELECT * FROM django_migrations WHERE app = 'access_control' and name = '0012_api_token_expiration_date_api_token_id_and_more'" + ) + rows = cursor.fetchall() + + if rows: + return [ + migrations.AlterField( + model_name="api_token", + name="user", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name="api_token", + name="expiration_date", + field=models.DateField(null=True), + ), + migrations.AddField( + model_name="api_token", + name="id", + field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + migrations.AddField( + model_name="api_token", + name="name", + field=models.CharField(default="default", max_length=32), + ), + migrations.AlterUniqueTogether( + name="api_token", + unique_together={("user", "name")}, + ), + ] + + return [] + + +class Migration(migrations.Migration): + + dependencies = [ + ("access_control", "0011_user_setting_package_info_preference"), + ] + + operations = _get_operations() diff --git a/backend/application/access_control/migrations/0013_api_token_multiple.py b/backend/application/access_control/migrations/0013_api_token_multiple.py new file mode 100644 index 000000000..0ebd7b9b3 --- /dev/null +++ b/backend/application/access_control/migrations/0013_api_token_multiple.py @@ -0,0 +1,30 @@ +# Generated by Django 5.2.8 on 2025-11-19 12:37 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("access_control", "0012_api_token_expiration_date_api_token_id_and_more"), + ] + + operations = [ + migrations.CreateModel( + name="API_Token_Multiple", + fields=[ + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(default="default", max_length=32)), + ("api_token_hash", models.CharField(max_length=255)), + ("expiration_date", models.DateField(null=True)), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + options={ + "verbose_name": "API token", + "verbose_name_plural": "API token", + "unique_together": {("user", "name")}, + }, + ), + ] diff --git a/backend/application/access_control/migrations/0014_move_api_tokens.py b/backend/application/access_control/migrations/0014_move_api_tokens.py new file mode 100644 index 000000000..ff4db501e 
--- /dev/null +++ b/backend/application/access_control/migrations/0014_move_api_tokens.py @@ -0,0 +1,49 @@ +import logging + +from django.core.paginator import Paginator +from django.db import migrations + +logger = logging.getLogger("secobserve.migration") + + +def copy_api_tokens(apps, schema_editor): + API_Token_Legacy = apps.get_model("access_control", "API_Token") + API_Token_Multiple = apps.get_model("access_control", "API_Token_Multiple") + legacy_api_tokens = API_Token_Legacy.objects.all().order_by("pk") + + paginator = Paginator(legacy_api_tokens, 1000) + for page_number in paginator.page_range: + page = paginator.page(page_number) + creates = [] + + for legacy_api_token in page.object_list: + creates.append( + API_Token_Multiple( + user=legacy_api_token.user, + name=legacy_api_token.name if hasattr(legacy_api_token, "name") else "default", + api_token_hash=legacy_api_token.api_token_hash, + expiration_date=( + legacy_api_token.expiration_date if hasattr(legacy_api_token, "expiration_date") else None + ), + ) + ) + + API_Token_Multiple.objects.bulk_create(creates) + + API_Token_Legacy.objects.all().delete() + + +class Migration(migrations.Migration): + dependencies = [ + ( + "access_control", + "0013_api_token_multiple", + ), + ] + + operations = [ + migrations.RunPython( + copy_api_tokens, + reverse_code=migrations.RunPython.noop, + ), + ] diff --git a/backend/application/access_control/migrations/0015_user_setting_metrics_timespan.py b/backend/application/access_control/migrations/0015_user_setting_metrics_timespan.py new file mode 100644 index 000000000..5934f566b --- /dev/null +++ b/backend/application/access_control/migrations/0015_user_setting_metrics_timespan.py @@ -0,0 +1,22 @@ +# Generated by Django 5.2.10 on 2026-01-15 06:37 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("access_control", "0014_move_api_tokens"), + ] + + operations = [ + migrations.AddField( + model_name="user", + name="setting_metrics_timespan", + field=models.CharField( + choices=[("Week", "Week"), ("Month", "Month"), ("Quarter", "Quarter"), ("Year", "Year")], + default="Week", + max_length=8, + ), + ), + ] diff --git a/backend/application/access_control/migrations/0016_remove_user_setting_list_properties.py b/backend/application/access_control/migrations/0016_remove_user_setting_list_properties.py new file mode 100644 index 000000000..dbba85f7f --- /dev/null +++ b/backend/application/access_control/migrations/0016_remove_user_setting_list_properties.py @@ -0,0 +1,17 @@ +# Generated by Django 5.2.10 on 2026-01-16 16:04 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("access_control", "0015_user_setting_metrics_timespan"), + ] + + operations = [ + migrations.RemoveField( + model_name="user", + name="setting_list_properties", + ), + ] diff --git a/backend/application/access_control/migrations/0017_add_system_theme_option.py b/backend/application/access_control/migrations/0017_add_system_theme_option.py new file mode 100644 index 000000000..ee0f560e7 --- /dev/null +++ b/backend/application/access_control/migrations/0017_add_system_theme_option.py @@ -0,0 +1,20 @@ +# Generated by Django 5.2.10 on 2026-01-28 09:15 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("access_control", "0016_remove_user_setting_list_properties"), + ] + + operations = [ + migrations.AlterField( + model_name="user", + name="setting_theme", + 
field=models.CharField( + choices=[("light", "light"), ("dark", "dark"), ("system", "system")], default="light", max_length=6 + ), + ), + ] diff --git a/backend/application/access_control/models.py b/backend/application/access_control/models.py index c983fa158..779bc3553 100644 --- a/backend/application/access_control/models.py +++ b/backend/application/access_control/models.py @@ -1,46 +1,43 @@ +from typing import Any + from django.contrib.auth.models import AbstractUser from django.db.models import ( CASCADE, BooleanField, CharField, + DateField, ForeignKey, Index, ManyToManyField, Model, - OneToOneField, - TextField, ) from encrypted_model_fields.fields import EncryptedCharField +from application.access_control.types import ( + ListSize, + MetricsTimespan, + PackageInfo, + Theme, +) -class User(AbstractUser): - THEME_LIGHT = "light" - THEME_DARK = "dark" - - THEME_CHOICES = [ - (THEME_LIGHT, THEME_LIGHT), - (THEME_DARK, THEME_DARK), - ] - - LIST_SIZE_SMALL = "small" - LIST_SIZE_MEDIUM = "medium" - - LIST_SIZE_CHOICES = [ - (LIST_SIZE_SMALL, LIST_SIZE_SMALL), - (LIST_SIZE_MEDIUM, LIST_SIZE_MEDIUM), - ] +class User(AbstractUser): full_name = CharField(max_length=301, blank=True) is_external = BooleanField(default=False) - setting_theme = CharField(max_length=5, choices=THEME_CHOICES, default=THEME_LIGHT) - setting_list_size = CharField( - max_length=6, choices=LIST_SIZE_CHOICES, default=LIST_SIZE_MEDIUM + setting_theme = CharField(max_length=6, choices=Theme.THEME_CHOICES, default=Theme.THEME_LIGHT) + setting_list_size = CharField(max_length=6, choices=ListSize.LIST_SIZE_CHOICES, default=ListSize.LIST_SIZE_MEDIUM) + setting_package_info_preference = CharField( + max_length=20, + choices=PackageInfo.PACKAGE_INFO_PREFERENCE_CHOICES, + default=PackageInfo.PACKAGE_INFO_PREFERENCE_DEPS_DEV, + ) + setting_metrics_timespan = CharField( + max_length=8, choices=MetricsTimespan.METRICS_TIMESPAN_CHOICES, default=MetricsTimespan.METRICS_TIMESPAN_7_DAYS ) - setting_list_properties = TextField(max_length=2048, blank=True) oidc_groups_hash = CharField(max_length=64, blank=True) is_oidc_user = BooleanField(default=False) - def save(self, *args, **kwargs): + def save(self, *args: Any, **kwargs: Any) -> None: if self.first_name and self.last_name: self.full_name = f"{self.first_name} {self.last_name}" elif self.first_name: @@ -70,7 +67,7 @@ class Meta: Index(fields=["oidc_group"]), ] - def __str__(self): + def __str__(self) -> str: return self.name @@ -85,7 +82,7 @@ class Meta: "user", ) - def __str__(self): + def __str__(self) -> str: return f"{self.authorization_group} / {self.user}" @@ -95,7 +92,7 @@ class JWT_Secret(Model): class Meta: verbose_name = "JWT secret" - def save(self, *args, **kwargs): + def save(self, *args: Any, **kwargs: Any) -> None: """ Save object to the database. Removes all other entries if there are any. @@ -104,7 +101,7 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) @classmethod - def load(cls): + def load(cls) -> "JWT_Secret": """ Load object from the database. 
Failing that, create a new empty (default) instance of the object and return it (without saving it @@ -117,9 +114,30 @@ def load(cls): class API_Token(Model): - user = OneToOneField(User, on_delete=CASCADE, primary_key=True) + user = ForeignKey(User, on_delete=CASCADE) + name = CharField(max_length=32, default="default") api_token_hash = CharField(max_length=255) + expiration_date = DateField(null=True) class Meta: verbose_name = "API token" verbose_name_plural = "API token" + unique_together = ( + "user", + "name", + ) + + +class API_Token_Multiple(Model): + user = ForeignKey(User, on_delete=CASCADE) + name = CharField(max_length=32, default="default") + api_token_hash = CharField(max_length=255) + expiration_date = DateField(null=True) + + class Meta: + verbose_name = "API token" + verbose_name_plural = "API token" + unique_together = ( + "user", + "name", + ) diff --git a/backend/application/access_control/queries/api_token.py b/backend/application/access_control/queries/api_token.py new file mode 100644 index 000000000..c4f64e11c --- /dev/null +++ b/backend/application/access_control/queries/api_token.py @@ -0,0 +1,41 @@ +from typing import Optional + +from django.db.models.query import QuerySet + +from application.access_control.models import API_Token_Multiple, User +from application.access_control.services.current_user import get_current_user + + +def get_api_token_by_id(pk: int) -> Optional[API_Token_Multiple]: + try: + return API_Token_Multiple.objects.get(pk=pk) + except API_Token_Multiple.DoesNotExist: + return None + + +def get_api_tokens() -> QuerySet[API_Token_Multiple]: + user = get_current_user() + + if user is None: + return API_Token_Multiple.objects.none() + + api_tokens = API_Token_Multiple.objects.all() + + if user.is_superuser: + return api_tokens + + return api_tokens.filter(user=user) + + +def get_api_tokens_for_user(given_user: User) -> QuerySet[API_Token_Multiple]: + current_user = get_current_user() + + if current_user is None: + return API_Token_Multiple.objects.none() + + api_tokens = API_Token_Multiple.objects.filter(user=given_user) + + if current_user.is_superuser: + return api_tokens + + return api_tokens if current_user == given_user else API_Token_Multiple.objects.none() diff --git a/backend/application/access_control/queries/authorization_group.py b/backend/application/access_control/queries/authorization_group.py index b66f1f982..16b5b2d14 100644 --- a/backend/application/access_control/queries/authorization_group.py +++ b/backend/application/access_control/queries/authorization_group.py @@ -1,7 +1,7 @@ from django.db.models.query import QuerySet from application.access_control.models import Authorization_Group -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user def get_authorization_groups() -> QuerySet[Authorization_Group]: diff --git a/backend/application/access_control/queries/authorization_group_member.py b/backend/application/access_control/queries/authorization_group_member.py index e4ee4afed..bd61862dc 100644 --- a/backend/application/access_control/queries/authorization_group_member.py +++ b/backend/application/access_control/queries/authorization_group_member.py @@ -10,16 +10,14 @@ from application.access_control.queries.authorization_group import ( get_authorization_groups, ) -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user def 
get_authorization_group_member( authorization_group: Authorization_Group, user: User ) -> Optional[Authorization_Group_Member]: try: - return Authorization_Group_Member.objects.get( - authorization_group=authorization_group, user=user - ) + return Authorization_Group_Member.objects.get(authorization_group=authorization_group, user=user) except Authorization_Group_Member.DoesNotExist: return None @@ -36,6 +34,4 @@ def get_authorization_group_members() -> QuerySet[Authorization_Group_Member]: return authorization_group_members authorization_groups = get_authorization_groups() - return authorization_group_members.filter( - authorization_group__in=authorization_groups - ) + return authorization_group_members.filter(authorization_group__in=authorization_groups) diff --git a/backend/application/access_control/queries/user.py b/backend/application/access_control/queries/user.py index 32341c8bd..02591904d 100644 --- a/backend/application/access_control/queries/user.py +++ b/backend/application/access_control/queries/user.py @@ -4,7 +4,7 @@ from django.db.models.query import QuerySet from application.access_control.models import User -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.queries.product_member import get_product_members @@ -44,9 +44,7 @@ def get_users() -> QuerySet[User]: product_members = get_product_members() - return users.filter( - Q(id__in=[member.user_id for member in product_members]) | Q(id=user.pk) - ) + return users.filter(Q(id__in=[member.user_id for member in product_members]) | Q(id=user.pk)) def get_users_without_api_tokens() -> QuerySet[User]: @@ -67,6 +65,4 @@ def get_users_without_api_tokens() -> QuerySet[User]: product_members = get_product_members() - return users.filter( - Q(id__in=[member.user_id for member in product_members]) | Q(id=user.pk) - ) + return users.filter(Q(id__in=[member.user_id for member in product_members]) | Q(id=user.pk)) diff --git a/backend/application/access_control/services/api_token_authentication.py b/backend/application/access_control/services/api_token_authentication.py index 34f22b599..6c5dd633f 100644 --- a/backend/application/access_control/services/api_token_authentication.py +++ b/backend/application/access_control/services/api_token_authentication.py @@ -1,16 +1,18 @@ +from datetime import date from typing import Optional from argon2 import PasswordHasher from rest_framework.authentication import BaseAuthentication, get_authorization_header from rest_framework.exceptions import AuthenticationFailed +from rest_framework.request import Request -from application.access_control.models import API_Token, User +from application.access_control.models import API_Token_Multiple, User API_TOKEN_PREFIX = "APIToken" # nosec B105 class APITokenAuthentication(BaseAuthentication): - def authenticate(self, request): + def authenticate(self, request: Request) -> Optional[tuple[User, None]]: authentication_header = get_authorization_header(request).split() if not authentication_header: @@ -20,9 +22,7 @@ def authenticate(self, request): raise AuthenticationFailed("Invalid token header: No credentials provided.") if len(authentication_header) > 2: - raise AuthenticationFailed( - "Invalid token header: Token string should not contain spaces." 
- ) + raise AuthenticationFailed("Invalid token header: Token string should not contain spaces.") auth_prefix = authentication_header[0].decode("UTF-8") auth_token = authentication_header[1].decode("UTF-8") @@ -31,25 +31,28 @@ def authenticate(self, request): # Authorization header is possibly for another backend return None - user = self._validate_api_token(auth_token) - if not user: + api_token = self._validate_api_token(auth_token) + if not api_token: raise AuthenticationFailed("Invalid API token.") - if not user.is_active: + if not api_token.user.is_active: raise AuthenticationFailed("User is deactivated.") - return (user, None) + if api_token.expiration_date and api_token.expiration_date < date.today(): + raise AuthenticationFailed("API token has expired.") - def authenticate_header(self, request): + return (api_token.user, None) + + def authenticate_header(self, request: Request) -> str: return API_TOKEN_PREFIX - def _validate_api_token(self, api_token: str) -> Optional[User]: + def _validate_api_token(self, api_token: str) -> Optional[API_Token_Multiple]: ph = PasswordHasher() - api_tokens = API_Token.objects.all() + api_tokens = API_Token_Multiple.objects.all() for api_token_data in api_tokens: try: ph.verify(api_token_data.api_token_hash, api_token) - return api_token_data.user + return api_token_data except Exception: # nosec B110 # all tokens need to be checked to see if a valid one can be found pass diff --git a/backend/application/access_control/services/current_user.py b/backend/application/access_control/services/current_user.py new file mode 100644 index 000000000..9c75047ed --- /dev/null +++ b/backend/application/access_control/services/current_user.py @@ -0,0 +1,21 @@ +from typing import Optional + +from django.contrib.auth.models import AnonymousUser + +from application.access_control.models import User +from application.commons.services.global_request import get_current_request + + +def get_current_user() -> Optional[User]: + request = get_current_request() + if request and request.user and not isinstance(request.user, AnonymousUser): + return request.user + + return None + + +def get_current_username() -> Optional[str]: + user = get_current_user() + if user: + return user.username + return None diff --git a/backend/application/access_control/services/jwt_authentication.py b/backend/application/access_control/services/jwt_authentication.py index 2c7fc8538..543c3b8fd 100644 --- a/backend/application/access_control/services/jwt_authentication.py +++ b/backend/application/access_control/services/jwt_authentication.py @@ -4,6 +4,7 @@ import jwt from rest_framework.authentication import BaseAuthentication, get_authorization_header from rest_framework.exceptions import AuthenticationFailed +from rest_framework.request import Request from application.access_control.models import JWT_Secret, User from application.access_control.queries.user import get_user_by_username @@ -32,7 +33,7 @@ def create_jwt(user: User) -> str: class JWTAuthentication(BaseAuthentication): - def authenticate(self, request): + def authenticate(self, request: Request) -> Optional[tuple[User, None]]: auth = get_authorization_header(request).split() if not auth: @@ -42,9 +43,7 @@ def authenticate(self, request): raise AuthenticationFailed("Invalid token header: No credentials provided.") if len(auth) > 2: - raise AuthenticationFailed( - "Invalid token header: Token string should not contain spaces."
- ) + raise AuthenticationFailed("Invalid token header: Token string should not contain spaces.") auth_prefix = auth[0].decode("UTF-8") auth_token = auth[1].decode("UTF-8") @@ -62,14 +61,12 @@ def authenticate(self, request): return (user, None) - def authenticate_header(self, request): + def authenticate_header(self, request: Request) -> str: return JWT_PREFIX def _validate_jwt(self, token: str) -> Optional[User]: try: - payload = jwt.decode( - token, JWT_Secret.load().secret, algorithms=[ALGORITHM] - ) + payload = jwt.decode(token, JWT_Secret.load().secret, algorithms=[ALGORITHM]) username = payload.get("username") if not username: raise AuthenticationFailed("No username in JWT") diff --git a/backend/application/access_control/services/oidc_authentication.py b/backend/application/access_control/services/oidc_authentication.py index 5a224a73e..78c4732c9 100644 --- a/backend/application/access_control/services/oidc_authentication.py +++ b/backend/application/access_control/services/oidc_authentication.py @@ -9,6 +9,7 @@ from django.db import IntegrityError, transaction from rest_framework.authentication import BaseAuthentication, get_authorization_header from rest_framework.exceptions import AuthenticationFailed +from rest_framework.request import Request from application.access_control.models import Authorization_Group, User from application.access_control.queries.user import get_user_by_username @@ -19,7 +20,7 @@ class OIDCAuthentication(BaseAuthentication): - def authenticate(self, request): + def authenticate(self, request: Request) -> Optional[tuple[User, None]]: auth = get_authorization_header(request).split() if not auth: return None @@ -28,9 +29,7 @@ def authenticate(self, request): raise AuthenticationFailed("Invalid token header: No credentials provided.") if len(auth) > 2: - raise AuthenticationFailed( - "Invalid token header: Token string should not contain spaces." 
- ) + raise AuthenticationFailed("Invalid token header: Token string should not contain spaces.") auth_prefix = auth[0].decode("UTF-8") auth_token = auth[1].decode("UTF-8") @@ -48,10 +47,11 @@ def authenticate(self, request): return (user, None) - def authenticate_header(self, request): + def authenticate_header(self, request: Request) -> str: return OIDC_PREFIX def _validate_jwt(self, token: str) -> Optional[User]: + settings = Settings.load() try: jwks_uri = self._get_jwks_uri() jwks_client = jwt.PyJWKClient(jwks_uri) @@ -71,6 +71,7 @@ def _validate_jwt(self, token: str) -> Optional[User]: key=signing_key.key, algorithms=ALGORITHMS, audience=os.environ["OIDC_CLIENT_ID"], + leeway=settings.oidc_clock_skew, ) username = payload.get(os.environ["OIDC_USERNAME"]) user = get_user_by_username(username) @@ -81,7 +82,7 @@ def _validate_jwt(self, token: str) -> Optional[User]: except jwt.PyJWTError as e: raise AuthenticationFailed(str(e)) from e - def _get_jwks_uri(self): + def _get_jwks_uri(self) -> str: jwks_uri = cache.get("jwks_uri") if not jwks_uri: response = requests.request( @@ -134,28 +135,16 @@ def _create_user(self, username: str, payload: dict) -> User: def _check_user_change(self, user: User, payload: dict) -> User: user_changed = False - if ( - os.environ.get("OIDC_EMAIL") - and user.email != payload[os.environ["OIDC_EMAIL"]] - ): + if os.environ.get("OIDC_EMAIL") and user.email != payload[os.environ["OIDC_EMAIL"]]: user.email = payload[os.environ["OIDC_EMAIL"]] user_changed = True - if ( - os.environ.get("OIDC_FULL_NAME") - and user.full_name != payload[os.environ["OIDC_FULL_NAME"]] - ): + if os.environ.get("OIDC_FULL_NAME") and user.full_name != payload[os.environ["OIDC_FULL_NAME"]]: user.full_name = payload[os.environ["OIDC_FULL_NAME"]] user_changed = True - if ( - os.environ.get("OIDC_FIRST_NAME") - and user.first_name != payload[os.environ["OIDC_FIRST_NAME"]] - ): + if os.environ.get("OIDC_FIRST_NAME") and user.first_name != payload[os.environ["OIDC_FIRST_NAME"]]: user.first_name = payload[os.environ["OIDC_FIRST_NAME"]] user_changed = True - if ( - os.environ.get("OIDC_LAST_NAME") - and user.last_name != payload[os.environ["OIDC_LAST_NAME"]] - ): + if os.environ.get("OIDC_LAST_NAME") and user.last_name != payload[os.environ["OIDC_LAST_NAME"]]: user.last_name = payload[os.environ["OIDC_LAST_NAME"]] user_changed = True groups_hash = self._get_groups_hash(payload) @@ -179,7 +168,14 @@ def _get_groups_from_token(self, payload: dict) -> list: return [] groups = payload.get(os.environ["OIDC_GROUPS"]) - if not groups or not isinstance(groups, list): + + if not groups: + return [] + + if isinstance(groups, str): + groups = [groups] + + if not isinstance(groups, list): return [] return sorted(groups) @@ -190,14 +186,12 @@ def _get_groups_hash(self, payload: dict) -> str: return hashlib.sha256("".join(groups).encode("UTF-8")).hexdigest() return "" - def _synchronize_groups(self, user: User, payload: dict): + def _synchronize_groups(self, user: User, payload: dict) -> None: groups = Authorization_Group.objects.exclude(oidc_group="") for group in groups: group.users.remove(user) oidc_groups = self._get_groups_from_token(payload) - authorization_groups = Authorization_Group.objects.filter( - oidc_group__in=oidc_groups - ) + authorization_groups = Authorization_Group.objects.filter(oidc_group__in=oidc_groups) for authorization_group in authorization_groups: user.authorization_groups.add(authorization_group) diff --git a/backend/application/access_control/services/product_api_token.py 
b/backend/application/access_control/services/product_api_token.py deleted file mode 100644 index 813d5091e..000000000 --- a/backend/application/access_control/services/product_api_token.py +++ /dev/null @@ -1,79 +0,0 @@ -from dataclasses import dataclass - -from rest_framework.exceptions import ValidationError - -from application.access_control.models import API_Token, User -from application.access_control.queries.user import get_user_by_username -from application.access_control.services.roles_permissions import Roles -from application.access_control.services.user_api_token import generate_api_token_hash -from application.core.models import Product, Product_Member -from application.core.queries.product_member import get_product_member - - -def create_product_api_token(product: Product, role: Roles) -> str: - product_user_name = _get_product_user_name(product) - user = get_user_by_username(product_user_name) - if user: - try: - user.api_token # pylint: disable=pointless-statement - # This statement raises an exception if the user has no API token. - raise ValidationError("Only one API token per product is allowed.") - except API_Token.DoesNotExist: - pass - - api_token, api_token_hash = generate_api_token_hash() - - if user: - user.is_active = True - else: - user = User(username=product_user_name, is_active=True) - user.set_unusable_password() - user.save() - - Product_Member(product=product, user=user, role=role).save() - API_Token(user=user, api_token_hash=api_token_hash).save() - - return api_token - - -def revoke_product_api_token(product: Product) -> None: - product_user_name = _get_product_user_name(product) - user = get_user_by_username(product_user_name) - if not user: - return - - try: - api_token = user.api_token - api_token.delete() - except API_Token.DoesNotExist: - pass - - product_member = get_product_member(product, user) - if product_member: - product_member.delete() - - user.is_active = False - user.save() - - -@dataclass -class ProductAPIToken: - id: int - role: int - - -def get_product_api_tokens(product: Product) -> list[ProductAPIToken]: - product_user_name = _get_product_user_name(product) - user = get_user_by_username(product_user_name) - if not user: - return [] - - product_member = get_product_member(product, user) - if not product_member: - return [] - - return [ProductAPIToken(id=product.pk, role=product_member.role)] - - -def _get_product_user_name(product: Product) -> str: - return f"-product-{product.id}-api_token-" diff --git a/backend/application/access_control/services/user_api_token.py b/backend/application/access_control/services/user_api_token.py index a1e260580..33fed04d4 100644 --- a/backend/application/access_control/services/user_api_token.py +++ b/backend/application/access_control/services/user_api_token.py @@ -1,31 +1,34 @@ import secrets import string +from datetime import date from argon2 import PasswordHasher from argon2.profiles import RFC_9106_LOW_MEMORY from rest_framework.exceptions import ValidationError -from application.access_control.models import API_Token, User +from application.access_control.models import API_Token_Multiple, User -def create_user_api_token(user: User) -> str: +def create_user_api_token(user: User, name: str, expiration_date: date) -> str: try: - API_Token.objects.get(user=user) - raise ValidationError("Only one API token per user is allowed.") - except API_Token.DoesNotExist: + API_Token_Multiple.objects.get(user=user, name=name) + raise ValidationError("API token with this name already exists.") + except 
API_Token_Multiple.DoesNotExist: pass api_token, api_token_hash = generate_api_token_hash() - API_Token(user=user, api_token_hash=api_token_hash).save() + API_Token_Multiple(user=user, name=name, api_token_hash=api_token_hash, expiration_date=expiration_date).save() return api_token -def revoke_user_api_token(user: User) -> None: - api_tokens = API_Token.objects.filter(user=user) - for api_token in api_tokens: +def revoke_user_api_token(user: User, name: str) -> None: + try: + api_token = API_Token_Multiple.objects.get(user=user, name=name) api_token.delete() + except API_Token_Multiple.DoesNotExist: + pass def generate_api_token_hash() -> tuple[str, str]: diff --git a/backend/application/access_control/signals.py b/backend/application/access_control/signals.py index d9bef9ffc..d0daacc39 100644 --- a/backend/application/access_control/signals.py +++ b/backend/application/access_control/signals.py @@ -1,4 +1,5 @@ import logging +from typing import Any from django.contrib.auth.signals import ( user_logged_in, @@ -13,61 +14,56 @@ Authorization_Group_Member, User, ) -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import ( + get_current_user, + get_current_username, +) from application.commons.services.log_message import format_log_message logger = logging.getLogger("secobserve.access_control") @receiver(user_logged_in) -def signal_user_logged_in( # pylint: disable=unused-argument - sender, user: User, **kwargs -) -> None: +def signal_user_logged_in(sender: Any, user: User, **kwargs: Any) -> None: # pylint: disable=unused-argument # sender is needed according to Django documentation - logger.info(format_log_message(message="User logged in", user=user)) + logger.info(format_log_message(message="User logged in", username=user.username)) @receiver(user_logged_out) -def signal_user_logged_out( # pylint: disable=unused-argument - sender, user: User, **kwargs -) -> None: +def signal_user_logged_out(sender: Any, user: User, **kwargs: Any) -> None: # pylint: disable=unused-argument # sender is needed according to Django documentation - logger.info(format_log_message(message="User logged out", user=user)) + logger.info(format_log_message(message="User logged out", username=user.username)) @receiver(user_login_failed) -def signal_user_login_failed( # pylint: disable=unused-argument - sender, credentials: dict, **kwargs -) -> None: +def signal_user_login_failed(sender: Any, credentials: dict, **kwargs: Any) -> None: # pylint: disable=unused-argument # sender is needed according to Django documentation - logger.info(format_log_message(message="User login failed: ", data=credentials)) + logger.info(format_log_message(message="User login failed: ", data=credentials, username=get_current_username())) @receiver(post_save, sender=Authorization_Group) def authorization_group_post_save( # pylint: disable=unused-argument - sender, instance: Authorization_Group, created: bool, **kwargs + sender: Any, instance: Authorization_Group, created: bool, **kwargs: Any ) -> None: # sender is needed according to Django documentation _invalidate_oidc_groups_hashes() if created: user = get_current_user() if user and not user.is_superuser: - Authorization_Group_Member.objects.create( - authorization_group=instance, user=user, is_manager=True - ) + Authorization_Group_Member.objects.create(authorization_group=instance, user=user, is_manager=True) @receiver(post_delete, sender=Authorization_Group) def authorization_group_post_delete( # pylint: 
disable=unused-argument - sender, instance: Authorization_Group, **kwargs + sender: Any, instance: Authorization_Group, **kwargs: Any ) -> None: # sender is needed according to Django documentation _invalidate_oidc_groups_hashes() -def _invalidate_oidc_groups_hashes(): +def _invalidate_oidc_groups_hashes() -> None: for user in User.objects.exclude(oidc_groups_hash=""): user.oidc_groups_hash = "" user.save() diff --git a/backend/application/access_control/types.py b/backend/application/access_control/types.py new file mode 100644 index 000000000..bf71f8014 --- /dev/null +++ b/backend/application/access_control/types.py @@ -0,0 +1,44 @@ +class Theme: + THEME_LIGHT = "light" + THEME_DARK = "dark" + THEME_SYSTEM = "system" + + THEME_CHOICES = [ + (THEME_LIGHT, THEME_LIGHT), + (THEME_DARK, THEME_DARK), + (THEME_SYSTEM, THEME_SYSTEM), + ] + + +class ListSize: + LIST_SIZE_SMALL = "small" + LIST_SIZE_MEDIUM = "medium" + + LIST_SIZE_CHOICES = [ + (LIST_SIZE_SMALL, LIST_SIZE_SMALL), + (LIST_SIZE_MEDIUM, LIST_SIZE_MEDIUM), + ] + + +class PackageInfo: + PACKAGE_INFO_PREFERENCE_DEPS_DEV = "open/source/insights" + PACKAGE_INFO_PREFERENCE_ECOSYSTE_MS = "ecosyste.ms" + + PACKAGE_INFO_PREFERENCE_CHOICES = [ + (PACKAGE_INFO_PREFERENCE_DEPS_DEV, PACKAGE_INFO_PREFERENCE_DEPS_DEV), + (PACKAGE_INFO_PREFERENCE_ECOSYSTE_MS, PACKAGE_INFO_PREFERENCE_ECOSYSTE_MS), + ] + + +class MetricsTimespan: + METRICS_TIMESPAN_7_DAYS = "Week" + METRICS_TIMESPAN_30_DAYS = "Month" + METRICS_TIMESPAN_90_DAYS = "Quarter" + METRICS_TIMESPAN_365_DAYS = "Year" + + METRICS_TIMESPAN_CHOICES = [ + (METRICS_TIMESPAN_7_DAYS, METRICS_TIMESPAN_7_DAYS), + (METRICS_TIMESPAN_30_DAYS, METRICS_TIMESPAN_30_DAYS), + (METRICS_TIMESPAN_90_DAYS, METRICS_TIMESPAN_90_DAYS), + (METRICS_TIMESPAN_365_DAYS, METRICS_TIMESPAN_365_DAYS), + ] diff --git a/backend/unittests/import_observations/parsers/owasp_zap/__init__.py b/backend/application/authorization/__init__.py similarity index 100% rename from backend/unittests/import_observations/parsers/owasp_zap/__init__.py rename to backend/application/authorization/__init__.py diff --git a/backend/application/authorization/api/__init__.py b/backend/application/authorization/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/authorization/api/permissions_base.py b/backend/application/authorization/api/permissions_base.py new file mode 100644 index 000000000..502e6a43f --- /dev/null +++ b/backend/application/authorization/api/permissions_base.py @@ -0,0 +1,47 @@ +from typing import Any, Optional + +from django.shortcuts import get_object_or_404 +from rest_framework.exceptions import ParseError +from rest_framework.request import Request + +from application.authorization.services.authorization import user_has_permission +from application.authorization.services.roles_permissions import Permissions + + +def check_post_permission( + request: Request, + post_model: Any, + post_foreign_key: str, + post_permission: Permissions, +) -> bool: + if request.method == "POST": + if request.data.get(post_foreign_key) is None: + raise ParseError(f"Unable to check for permissions: Attribute '{post_foreign_key}' is required") + object_to_check = get_object_or_404(post_model, pk=request.data.get(post_foreign_key)) + return user_has_permission(object_to_check, post_permission) + + return True + + +def check_object_permission( + *, + request: Request, + object_to_check: Any, + get_permission: Optional[Permissions], + put_permission: Optional[Permissions], + delete_permission: 
Optional[Permissions], + post_permission: Optional[Permissions] = None, +) -> bool: + if request.method == "GET" and get_permission is not None: + return user_has_permission(object_to_check, get_permission) + + if request.method in ("PUT", "PATCH") and put_permission is not None: + return user_has_permission(object_to_check, put_permission) + + if request.method == "DELETE" and delete_permission is not None: + return user_has_permission(object_to_check, delete_permission) + + if request.method == "POST" and post_permission is not None: + return user_has_permission(object_to_check, post_permission) + + return False diff --git a/backend/application/authorization/services/__init__.py b/backend/application/authorization/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/access_control/services/authorization.py b/backend/application/authorization/services/authorization.py similarity index 63% rename from backend/application/access_control/services/authorization.py rename to backend/application/authorization/services/authorization.py index 96336a33b..8887779b9 100644 --- a/backend/application/access_control/services/authorization.py +++ b/backend/application/authorization/services/authorization.py @@ -1,14 +1,14 @@ -from typing import Optional +from typing import Any, Optional from rest_framework.exceptions import PermissionDenied from application.access_control.models import User -from application.access_control.services.roles_permissions import ( +from application.access_control.services.current_user import get_current_user +from application.authorization.services.roles_permissions import ( Permissions, Roles, get_roles_with_permissions, ) -from application.commons.services.global_request import get_current_user from application.core.models import ( Branch, Observation, @@ -26,13 +26,13 @@ Api_Configuration, Vulnerability_Check, ) -from application.licenses.models import License_Component +from application.licenses.models import Concluded_License, License_Component from application.rules.models import Rule from application.vex.models import VEX_Base def user_has_permission( # pylint: disable=too-many-return-statements,too-many-branches - obj, permission: int, user: User = None + obj: Any, permission: Permissions, user: User = None ) -> bool: # There are a lot of different objects that need to be checked for permissions. # Refactoring it wouldn't make it more readable. 
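
Editor's illustrative sketch (not part of the patch): the relocated helpers check_post_permission and check_object_permission from application/authorization/api/permissions_base.py, together with the new Concluded_License branch in user_has_permission, are typically composed into a DRF permission class as sketched below. The class name UserHasConcludedLicensePermission and the assumption that the POST payload references its product via a "product" foreign-key field are hypothetical; the helper signatures and the Concluded_License_* permission values are taken from the hunks above.

# Illustrative sketch only, assuming the names described in the lead-in.
from typing import Any

from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from rest_framework.views import APIView

from application.authorization.api.permissions_base import (
    check_object_permission,
    check_post_permission,
)
from application.authorization.services.roles_permissions import Permissions
from application.core.models import Product


class UserHasConcludedLicensePermission(BasePermission):  # hypothetical class name
    def has_permission(self, request: Request, view: APIView) -> bool:
        # For POST, resolve the target product from the request body and check
        # the create permission; other methods defer to object-level checks.
        return check_post_permission(request, Product, "product", Permissions.Concluded_License_Create)

    def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool:
        # user_has_permission() dispatches Concluded_License objects to their product.
        return check_object_permission(
            request=request,
            object_to_check=obj,
            get_permission=Permissions.Concluded_License_View,
            put_permission=Permissions.Concluded_License_Edit,
            delete_permission=Permissions.Concluded_License_Delete,
        )
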
@@ -58,27 +58,18 @@ def user_has_permission( # pylint: disable=too-many-return-statements,too-many- role = get_highest_user_role(obj, user) return bool(role and role_has_permission(role, permission)) - if ( - isinstance(obj, Product_Member) - and permission in Permissions.get_product_member_permissions() - ): + if isinstance(obj, Product_Member) and permission in Permissions.get_product_member_permissions(): return user_has_permission(obj.product, permission, user) if ( isinstance(obj, Product_Authorization_Group_Member) - and permission - in Permissions.get_product_authorization_group_member_permissions() + and permission in Permissions.get_product_authorization_group_member_permissions() ): return user_has_permission(obj.product, permission, user) - if ( - isinstance(obj, Rule) - and permission in Permissions.get_product_rule_permissions() - ): + if isinstance(obj, Rule) and permission in Permissions.get_product_rule_permissions(): if not obj.product: - raise NoAuthorizationImplementedError( - "No authorization implemented for General Rules" - ) + raise NoAuthorizationImplementedError("No authorization implemented for General Rules") return user_has_permission(obj.product, permission, user) @@ -88,22 +79,13 @@ def user_has_permission( # pylint: disable=too-many-return-statements,too-many- if isinstance(obj, Service) and permission in Permissions.get_service_permissions(): return user_has_permission(obj.product, permission, user) - if ( - isinstance(obj, Observation) - and permission in Permissions.get_observation_permissions() - ): + if isinstance(obj, Observation) and permission in Permissions.get_observation_permissions(): return user_has_permission(obj.product, permission, user) - if ( - isinstance(obj, Observation_Log) - and permission in Permissions.get_observation_log_permissions() - ): + if isinstance(obj, Observation_Log) and permission in Permissions.get_observation_log_permissions(): return user_has_permission(obj.observation.product, permission, user) - if ( - isinstance(obj, Api_Configuration) - and permission in Permissions.get_api_configuration_permissions() - ): + if isinstance(obj, Api_Configuration) and permission in Permissions.get_api_configuration_permissions(): return user_has_permission(obj.product, permission, user) if isinstance(obj, VEX_Base) and permission in Permissions.get_vex_permissions(): @@ -113,16 +95,13 @@ def user_has_permission( # pylint: disable=too-many-return-statements,too-many- return user_has_permission(obj.product, permission, user) return False - if ( - isinstance(obj, Vulnerability_Check) - and permission in Permissions.get_vulnerability_check_permissions() - ): + if isinstance(obj, Vulnerability_Check) and permission in Permissions.get_vulnerability_check_permissions(): return user_has_permission(obj.product, permission, user) - if ( - isinstance(obj, License_Component) - and permission in Permissions.get_component_license_permissions() - ): + if isinstance(obj, License_Component) and permission in Permissions.get_component_license_permissions(): + return user_has_permission(obj.product, permission, user) + + if isinstance(obj, Concluded_License) and permission in Permissions.get_concluded_license_permissions(): return user_has_permission(obj.product, permission, user) raise NoAuthorizationImplementedError( @@ -130,12 +109,12 @@ def user_has_permission( # pylint: disable=too-many-return-statements,too-many- ) -def user_has_permission_or_403(obj, permission: int, user: User = None) -> None: +def user_has_permission_or_403(obj: Any, permission: 
Permissions, user: User = None) -> None: if not user_has_permission(obj, permission, user): raise PermissionDenied() -def role_has_permission(role: int, permission: int) -> bool: +def role_has_permission(role: Roles, permission: Permissions) -> bool: if not Permissions.has_value(permission): raise PermissionDoesNotExistError(f"Permission {permission} does not exist") @@ -149,7 +128,7 @@ def role_has_permission(role: int, permission: int) -> bool: return permission in permissions -def get_highest_user_role(product: Product, user: User = None) -> Optional[int]: +def get_highest_user_role(product: Product, user: User = None) -> Optional[Roles]: if user is None: user = get_current_user() @@ -165,46 +144,27 @@ def get_highest_user_role(product: Product, user: User = None) -> Optional[int]: user_product_group_role = 0 if product.product_group: user_product_group_member = get_product_member(product.product_group, user) - user_product_group_role = ( - user_product_group_member.role if user_product_group_member else 0 - ) + user_product_group_role = user_product_group_member.role if user_product_group_member else 0 - authorization_group_role = ( - get_highest_role_of_product_authorization_group_members_for_user(product, user) - ) - highest_role = max( - user_product_role, user_product_group_role, authorization_group_role - ) + authorization_group_role = get_highest_role_of_product_authorization_group_members_for_user(product, user) + highest_role = max(user_product_role, user_product_group_role, authorization_group_role) if highest_role: - return highest_role + return Roles(highest_role) return None -def get_user_permissions(user: User = None) -> list[Permissions]: - if not user: - user = get_current_user() - - permissions = [] - - if user and not user.is_external: - permissions.append(Permissions.Product_Create) - permissions.append(Permissions.Product_Group_Create) - - return permissions - - class NoAuthorizationImplementedError(Exception): - def __init__(self, message): + def __init__(self, message: str): self.message = message class PermissionDoesNotExistError(Exception): - def __init__(self, message): + def __init__(self, message: str): self.message = message class RoleDoesNotExistError(Exception): - def __init__(self, message): + def __init__(self, message: str): self.message = message diff --git a/backend/application/access_control/services/roles_permissions.py b/backend/application/authorization/services/roles_permissions.py similarity index 77% rename from backend/application/access_control/services/roles_permissions.py rename to backend/application/authorization/services/roles_permissions.py index 2480ced56..fe9b15732 100644 --- a/backend/application/access_control/services/roles_permissions.py +++ b/backend/application/authorization/services/roles_permissions.py @@ -10,7 +10,7 @@ class Roles(IntEnum): Owner = 5 @classmethod - def has_value(cls, value): + def has_value(cls, value: int) -> bool: try: Roles(value) return True @@ -29,6 +29,7 @@ class Permissions(IntEnum): Product_Delete = 1103 Product_Create = 1104 Product_Import_Observations = 1105 + Product_Scan_OSV = 1106 Product_Member_View = 1201 Product_Member_Edit = 1202 @@ -48,7 +49,9 @@ class Permissions(IntEnum): Branch_Create = 1404 Service_View = 1501 + Serice_Edit = 1502 Service_Delete = 1503 + Service_Create = 1504 Product_Authorization_Group_Member_View = 1601 Product_Authorization_Group_Member_Edit = 1602 @@ -76,10 +79,16 @@ class Permissions(IntEnum): VEX_Delete = 5003 VEX_Create = 5004 + License_Component_Edit = 6002 
License_Component_Delete = 6003 + Concluded_License_View = 7001 + Concluded_License_Edit = 7002 + Concluded_License_Delete = 7003 + Concluded_License_Create = 7004 + @classmethod - def has_value(cls, value): + def has_value(cls, value: int) -> bool: try: Permissions(value) return True @@ -87,7 +96,7 @@ def has_value(cls, value): return False @classmethod - def get_product_group_permissions(cls): + def get_product_group_permissions(cls) -> set["Permissions"]: return { Permissions.Product_Group_View, Permissions.Product_Group_Edit, @@ -96,7 +105,7 @@ def get_product_group_permissions(cls): } @classmethod - def get_observation_permissions(cls): + def get_observation_permissions(cls) -> set["Permissions"]: return { Permissions.Observation_View, Permissions.Observation_Edit, @@ -106,13 +115,13 @@ def get_observation_permissions(cls): } @classmethod - def get_observation_log_permissions(cls): + def get_observation_log_permissions(cls) -> set["Permissions"]: return { Permissions.Observation_Log_Approval, } @classmethod - def get_product_member_permissions(cls): + def get_product_member_permissions(cls) -> set["Permissions"]: return { Permissions.Product_Member_View, Permissions.Product_Member_Edit, @@ -120,7 +129,7 @@ def get_product_member_permissions(cls): } @classmethod - def get_product_authorization_group_member_permissions(cls): + def get_product_authorization_group_member_permissions(cls) -> set["Permissions"]: return { Permissions.Product_Authorization_Group_Member_View, Permissions.Product_Authorization_Group_Member_Edit, @@ -128,7 +137,7 @@ def get_product_authorization_group_member_permissions(cls): } @classmethod - def get_product_rule_permissions(cls): + def get_product_rule_permissions(cls) -> set["Permissions"]: return { Permissions.Product_Rule_View, Permissions.Product_Rule_Edit, @@ -139,7 +148,7 @@ def get_product_rule_permissions(cls): } @classmethod - def get_branch_permissions(cls): + def get_branch_permissions(cls) -> set["Permissions"]: return { Permissions.Branch_View, Permissions.Branch_Edit, @@ -148,14 +157,16 @@ def get_branch_permissions(cls): } @classmethod - def get_service_permissions(cls): + def get_service_permissions(cls) -> set["Permissions"]: return { Permissions.Service_View, + Permissions.Serice_Edit, Permissions.Service_Delete, + Permissions.Service_Create, } @classmethod - def get_api_configuration_permissions(cls): + def get_api_configuration_permissions(cls) -> set["Permissions"]: return { Permissions.Api_Configuration_View, Permissions.Api_Configuration_Edit, @@ -164,7 +175,7 @@ def get_api_configuration_permissions(cls): } @classmethod - def get_vex_permissions(cls): + def get_vex_permissions(cls) -> set["Permissions"]: return { Permissions.VEX_View, Permissions.VEX_Edit, @@ -173,19 +184,29 @@ def get_vex_permissions(cls): } @classmethod - def get_vulnerability_check_permissions(cls): + def get_vulnerability_check_permissions(cls) -> set["Permissions"]: return { Permissions.Product_View, } @classmethod - def get_component_license_permissions(cls): + def get_component_license_permissions(cls) -> set["Permissions"]: return { + Permissions.License_Component_Edit, Permissions.License_Component_Delete, } + @classmethod + def get_concluded_license_permissions(cls) -> set["Permissions"]: + return { + Permissions.Concluded_License_View, + Permissions.Concluded_License_Edit, + Permissions.Concluded_License_Delete, + Permissions.Concluded_License_Create, + } + -def get_roles_with_permissions(): +def get_roles_with_permissions() -> dict[Roles, set[Permissions]]: 
return { Roles.Reader: { Permissions.Product_Group_View, @@ -198,6 +219,7 @@ def get_roles_with_permissions(): Permissions.Observation_View, Permissions.Api_Configuration_View, Permissions.VEX_View, + Permissions.Concluded_License_View, }, Roles.Upload: { Permissions.Product_Import_Observations, @@ -206,6 +228,7 @@ def get_roles_with_permissions(): Permissions.Product_Group_View, Permissions.Product_View, Permissions.Product_Import_Observations, + Permissions.Product_Scan_OSV, Permissions.Product_Member_View, Permissions.Product_Authorization_Group_Member_View, Permissions.Product_Rule_View, @@ -218,6 +241,10 @@ def get_roles_with_permissions(): Permissions.Observation_Assessment, Permissions.Api_Configuration_View, Permissions.VEX_View, + Permissions.License_Component_Edit, + Permissions.Concluded_License_View, + Permissions.Concluded_License_Edit, + Permissions.Concluded_License_Create, }, Roles.Maintainer: { Permissions.Product_Group_View, @@ -225,6 +252,7 @@ def get_roles_with_permissions(): Permissions.Product_View, Permissions.Product_Edit, Permissions.Product_Import_Observations, + Permissions.Product_Scan_OSV, Permissions.Product_Member_View, Permissions.Product_Member_Edit, Permissions.Product_Member_Delete, @@ -244,7 +272,9 @@ def get_roles_with_permissions(): Permissions.Branch_Delete, Permissions.Branch_Create, Permissions.Service_View, + Permissions.Serice_Edit, Permissions.Service_Delete, + Permissions.Service_Create, Permissions.Observation_View, Permissions.Observation_Edit, Permissions.Observation_Create, @@ -258,7 +288,12 @@ def get_roles_with_permissions(): Permissions.VEX_Edit, Permissions.VEX_Create, Permissions.VEX_Delete, + Permissions.License_Component_Edit, Permissions.License_Component_Delete, + Permissions.Concluded_License_View, + Permissions.Concluded_License_Edit, + Permissions.Concluded_License_Create, + Permissions.Concluded_License_Delete, }, Roles.Owner: { Permissions.Product_Group_View, @@ -268,6 +303,7 @@ def get_roles_with_permissions(): Permissions.Product_Edit, Permissions.Product_Delete, Permissions.Product_Import_Observations, + Permissions.Product_Scan_OSV, Permissions.Product_Member_View, Permissions.Product_Member_Edit, Permissions.Product_Member_Delete, @@ -287,7 +323,9 @@ def get_roles_with_permissions(): Permissions.Branch_Delete, Permissions.Branch_Create, Permissions.Service_View, + Permissions.Serice_Edit, Permissions.Service_Delete, + Permissions.Service_Create, Permissions.Observation_View, Permissions.Observation_Edit, Permissions.Observation_Create, @@ -304,12 +342,17 @@ def get_roles_with_permissions(): Permissions.VEX_Edit, Permissions.VEX_Create, Permissions.VEX_Delete, + Permissions.License_Component_Edit, Permissions.License_Component_Delete, + Permissions.Concluded_License_View, + Permissions.Concluded_License_Edit, + Permissions.Concluded_License_Create, + Permissions.Concluded_License_Delete, }, } -def get_permissions_for_role(role: Optional[int]) -> list[Permissions]: +def get_permissions_for_role(role: Optional[Roles]) -> Optional[set[Permissions]]: if not role: - return [] + return set() return get_roles_with_permissions().get(role) diff --git a/backend/application/background_tasks/__init__.py b/backend/application/background_tasks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/background_tasks/api/__init__.py b/backend/application/background_tasks/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/background_tasks/api/filters.py 
b/backend/application/background_tasks/api/filters.py new file mode 100644 index 000000000..4034fd1b0 --- /dev/null +++ b/backend/application/background_tasks/api/filters.py @@ -0,0 +1,24 @@ +from django_filters import CharFilter, ChoiceFilter, FilterSet, OrderingFilter + +from application.background_tasks.models import Periodic_Task +from application.background_tasks.types import Status + + +class PeriodicTaskFilter(FilterSet): + task = CharFilter(field_name="task", lookup_expr="icontains") + status = ChoiceFilter(field_name="status", choices=Status.STATUS_CHOICES) + + ordering = OrderingFilter( + # tuple-mapping retains order + fields=( + ("task", "task"), + ("start_time", "start_time"), + ("status", "status"), + ("duration", "duration"), + ), + ) + + class Meta: + model = Periodic_Task + fields = ["task", "status", "start_time", "duration"] + order_by = ["-start_time"] diff --git a/backend/application/background_tasks/api/serializers.py b/backend/application/background_tasks/api/serializers.py new file mode 100644 index 000000000..ab02042c3 --- /dev/null +++ b/backend/application/background_tasks/api/serializers.py @@ -0,0 +1,9 @@ +from rest_framework.serializers import ModelSerializer + +from application.background_tasks.models import Periodic_Task + + +class PeriodicTaskSerializer(ModelSerializer): + class Meta: + model = Periodic_Task + fields = "__all__" diff --git a/backend/application/background_tasks/api/views.py b/backend/application/background_tasks/api/views.py new file mode 100644 index 000000000..f55686e02 --- /dev/null +++ b/backend/application/background_tasks/api/views.py @@ -0,0 +1,19 @@ +from django_filters.rest_framework import DjangoFilterBackend +from rest_framework.filters import SearchFilter +from rest_framework.mixins import ListModelMixin, RetrieveModelMixin +from rest_framework.permissions import IsAuthenticated +from rest_framework.viewsets import GenericViewSet + +from application.background_tasks.api.filters import PeriodicTaskFilter +from application.background_tasks.api.serializers import PeriodicTaskSerializer +from application.background_tasks.models import Periodic_Task +from application.commons.api.permissions import UserHasSuperuserPermission + + +class PeriodicTaskViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): + serializer_class = PeriodicTaskSerializer + filterset_class = PeriodicTaskFilter + permission_classes = [IsAuthenticated, UserHasSuperuserPermission] + queryset = Periodic_Task.objects.all() + filter_backends = [SearchFilter, DjangoFilterBackend] + search_fields = ["task"] diff --git a/backend/application/background_tasks/apps.py b/backend/application/background_tasks/apps.py new file mode 100644 index 000000000..e3a4086db --- /dev/null +++ b/backend/application/background_tasks/apps.py @@ -0,0 +1,14 @@ +from django.apps import AppConfig + + +class BackgroundTasksConfig(AppConfig): + name = "application.background_tasks" + verbose_name = "Background tasks" + + def ready(self) -> None: + try: + import application.background_tasks.signals # noqa F401 pylint: disable=import-outside-toplevel, unused-import + except ImportError: + pass + + import config.schema # noqa: F401 pylint: disable=import-outside-toplevel, unused-import diff --git a/backend/application/background_tasks/migrations/0001_initial.py b/backend/application/background_tasks/migrations/0001_initial.py new file mode 100644 index 000000000..4d921d69b --- /dev/null +++ b/backend/application/background_tasks/migrations/0001_initial.py @@ -0,0 +1,37 @@ +# Generated by 
Django 5.2.4 on 2025-07-20 15:37 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="Periodic_Task", + fields=[ + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("task", models.CharField(max_length=255)), + ("start_time", models.DateTimeField()), + ("duration", models.BigIntegerField(help_text="Duration in milliseconds", null=True)), + ( + "status", + models.CharField( + choices=[("Running", "Running"), ("Success", "Success"), ("Failure", "Failure")], max_length=10 + ), + ), + ("message", models.CharField(blank=True, max_length=255)), + ], + options={ + "indexes": [ + models.Index(fields=["task", "-start_time"], name="background__task_6102f3_idx"), + models.Index(fields=["-start_time"], name="background__start_t_224633_idx"), + models.Index(fields=["status"], name="background__status_2d3bb2_idx"), + models.Index(fields=["duration"], name="background__duratio_c2bf22_idx"), + ], + }, + ), + ] diff --git a/backend/application/background_tasks/migrations/__init__.py b/backend/application/background_tasks/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/background_tasks/models.py b/backend/application/background_tasks/models.py new file mode 100644 index 000000000..fb6a232aa --- /dev/null +++ b/backend/application/background_tasks/models.py @@ -0,0 +1,19 @@ +from django.db.models import BigIntegerField, CharField, DateTimeField, Index, Model + +from application.background_tasks.types import Status + + +class Periodic_Task(Model): + task = CharField(max_length=255) + start_time = DateTimeField() + duration = BigIntegerField(null=True, help_text="Duration in milliseconds") + status = CharField(max_length=10, choices=Status.STATUS_CHOICES) + message = CharField(max_length=255, blank=True) + + class Meta: + indexes = [ + Index(fields=["task", "-start_time"]), + Index(fields=["-start_time"]), + Index(fields=["status"]), + Index(fields=["duration"]), + ] diff --git a/backend/application/background_tasks/periodic_tasks/__init__.py b/backend/application/background_tasks/periodic_tasks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/background_tasks/periodic_tasks/core_tasks.py b/backend/application/background_tasks/periodic_tasks/core_tasks.py new file mode 100644 index 000000000..d4988dd9a --- /dev/null +++ b/backend/application/background_tasks/periodic_tasks/core_tasks.py @@ -0,0 +1,35 @@ +from huey import crontab +from huey.contrib.djhuey import db_periodic_task + +from application.background_tasks.services.task_base import so_periodic_task +from application.commons import settings_static +from application.core.services.housekeeping import ( + delete_inactive_branches_and_set_flags, +) +from application.core.services.risk_acceptance_expiry_task import ( + expire_risk_acceptances, +) + + +@db_periodic_task( + crontab( + minute=settings_static.branch_housekeeping_crontab_minute, + hour=settings_static.branch_housekeeping_crontab_hour, + ) +) +@so_periodic_task("Branch housekeeping") +def task_branch_housekeeping() -> str: + message = delete_inactive_branches_and_set_flags() + return message + + +@db_periodic_task( + crontab( + minute=settings_static.risk_acceptance_expiry_crontab_minute, + hour=settings_static.risk_acceptance_expiry_crontab_hour, + ) +) +@so_periodic_task("Expire risk acceptances") +def 
task_expire_risk_acceptances() -> str: + message = expire_risk_acceptances() + return message diff --git a/backend/application/background_tasks/periodic_tasks/epss_tasks.py b/backend/application/background_tasks/periodic_tasks/epss_tasks.py new file mode 100644 index 000000000..c3c292522 --- /dev/null +++ b/backend/application/background_tasks/periodic_tasks/epss_tasks.py @@ -0,0 +1,22 @@ +from huey import crontab +from huey.contrib.djhuey import db_periodic_task + +from application.background_tasks.services.task_base import so_periodic_task +from application.commons import settings_static +from application.epss.services.cvss_bt import import_cvss_bt +from application.epss.services.epss import epss_apply_observations, import_epss + + +@db_periodic_task( + crontab( + minute=settings_static.background_epss_import_crontab_minute, + hour=settings_static.background_epss_import_crontab_hour, + ) +) +@so_periodic_task("Import EPSS and cvss-bt") +def task_import_epss() -> str: + message = import_epss() + "\n" + message += epss_apply_observations() + "\n" + message += import_cvss_bt() + + return message diff --git a/backend/application/background_tasks/periodic_tasks/import_observations_tasks.py b/backend/application/background_tasks/periodic_tasks/import_observations_tasks.py new file mode 100644 index 000000000..9e81a8f00 --- /dev/null +++ b/backend/application/background_tasks/periodic_tasks/import_observations_tasks.py @@ -0,0 +1,108 @@ +import logging + +from huey import crontab +from huey.contrib.djhuey import db_periodic_task + +from application.background_tasks.services.task_base import so_periodic_task +from application.commons import settings_static +from application.commons.models import Settings +from application.import_observations.models import Api_Configuration, Product +from application.import_observations.scanners.osv_scanner import scan_product +from application.import_observations.services.import_observations import ( + ApiImportParameters, + api_import_observations, +) +from application.notifications.services.tasks import handle_task_exception + +logger = logging.getLogger("secobserve.import_observations") + + +@db_periodic_task( + crontab( + minute=settings_static.api_import_crontab_minute, + hour=settings_static.api_import_crontab_hour, + ) +) +@so_periodic_task("Import observations from API configurations and OSV") +def task_api_import() -> str: + message = "" + + # 1. Import observations from API configurations + settings = Settings.load() + if not settings.feature_automatic_api_import: + logger.info("API import is disabled in settings") + message += "API import is disabled in settings." 
+ else: + product_set = set() + api_imports_failed = 0 + + api_configurations = Api_Configuration.objects.filter(automatic_import_enabled=True) + for api_configuration in api_configurations: + product_set.add(api_configuration.product) + try: + service_name = ( + api_configuration.automatic_import_service.name + if api_configuration.automatic_import_service + else "" + ) + api_import_parameters = ApiImportParameters( + api_configuration=api_configuration, + branch=api_configuration.automatic_import_branch, + service_name=service_name, + docker_image_name_tag=api_configuration.automatic_import_docker_image_name_tag, + endpoint_url=api_configuration.automatic_import_endpoint_url, + kubernetes_cluster=api_configuration.automatic_import_kubernetes_cluster, + ) + ( + observations_new, + observations_updated, + observations_resolved, + ) = api_import_observations(api_import_parameters) + logger.info( + "API import - %s: %s new, %s updated, %s resolved", + api_configuration, + observations_new, + observations_updated, + observations_resolved, + ) + except Exception as e: + api_imports_failed += 1 + logger.warning("API import - %s: failed with exception", api_configuration) + handle_task_exception(e, product=api_configuration.product) + + message += f" Imported observations for {len(product_set)} products from API configurations." + if api_imports_failed > 0: + message += f" API import failed for {api_imports_failed} configurations." + + # 2. Scan products for OSV vulnerabilities + settings = Settings.load() + if not settings.feature_automatic_osv_scanning: + logger.info("OSV scanning is disabled in settings") + return message + "\nOSV scanning is disabled in settings." + + osv_imports_failed = 0 + products = Product.objects.filter(osv_enabled=True, automatic_osv_scanning_enabled=True) + for product in products: + try: + ( + observations_new, + observations_updated, + observations_resolved, + ) = scan_product(product) + logger.info( + "OSV scanning - %s: %s new, %s updated, %s resolved", + product, + observations_new, + observations_updated, + observations_resolved, + ) + except Exception as e: + osv_imports_failed += 1 + logger.warning("OSV scanning - %s: failed with exception", product) + handle_task_exception(e, product=product) + + message += f"\nImported observations for {len(products)} products from OSV scanning." + if osv_imports_failed > 0: + message += f" OSV scanning failed for {osv_imports_failed} products." + + return message diff --git a/backend/application/background_tasks/periodic_tasks/license_tasks.py b/backend/application/background_tasks/periodic_tasks/license_tasks.py new file mode 100644 index 000000000..7b2630ac1 --- /dev/null +++ b/backend/application/background_tasks/periodic_tasks/license_tasks.py @@ -0,0 +1,28 @@ +from huey import crontab +from huey.contrib.djhuey import db_periodic_task + +from application.background_tasks.services.task_base import so_periodic_task +from application.commons import settings_static +from application.commons.models import Settings +from application.licenses.services.license import import_licenses +from application.licenses.services.license_group import import_scancode_licensedb + + +@db_periodic_task( + crontab( + minute=settings_static.license_import_crontab_minute, + hour=settings_static.license_import_crontab_hour, + ) +) +@so_periodic_task("Import SPDX licenses") +def task_spdx_license_import() -> str: + settings = Settings.load() + if not settings.feature_license_management: + return "License management feature is disabled." 
+ + message = import_licenses() + + import_scancode_licensedb() + message += "\nLicense Groups from ScanCode LicenseDB imported" + + return message diff --git a/backend/application/background_tasks/periodic_tasks/metrics_tasks.py b/backend/application/background_tasks/periodic_tasks/metrics_tasks.py new file mode 100644 index 000000000..be45c9177 --- /dev/null +++ b/backend/application/background_tasks/periodic_tasks/metrics_tasks.py @@ -0,0 +1,13 @@ +from huey import crontab +from huey.contrib.djhuey import db_periodic_task + +from application.background_tasks.services.task_base import so_periodic_task +from application.commons import settings_static +from application.metrics.services.metrics import calculate_product_metrics + + +@db_periodic_task(crontab(minute=f"*/{settings_static.background_product_metrics_interval_minutes}")) +@so_periodic_task("Calculate product metrics") +def task_calculate_product_metrics() -> str: + message = calculate_product_metrics() + return message diff --git a/backend/application/background_tasks/services/__init__.py b/backend/application/background_tasks/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/background_tasks/services/task_base.py b/backend/application/background_tasks/services/task_base.py new file mode 100644 index 000000000..9485efc8e --- /dev/null +++ b/backend/application/background_tasks/services/task_base.py @@ -0,0 +1,92 @@ +import functools +import inspect +import logging +import sys +import traceback +from datetime import timedelta +from typing import Any, Callable + +from django.utils import timezone +from huey.contrib.djhuey import lock_task + +from application.background_tasks.models import Periodic_Task +from application.background_tasks.types import Status +from application.commons.models import Settings +from application.commons.services.log_message import format_log_message +from application.notifications.services.send_notifications import ( + send_task_exception_notification, +) + +logger = logging.getLogger("secobserve.background_tasks") + + +def so_periodic_task(name: str) -> Callable: + def decorator(func: Callable) -> Callable: + @functools.wraps(func) + @lock_task(name) + def wrapper() -> None: + logger.info("--- %s - start ---", name) + + periodic_task = Periodic_Task( + task=name, + start_time=timezone.now(), + status=Status.STATUS_RUNNING, + ) + periodic_task.save() + + _delete_older_entries(name) + + try: + message = func() + + periodic_task.status = Status.STATUS_SUCCESS + periodic_task.duration = (timezone.now() - periodic_task.start_time) / timedelta(milliseconds=1) + periodic_task.message = str(message) if message else "" + periodic_task.save() + except Exception as e: + periodic_task.status = Status.STATUS_FAILURE + periodic_task.duration = (timezone.now() - periodic_task.start_time) / timedelta(milliseconds=1) + periodic_task.message = str(e) + periodic_task.save() + + _handle_periodic_task_exception(e) + return + + logger.info("--- %s - finished ---", name) + + return wrapper + + return decorator + + +def _handle_periodic_task_exception(e: Exception) -> None: + data: dict[str, Any] = {} + function = None + + if sys.exc_info() and len(sys.exc_info()) >= 2 and sys.exc_info()[2]: + frames = inspect.getinnerframes(sys.exc_info()[2]) # type: ignore[arg-type] + if frames and len(frames) >= 2: + function = frames[1].function + data["function"] = function + + logger.error( + format_log_message( + message="Error while executing periodic background task", + data=data, + 
exception=e, + username=None, + ) + ) + logger.error(traceback.format_exc()) + + send_task_exception_notification(function=function, arguments=None, user=None, exception=e, product=None) + + +def _delete_older_entries(name: str) -> None: + settings = Settings.load() + recent_task_ids = list( + Periodic_Task.objects.filter(task=name) + .order_by("-start_time") + .values_list("id", flat=True)[: settings.periodic_task_max_entries] + ) + Periodic_Task.objects.filter(task=name).exclude(id__in=recent_task_ids).delete() diff --git a/backend/application/background_tasks/tasks.py b/backend/application/background_tasks/tasks.py new file mode 100644 index 000000000..78edd506b --- /dev/null +++ b/backend/application/background_tasks/tasks.py @@ -0,0 +1,6 @@ +# Importing necessary modules for background tasks in Django, needed for auto-discovery of huey +import application.background_tasks.periodic_tasks.core_tasks # noqa: F401 pylint: disable=unused-import +import application.background_tasks.periodic_tasks.epss_tasks # noqa: F401 pylint: disable=unused-import +import application.background_tasks.periodic_tasks.import_observations_tasks # noqa: F401 pylint: disable=unused-import +import application.background_tasks.periodic_tasks.license_tasks # noqa: F401 pylint: disable=unused-import +import application.background_tasks.periodic_tasks.metrics_tasks # noqa: F401 pylint: disable=unused-import diff --git a/backend/application/background_tasks/types.py b/backend/application/background_tasks/types.py new file mode 100644 index 000000000..bae074903 --- /dev/null +++ b/backend/application/background_tasks/types.py @@ -0,0 +1,10 @@ +class Status: + STATUS_RUNNING = "Running" + STATUS_SUCCESS = "Success" + STATUS_FAILURE = "Failure" + + STATUS_CHOICES = [ + (STATUS_RUNNING, STATUS_RUNNING), + (STATUS_SUCCESS, STATUS_SUCCESS), + (STATUS_FAILURE, STATUS_FAILURE), + ] diff --git a/backend/application/commons/__init__.py b/backend/application/commons/__init__.py index 902ebbe2d..53a0b2676 100644 --- a/backend/application/commons/__init__.py +++ b/backend/application/commons/__init__.py @@ -4,7 +4,7 @@ class LazyConfig(LazyObject): - def _setup(self): + def _setup(self) -> None: from application.commons.models import ( # pylint: disable=import-outside-toplevel Settings, ) diff --git a/backend/application/commons/api/extended_ordering_filter.py b/backend/application/commons/api/extended_ordering_filter.py index 68cc1def9..033ab85a8 100644 --- a/backend/application/commons/api/extended_ordering_filter.py +++ b/backend/application/commons/api/extended_ordering_filter.py @@ -1,11 +1,13 @@ +from typing import Any + from django.core.validators import EMPTY_VALUES -from django.db.models import F +from django.db.models import F, QuerySet from django_filters import OrderingFilter # Copied from https://github.com/carltongibson/django-filter/issues/274#issuecomment-1862859556 class ExtendedOrderingFilter(OrderingFilter): - def filter(self, qs, value): + def filter(self, qs: QuerySet, value: Any) -> QuerySet: if value in EMPTY_VALUES: return qs diff --git a/backend/application/commons/api/filters.py b/backend/application/commons/api/filters.py deleted file mode 100644 index f50c9a8e7..000000000 --- a/backend/application/commons/api/filters.py +++ /dev/null @@ -1,35 +0,0 @@ -from django_filters import CharFilter, FilterSet, OrderingFilter - -from application.commons.models import Notification - - -class NotificationFilter(FilterSet): - name = CharFilter(field_name="name", lookup_expr="icontains") - message = 
CharFilter(field_name="message", lookup_expr="icontains") - function = CharFilter(field_name="function", lookup_expr="icontains") - - ordering = OrderingFilter( - fields=( - ("name", "name"), - ("created", "created"), - ("message", "message"), - ("type", "type"), - ("function", "function"), - ("product__name", "product_name"), - ("observation__title", "observation_title"), - ("user__full_name", "user_full_name"), - ), - ) - - class Meta: - model = Notification - fields = [ - "name", - "created", - "message", - "type", - "function", - "product", - "observation", - "user", - ] diff --git a/backend/application/commons/api/permissions.py b/backend/application/commons/api/permissions.py index 439832fbf..f1f3baaa9 100644 --- a/backend/application/commons/api/permissions.py +++ b/backend/application/commons/api/permissions.py @@ -1,29 +1,13 @@ -from rest_framework.permissions import BasePermission - -from application.access_control.api.permissions_base import check_object_permission -from application.access_control.services.roles_permissions import Permissions - +from typing import Any -class UserHasNotificationPermission(BasePermission): - def has_object_permission(self, request, view, obj): - if obj.product: - return check_object_permission( - request=request, - object_to_check=obj.product, - get_permission=Permissions.Product_View, - put_permission=None, - delete_permission=Permissions.Product_Delete, - ) - - if request.user and request.user.is_superuser: - return True - - return False +from rest_framework.permissions import BasePermission +from rest_framework.request import Request +from rest_framework.views import APIView class UserHasSuperuserPermission(BasePermission): - def has_object_permission(self, request, view, obj): + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: return request.user.is_superuser - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: return request.user.is_superuser diff --git a/backend/application/commons/api/serializers.py b/backend/application/commons/api/serializers.py index 17104aef7..f939919ae 100644 --- a/backend/application/commons/api/serializers.py +++ b/backend/application/commons/api/serializers.py @@ -7,8 +7,7 @@ SerializerMethodField, ) -from application.commons.models import Notification, Settings -from application.commons.services.global_request import get_current_user +from application.commons.models import Settings class VersionSerializer(Serializer): @@ -18,6 +17,7 @@ class VersionSerializer(Serializer): class StatusSettingsSerializer(Serializer): features = ListField(child=CharField(), min_length=0, max_length=200, required=True) risk_acceptance_expiry_days = IntegerField() + vex_justification_style = CharField() class SettingsSerializer(ModelSerializer): @@ -27,55 +27,7 @@ class Meta: model = Settings fields = "__all__" - def get_id(self, obj: Settings): # pylint: disable=unused-argument + def get_id(self, obj: Settings) -> int: # pylint: disable=unused-argument # obj is needed for the signature but we don't need it # The id is hardcoded to 1 because there is only one instance of the Settings model return 1 - - -class NotificationSerializer(ModelSerializer): - message = SerializerMethodField() - product_name = SerializerMethodField() - observation_title = SerializerMethodField() - user_full_name = SerializerMethodField() - - class Meta: - model = Notification - fields = "__all__" - - def get_message(self, obj: Notification): - if not obj.message: - 
return obj.message - - user = get_current_user() - if user and user.is_superuser: - return obj.message - - return "..." - - def get_product_name(self, obj: Notification): - if obj.product: - return obj.product.name - - if obj.observation: - return obj.observation.product.name - - return None - - def get_observation_title(self, obj: Notification): - if obj.observation: - return obj.observation.title - - return None - - def get_user_full_name(self, obj: Notification): - if obj.user: - return obj.user.full_name - - return None - - -class NotificationBulkSerializer(Serializer): - notifications = ListField( - child=IntegerField(min_value=1), min_length=0, max_length=100, required=True - ) diff --git a/backend/application/commons/api/views.py b/backend/application/commons/api/views.py index b53b05c68..2a4cf1d66 100644 --- a/backend/application/commons/api/views.py +++ b/backend/application/commons/api/views.py @@ -1,37 +1,25 @@ -from django_filters.rest_framework import DjangoFilterBackend -from drf_spectacular.utils import extend_schema +import environ from rest_framework.decorators import action from rest_framework.exceptions import ValidationError -from rest_framework.filters import SearchFilter -from rest_framework.mixins import DestroyModelMixin, ListModelMixin, RetrieveModelMixin from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request from rest_framework.response import Response -from rest_framework.status import HTTP_204_NO_CONTENT from rest_framework.views import APIView -from rest_framework.viewsets import GenericViewSet -from application.commons.api.filters import NotificationFilter -from application.commons.api.permissions import ( - UserHasNotificationPermission, - UserHasSuperuserPermission, -) +from application.commons.api.permissions import UserHasSuperuserPermission from application.commons.api.serializers import ( - NotificationBulkSerializer, - NotificationSerializer, SettingsSerializer, StatusSettingsSerializer, VersionSerializer, ) -from application.commons.models import Notification, Settings -from application.commons.queries.notification import get_notifications -from application.commons.services.notification import bulk_delete +from application.commons.models import Settings class VersionView(APIView): serializer_class = VersionSerializer @action(detail=True, methods=["get"], url_name="version") - def get(self, request): + def get(self, request: Request) -> Response: content = { "version": "version_unknown", } @@ -44,7 +32,7 @@ class HealthView(APIView): serializer_class = None @action(detail=True, methods=["get"], url_name="health") - def get(self, request): + def get(self, request: Request) -> Response: response = Response() response["Cache-Control"] = "no-cache, no-store, must-revalidate" @@ -56,11 +44,10 @@ class StatusSettingsView(APIView): permission_classes = [] @action(detail=True, methods=["get"], url_name="settings") - def get(self, request): + def get(self, request: Request) -> Response: features = [] settings = Settings.load() - if settings.feature_disable_user_login: features.append("feature_disable_user_login") @@ -71,15 +58,24 @@ def get(self, request): features.append("feature_general_rules_need_approval") if settings.feature_license_management: features.append("feature_license_management") - - content = { + if settings.feature_automatic_api_import: + features.append("feature_automatic_api_import") + if settings.feature_automatic_osv_scanning: + features.append("feature_automatic_osv_scanning") + if 
settings.feature_exploit_information: + features.append("feature_exploit_information") + + env = environ.Env() + if env("EMAIL_HOST", default="") or env("EMAIL_PORT", default=""): + features.append("feature_email") + + content: dict[str, (int | list[str]) | str] = { "features": features, } if request.user.is_authenticated: - content["risk_acceptance_expiry_days"] = ( - settings.risk_acceptance_expiry_days - ) + content["risk_acceptance_expiry_days"] = settings.risk_acceptance_expiry_days + content["vex_justification_style"] = settings.vex_justification_style return Response(content) @@ -89,50 +85,24 @@ class SettingsView(APIView): permission_classes = (IsAuthenticated, UserHasSuperuserPermission) @action(detail=True, methods=["get"], url_name="settings") - def get(self, request, pk=None): # pylint: disable=unused-argument + def get(self, request: Request, pk: int = None) -> Response: # pylint: disable=unused-argument # pk is needed for the API signature but we don't need it settings = Settings.load() response_serializer = SettingsSerializer(settings) return Response(response_serializer.data) @action(detail=True, methods=["patch"], url_name="settings") - def patch(self, request, pk=None): # pylint: disable=unused-argument + def patch(self, request: Request, pk: int = None) -> Response: # pylint: disable=unused-argument # pk is needed for the API signature but we don't need it request_serializer = SettingsSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) - settings = request_serializer.create(request_serializer.validated_data) - settings.save() + if request_serializer.validated_data.get("feature_automatic_osv_scanning"): + request_serializer.validated_data["feature_license_management"] = True + + settings = Settings.load() + request_serializer.update(settings, request_serializer.validated_data) response_serializer = SettingsSerializer(settings) return Response(response_serializer.data) - - -class NotificationViewSet( - GenericViewSet, DestroyModelMixin, ListModelMixin, RetrieveModelMixin -): - serializer_class = NotificationSerializer - filterset_class = NotificationFilter - permission_classes = (IsAuthenticated, UserHasNotificationPermission) - queryset = Notification.objects.all() - filter_backends = [SearchFilter, DjangoFilterBackend] - search_fields = ["name"] - - def get_queryset(self): - return get_notifications() - - @extend_schema( - methods=["POST"], - request=NotificationBulkSerializer, - responses={HTTP_204_NO_CONTENT: None}, - ) - @action(detail=False, methods=["post"]) - def bulk_delete(self, request): - request_serializer = NotificationBulkSerializer(data=request.data) - if not request_serializer.is_valid(): - raise ValidationError(request_serializer.errors) - - bulk_delete(request_serializer.validated_data.get("notifications")) - - return Response(status=HTTP_204_NO_CONTENT) diff --git a/backend/application/commons/apps.py b/backend/application/commons/apps.py index e2114bc4b..4811a36ff 100644 --- a/backend/application/commons/apps.py +++ b/backend/application/commons/apps.py @@ -8,16 +8,14 @@ class UtilsConfig(AppConfig): name = "application.commons" verbose_name = "Commons" - def ready(self): + def ready(self) -> None: try: import application.commons.signals # noqa F401 pylint: disable=import-outside-toplevel, unused-import except ImportError: pass if os.path.isfile("/sys/fs/cgroup/memory/memory.limit_in_bytes"): - with open( - "/sys/fs/cgroup/memory/memory.limit_in_bytes", encoding="utf-8" - ) as limit: + 
with open("/sys/fs/cgroup/memory/memory.limit_in_bytes", encoding="utf-8") as limit: mem = int(limit.read()) resource.setrlimit(resource.RLIMIT_AS, (mem, mem)) diff --git a/backend/application/commons/migrations/0002_settings.py b/backend/application/commons/migrations/0002_settings.py index 4728dc911..5f74b2719 100644 --- a/backend/application/commons/migrations/0002_settings.py +++ b/backend/application/commons/migrations/0002_settings.py @@ -25,9 +25,7 @@ class Migration(migrations.Migration): ), ( "security_gate_active", - models.BooleanField( - default=True, help_text="Is the security gate activated?" - ), + models.BooleanField(default=True, help_text="Is the security gate activated?"), ), ( "security_gate_threshold_critical", @@ -225,9 +223,7 @@ class Migration(migrations.Migration): ), ( "branch_housekeeping_active", - models.BooleanField( - default=True, help_text="Delete inactive branches" - ), + models.BooleanField(default=True, help_text="Delete inactive branches"), ), ( "branch_housekeeping_keep_inactive_days", diff --git a/backend/application/commons/migrations/0005_settings_feature_general_rules_need_approval.py b/backend/application/commons/migrations/0005_settings_feature_general_rules_need_approval.py index c2906c8ee..dd9f961a3 100644 --- a/backend/application/commons/migrations/0005_settings_feature_general_rules_need_approval.py +++ b/backend/application/commons/migrations/0005_settings_feature_general_rules_need_approval.py @@ -13,8 +13,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name="settings", name="feature_general_rules_need_approval", - field=models.BooleanField( - default=False, help_text="General rules need approval" - ), + field=models.BooleanField(default=False, help_text="General rules need approval"), ), ] diff --git a/backend/application/commons/migrations/0008_remove_settings_background_epss_import_crontab_hours_and_more.py b/backend/application/commons/migrations/0008_remove_settings_background_epss_import_crontab_hours_and_more.py index f6a6f3f01..d93828718 100644 --- a/backend/application/commons/migrations/0008_remove_settings_background_epss_import_crontab_hours_and_more.py +++ b/backend/application/commons/migrations/0008_remove_settings_background_epss_import_crontab_hours_and_more.py @@ -44,9 +44,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name="settings", name="feature_automatic_api_import", - field=models.BooleanField( - default=True, help_text="Enable automatic API imports" - ), + field=models.BooleanField(default=True, help_text="Enable automatic API imports"), ), migrations.AddField( model_name="settings", diff --git a/backend/application/commons/migrations/0010_settings_feature_license_management_and_more.py b/backend/application/commons/migrations/0010_settings_feature_license_management_and_more.py index 8d365f14a..5a8542c79 100644 --- a/backend/application/commons/migrations/0010_settings_feature_license_management_and_more.py +++ b/backend/application/commons/migrations/0010_settings_feature_license_management_and_more.py @@ -14,9 +14,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name="settings", name="feature_license_management", - field=models.BooleanField( - default=True, help_text="Enable license management" - ), + field=models.BooleanField(default=True, help_text="Enable license management"), ), migrations.AddField( model_name="settings", diff --git a/backend/application/commons/migrations/0013_notification_viewed.py 
b/backend/application/commons/migrations/0013_notification_viewed.py new file mode 100644 index 000000000..1ba801a8a --- /dev/null +++ b/backend/application/commons/migrations/0013_notification_viewed.py @@ -0,0 +1,44 @@ +# Generated by Django 5.1.5 on 2025-02-01 21:10 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("commons", "0012_alter_settings_security_gate_threshold_unknown"), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name="Notification_Viewed", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "notification", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="commons.notification", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), + ], + ), + ] diff --git a/backend/application/commons/migrations/0014_settings_feature_automatic_osv_scanning.py b/backend/application/commons/migrations/0014_settings_feature_automatic_osv_scanning.py new file mode 100644 index 000000000..644274f40 --- /dev/null +++ b/backend/application/commons/migrations/0014_settings_feature_automatic_osv_scanning.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.5 on 2025-02-02 15:29 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("commons", "0013_notification_viewed"), + ] + + operations = [ + migrations.AddField( + model_name="settings", + name="feature_automatic_osv_scanning", + field=models.BooleanField(default=True, help_text="Enable automatic OSV scanning"), + ), + ] diff --git a/backend/application/commons/migrations/0015_settings_exploit_information_max_age_years_and_more.py b/backend/application/commons/migrations/0015_settings_exploit_information_max_age_years_and_more.py new file mode 100644 index 000000000..c9ef8755a --- /dev/null +++ b/backend/application/commons/migrations/0015_settings_exploit_information_max_age_years_and_more.py @@ -0,0 +1,31 @@ +# Generated by Django 5.1.6 on 2025-03-06 04:40 + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("commons", "0014_settings_feature_automatic_osv_scanning"), + ] + + operations = [ + migrations.AddField( + model_name="settings", + name="exploit_information_max_age_years", + field=models.IntegerField( + default=10, + help_text="Maximum age of CVEs for enrichment in years", + validators=[ + django.core.validators.MinValueValidator(0), + django.core.validators.MaxValueValidator(999999), + ], + ), + ), + migrations.AddField( + model_name="settings", + name="feature_exploit_information", + field=models.BooleanField(default=True, help_text="Enable CVSS enrichment"), + ), + ] diff --git a/backend/application/commons/migrations/0016_remove_notification_viewed_notification_and_more.py b/backend/application/commons/migrations/0016_remove_notification_viewed_notification_and_more.py new file mode 100644 index 000000000..9a5b0d3f3 --- /dev/null +++ b/backend/application/commons/migrations/0016_remove_notification_viewed_notification_and_more.py @@ -0,0 +1,47 @@ +# Generated by Django 5.1.8 on 2025-04-15 06:13 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("commons", 
"0015_settings_exploit_information_max_age_years_and_more"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + state_operations=[ + migrations.RemoveField( + model_name="notification_viewed", + name="notification", + ), + ], + database_operations=[], + ), + migrations.SeparateDatabaseAndState( + state_operations=[ + migrations.RemoveField( + model_name="notification_viewed", + name="user", + ), + ], + database_operations=[], + ), + migrations.SeparateDatabaseAndState( + state_operations=[ + migrations.DeleteModel( + name="Notification", + ), + ], + database_operations=[], + ), + migrations.SeparateDatabaseAndState( + state_operations=[ + migrations.DeleteModel( + name="Notification_Viewed", + ), + ], + database_operations=[], + ), + ] diff --git a/backend/application/commons/migrations/0017_settings_periodic_task_max_entries.py b/backend/application/commons/migrations/0017_settings_periodic_task_max_entries.py new file mode 100644 index 000000000..448ab49f8 --- /dev/null +++ b/backend/application/commons/migrations/0017_settings_periodic_task_max_entries.py @@ -0,0 +1,26 @@ +# Generated by Django 5.2.4 on 2025-07-21 08:20 + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("commons", "0016_remove_notification_viewed_notification_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="settings", + name="periodic_task_max_entries", + field=models.IntegerField( + default=10, + help_text="Maximum number of entries to keep per periodic task", + validators=[ + django.core.validators.MinValueValidator(1), + django.core.validators.MaxValueValidator(999999), + ], + ), + ), + ] diff --git a/backend/application/commons/migrations/0018_settings_vex_justification_style_and_more.py b/backend/application/commons/migrations/0018_settings_vex_justification_style_and_more.py new file mode 100644 index 000000000..114a3050e --- /dev/null +++ b/backend/application/commons/migrations/0018_settings_vex_justification_style_and_more.py @@ -0,0 +1,27 @@ +# Generated by Django 5.2.5 on 2025-08-26 16:24 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("commons", "0017_settings_periodic_task_max_entries"), + ] + + operations = [ + migrations.AddField( + model_name="settings", + name="vex_justification_style", + field=models.CharField( + choices=[("CSAF/OpenVEX", "CSAF/OpenVEX"), ("CycloneDX", "CycloneDX")], + default="CSAF/OpenVEX", + max_length=16, + ), + ), + migrations.AlterField( + model_name="settings", + name="feature_vex", + field=models.BooleanField(default=False, help_text="Export and import VEX documents in various formats"), + ), + ] diff --git a/backend/application/commons/migrations/0019_settings_oidc_clock_skew.py b/backend/application/commons/migrations/0019_settings_oidc_clock_skew.py new file mode 100644 index 000000000..ef0d58711 --- /dev/null +++ b/backend/application/commons/migrations/0019_settings_oidc_clock_skew.py @@ -0,0 +1,26 @@ +# Generated by Django 5.2.9 on 2025-12-27 17:28 + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("commons", "0018_settings_vex_justification_style_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="settings", + name="oidc_clock_skew", + field=models.IntegerField( + default=0, + help_text="Time margin in seconds for checks of issued at, not before and expiration of OIDC 
tokens", + validators=[ + django.core.validators.MinValueValidator(0), + django.core.validators.MaxValueValidator(999999), + ], + ), + ), + ] diff --git a/backend/application/commons/models.py b/backend/application/commons/models.py index 055a88471..591ca14e0 100644 --- a/backend/application/commons/models.py +++ b/backend/application/commons/models.py @@ -1,45 +1,20 @@ +from typing import Any + +from dirtyfields import DirtyFieldsMixin from django.core.validators import MaxValueValidator, MinValueValidator from django.db.models import ( - CASCADE, BooleanField, CharField, - DateTimeField, - ForeignKey, IntegerField, Model, - TextField, ) -from application.access_control.models import User -from application.core.models import Observation, Product - - -class Notification(Model): - TYPE_EXCEPTION = "Exception" - TYPE_SECURITY_GATE = "Security gate" - TYPE_TASK = "Task" - - TYPE_CHOICES = [ - (TYPE_EXCEPTION, TYPE_EXCEPTION), - (TYPE_SECURITY_GATE, TYPE_SECURITY_GATE), - (TYPE_TASK, TYPE_TASK), - ] - - name = CharField(max_length=255) - created = DateTimeField(auto_now_add=True) - message = TextField(max_length=4096) - user = ForeignKey(User, on_delete=CASCADE, null=True) - product = ForeignKey(Product, on_delete=CASCADE, null=True) - observation = ForeignKey(Observation, on_delete=CASCADE, null=True) - type = CharField(max_length=20, choices=TYPE_CHOICES) - function = CharField(max_length=255, blank=True) - arguments = TextField(max_length=4096, blank=True) +from application.commons.services.request_cache import cache_for_request +from application.commons.types import VEX_Justification_Styles -class Settings(Model): - security_gate_active = BooleanField( - default=True, help_text="Is the security gate activated?" - ) +class Settings(Model, DirtyFieldsMixin): + security_gate_active = BooleanField(default=True, help_text="Is the security gate activated?") security_gate_threshold_critical = IntegerField( default=0, validators=[MinValueValidator(0), MaxValueValidator(999999)], @@ -145,9 +120,7 @@ class Settings(Model): validators=[MinValueValidator(0), MaxValueValidator(23)], help_text="Hour crontab expression for branch housekeeping (UTC)", ) - branch_housekeeping_active = BooleanField( - default=True, help_text="Delete inactive branches" - ) + branch_housekeeping_active = BooleanField(default=True, help_text="Delete inactive branches") branch_housekeeping_keep_inactive_days = IntegerField( default=30, validators=[MinValueValidator(0), MaxValueValidator(999999)], @@ -159,16 +132,16 @@ class Settings(Model): help_text="Regular expression which branches to exempt from deletion", ) - feature_vex = BooleanField( - default=False, help_text="Generate VEX documents in OpenVEX and CSAF format" - ) - feature_disable_user_login = BooleanField( - default=False, help_text="Disable user login" - ) - feature_general_rules_need_approval = BooleanField( - default=False, help_text="General rules need approval" + feature_vex = BooleanField(default=False, help_text="Export and import VEX documents in various formats") + vex_justification_style = CharField( + max_length=16, + choices=VEX_Justification_Styles.STYLE_CHOICES, + default=VEX_Justification_Styles.STYLE_CSAF_OPENVEX, ) + feature_disable_user_login = BooleanField(default=False, help_text="Disable user login") + feature_general_rules_need_approval = BooleanField(default=False, help_text="General rules need approval") + risk_acceptance_expiry_days = IntegerField( default=30, validators=[MinValueValidator(0), MaxValueValidator(999999)], @@ -185,9 +158,7 @@ 
class Settings(Model): help_text="Hour crontab expression for checking risk acceptance expiry (UTC)", ) - feature_automatic_api_import = BooleanField( - default=True, help_text="Enable automatic API imports" - ) + feature_automatic_api_import = BooleanField(default=True, help_text="Enable automatic API imports") api_import_crontab_minute = IntegerField( default=0, validators=[MinValueValidator(0), MaxValueValidator(59)], @@ -215,9 +186,7 @@ class Settings(Model): default=True, help_text="Validate that the password is not entirely numeric." ) - feature_license_management = BooleanField( - default=True, help_text="Enable license management" - ) + feature_license_management = BooleanField(default=True, help_text="Enable license management") license_import_crontab_minute = IntegerField( default=30, validators=[MinValueValidator(0), MaxValueValidator(59)], @@ -228,8 +197,26 @@ class Settings(Model): validators=[MinValueValidator(0), MaxValueValidator(23)], help_text="Hour crontab expression for importing licenses (UTC)", ) + feature_automatic_osv_scanning = BooleanField(default=True, help_text="Enable automatic OSV scanning") + feature_exploit_information = BooleanField(default=True, help_text="Enable CVSS enrichment") + exploit_information_max_age_years = IntegerField( + default=10, + validators=[MinValueValidator(0), MaxValueValidator(999999)], + help_text="Maximum age of CVEs for enrichment in years", + ) + periodic_task_max_entries = IntegerField( + default=10, + validators=[MinValueValidator(1), MaxValueValidator(999999)], + help_text="Maximum number of entries to keep per periodic task", + ) + + oidc_clock_skew = IntegerField( + default=0, + validators=[MinValueValidator(0), MaxValueValidator(999999)], + help_text="Time margin in seconds for checks of issued at, not before and expiration of OIDC tokens", + ) - def save(self, *args, **kwargs): + def save(self, *args: Any, **kwargs: Any) -> None: """ Save object to the database. Removes all other entries if there are any. @@ -238,7 +225,8 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) @classmethod - def load(cls): + @cache_for_request + def load(cls) -> "Settings": """ Load object from the database. 
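# --- Illustrative aside (not part of the diff) ----------------------------------------
# Sketch of how the Settings singleton above is typically consumed: save() keeps only a
# single row, and load() is wrapped with @cache_for_request, so repeated calls within
# one request reuse the same instance. The helper function below is hypothetical.
from application.commons.models import Settings


def osv_scanning_enabled() -> bool:
    # First call per request hits the database; later calls return the cached instance.
    return Settings.load().feature_automatic_osv_scanning


# DirtyFieldsMixin (now mixed into Settings) tracks unsaved changes, e.g.:
#   settings = Settings.load(); settings.feature_vex = True
#   settings.get_dirty_fields()  # -> {"feature_vex": False}, the previously saved value
# ----------------------------------------------------------------------------------------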
Failing that, create a new empty (default) instance of the object and return it (without saving it diff --git a/backend/application/commons/queries/notification.py b/backend/application/commons/queries/notification.py deleted file mode 100644 index c0d2285b7..000000000 --- a/backend/application/commons/queries/notification.py +++ /dev/null @@ -1,58 +0,0 @@ -from django.db.models import Exists, OuterRef, Q -from django.db.models.query import QuerySet - -from application.commons.models import Notification -from application.commons.services.global_request import get_current_user -from application.core.models import Product_Authorization_Group_Member, Product_Member - - -def get_notifications() -> QuerySet[Notification]: - user = get_current_user() - - if user is None: - return Notification.objects.none() - - notifications = Notification.objects.all() - - if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("product_id"), user=user - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product__product_group"), user=user - ) - - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_id"), - authorization_group__users=user, - ) - ) - - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product__product_group"), - authorization_group__users=user, - ) - ) - - notifications = notifications.annotate( - product__member=Exists(product_members), - product__product_group__member=Exists(product_group_members), - authorization_group_member=Exists(product_authorization_group_members), - product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), - ) - - notifications = notifications.filter( - ( - Q(product__member=True) - | Q(product__product_group__member=True) - | Q(authorization_group_member=True) - | Q(product_group_authorization_group_member=True) - ) - & (Q(type=Notification.TYPE_SECURITY_GATE) | Q(type=Notification.TYPE_TASK)) - ) - - return notifications diff --git a/backend/application/commons/services/export.py b/backend/application/commons/services/export.py index 0bd067c86..80fefc908 100644 --- a/backend/application/commons/services/export.py +++ b/backend/application/commons/services/export.py @@ -1,6 +1,6 @@ import json from datetime import datetime -from typing import Any +from typing import Any, Optional import jsonpickle from defusedcsv import csv @@ -10,9 +10,7 @@ from openpyxl.styles import Font -def export_excel( - objects: QuerySet, title: str, excludes: list[str], foreign_keys: list[str] -) -> Workbook: +def export_excel(objects: QuerySet, title: str, excludes: list[str], foreign_keys: list[str]) -> Workbook: workbook = Workbook() workbook.iso_dates = True worksheet = workbook.active @@ -26,11 +24,7 @@ def export_excel( if row_num == 1: col_num = 1 for key in dir(current_object): - if ( - key not in excludes - and not callable(getattr(current_object, key)) - and not key.startswith("_") - ): + if key not in excludes and not callable(getattr(current_object, key)) and not key.startswith("_"): value = key.replace("_", " ").capitalize() cell = worksheet.cell(row=row_num, column=col_num, value=value) cell.font = font_bold @@ -40,11 +34,7 @@ def export_excel( if row_num > 1: col_num = 1 for key in dir(current_object): - if ( - key not in excludes - and not callable(getattr(current_object, key)) - and not key.startswith("_") - ): + if key not in excludes 
and not callable(getattr(current_object, key)) and not key.startswith("_"): value = current_object.__dict__.get(key) if key in foreign_keys and getattr(current_object, key): value = str(getattr(current_object, key)) @@ -73,11 +63,7 @@ def export_csv( if first_row: fields.clear() for key in dir(current_object): - if ( - key not in excludes - and not callable(getattr(current_object, key)) - and not key.startswith("_") - ): + if key not in excludes and not callable(getattr(current_object, key)) and not key.startswith("_"): value = key.replace("_", " ").capitalize() fields.append(value) @@ -87,11 +73,7 @@ def export_csv( if not first_row: fields.clear() for key in dir(current_object): - if ( - key not in excludes - and not callable(getattr(current_object, key)) - and not key.startswith("_") - ): + if key not in excludes and not callable(getattr(current_object, key)) and not key.startswith("_"): value = current_object.__dict__.get(key) if key in foreign_keys and getattr(current_object, key): value = str(getattr(current_object, key)) @@ -112,19 +94,15 @@ def object_to_json(object_to_encode: Any) -> str: return json.dumps(json_dict, indent=4, sort_keys=True, ensure_ascii=False) -def _remove_empty_elements(d: dict) -> dict: +def _remove_empty_elements(d: dict | list) -> dict | list: """recursively remove empty lists, empty dicts, or None elements from a dictionary""" - def empty(x): + def empty(x: Optional[(dict | list)]) -> bool: return x is None or x == {} or x == [] - if not isinstance(d, (dict, list)): + if not isinstance(d, (dict | list)): return d if isinstance(d, list): return [v for v in (_remove_empty_elements(v) for v in d) if not empty(v)] - return { - k: v - for k, v in ((k, _remove_empty_elements(v)) for k, v in d.items()) - if not empty(v) - } + return {k: v for k, v in ((k, _remove_empty_elements(v)) for k, v in d.items()) if not empty(v)} diff --git a/backend/application/commons/services/functions.py b/backend/application/commons/services/functions.py index c429d5b89..0f9c147a7 100644 --- a/backend/application/commons/services/functions.py +++ b/backend/application/commons/services/functions.py @@ -1,14 +1,16 @@ +from typing import Any + from django.apps import apps from django.db.models.fields import CharField, TextField from application.commons.models import Settings -def get_classname(obj): +def get_classname(obj: Any) -> str: cls = type(obj) module = cls.__module__ name = cls.__qualname__ - if module is not None and module != "__builtin__": + if module != "__builtin__": name = module + "." 
+ name return name @@ -21,7 +23,7 @@ def get_base_url_frontend() -> str: return base_url_frontend -def clip_fields(application: str, model: str, my_object) -> None: +def clip_fields(application: str, model: str, my_object: Any) -> None: Model = apps.get_model(application, model) for field in Model._meta.get_fields(): if isinstance(field, (CharField, TextField)): @@ -39,3 +41,8 @@ def clip_fields(application: str, model: str, my_object) -> None: field.name, value[: max_length - 9] + "\n```\n\n...", ) + + +def get_comma_separated_as_list(comma_separated_string: str) -> list[str]: + return_list = comma_separated_string.split(",") if comma_separated_string else [] + return [x.strip() for x in return_list] diff --git a/backend/application/commons/services/global_request.py b/backend/application/commons/services/global_request.py index 2d15b2fc5..4bbc48bac 100644 --- a/backend/application/commons/services/global_request.py +++ b/backend/application/commons/services/global_request.py @@ -1,32 +1,22 @@ from threading import current_thread -from typing import Optional +from typing import Any, Optional -from django.contrib.auth.models import AnonymousUser -from django.http.request import HttpRequest +from rest_framework.request import Request +from rest_framework.response import Response -from application.access_control.models import User +_requests: dict[str, Request] = {} -_requests: dict[str, HttpRequest] = {} - -def get_current_request() -> Optional[HttpRequest]: +def get_current_request() -> Optional[Request]: return _requests.get(current_thread().name) -def get_current_user() -> Optional[User]: - request = get_current_request() - if request and request.user and not isinstance(request.user, AnonymousUser): - return request.user - - return None - - class GlobalRequestMiddleware: - def __init__(self, get_response): + def __init__(self, get_response: Any) -> None: self.get_response = get_response # One-time configuration and initialization. - def __call__(self, request): + def __call__(self, request: Request) -> Response: # Code to be executed for each request before # the view (and later middleware) are called. 
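# --- Illustrative aside (not part of the diff) ----------------------------------------
# Expected behaviour of get_comma_separated_as_list() from functions.py above; the
# sample strings are made up.
from application.commons.services.functions import get_comma_separated_as_list

assert get_comma_separated_as_list("CVE-2024-1234, GHSA-abcd-efgh") == ["CVE-2024-1234", "GHSA-abcd-efgh"]
assert get_comma_separated_as_list("") == []
# ----------------------------------------------------------------------------------------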
_requests[current_thread().name] = request diff --git a/backend/application/commons/services/log_message.py b/backend/application/commons/services/log_message.py index 308f02d12..252d4e1a9 100644 --- a/backend/application/commons/services/log_message.py +++ b/backend/application/commons/services/log_message.py @@ -1,19 +1,17 @@ -from django.contrib.auth.models import AnonymousUser +from typing import Optional + +from rest_framework.request import Request from rest_framework.response import Response -from application.access_control.models import User from application.commons.services.functions import get_classname -from application.commons.services.global_request import ( - get_current_request, - get_current_user, -) +from application.commons.services.global_request import get_current_request def format_log_message( # pylint: disable=too-many-branches # There are quite a lot of branches, but at least they are not nested too much message: str = None, data: dict = None, - user: User = None, + username: Optional[str] = None, response: Response = None, exception: Exception = None, ) -> str: @@ -29,15 +27,10 @@ def format_log_message( # pylint: disable=too-many-branches for key in data.keys(): message_dict[f"data_{str(key)}"] = str(data[key]) - current_user = get_current_user() - current_request = get_current_request() - - if user: - message_dict["user"] = user.username - elif current_user: - if not isinstance(current_user, AnonymousUser): - message_dict["user"] = current_user.username + if username: + message_dict["user"] = username + current_request = get_current_request() if current_request: if current_request.method: message_dict["request_method"] = current_request.method @@ -55,7 +48,7 @@ def format_log_message( # pylint: disable=too-many-branches return str(message_dict) -def __get_client_ip(request): +def __get_client_ip(request: Request) -> str: x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR") if x_forwarded_for: ip = x_forwarded_for.split(",")[0] diff --git a/backend/application/commons/services/notification.py b/backend/application/commons/services/notification.py deleted file mode 100644 index 39d164e5f..000000000 --- a/backend/application/commons/services/notification.py +++ /dev/null @@ -1,34 +0,0 @@ -from django.db.models.query import QuerySet -from rest_framework.exceptions import ValidationError - -from application.access_control.services.authorization import user_has_permission -from application.access_control.services.roles_permissions import Permissions -from application.commons.models import Notification -from application.commons.queries.notification import get_notifications -from application.commons.services.global_request import get_current_user - - -def bulk_delete(notification_ids: list[int]) -> None: - notifications = _check_notifications(notification_ids) - notifications.delete() - - -def _check_notifications(notification_ids: list[int]) -> QuerySet[Notification]: - notifications = get_notifications().filter(id__in=notification_ids) - if len(notifications) != len(notification_ids): - raise ValidationError("Some notifications do not exist") - - user = get_current_user() - if not user: - raise ValidationError("No user in backend request") - - if not user.is_superuser: - for notification in notifications: - if not notification.product or not user_has_permission( - notification.product, Permissions.Product_Delete - ): - raise ValidationError( - "User does not have permission to delete some notifications" - ) - - return notifications diff --git 
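# --- Illustrative aside (not part of the diff) ----------------------------------------
# Sketch of calling the reworked format_log_message() above with the new username
# parameter instead of a User object; the logger name, message and data values are
# assumptions for illustration only.
import logging

from application.commons.services.log_message import format_log_message

logger = logging.getLogger("secobserve.commons")
logger.info(
    format_log_message(
        message="Observation updated",
        data={"observation": 4711},
        username="jane.doe",
    )
)
# ----------------------------------------------------------------------------------------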
a/backend/application/commons/services/request_cache.py b/backend/application/commons/services/request_cache.py new file mode 100644 index 000000000..e8219da75 --- /dev/null +++ b/backend/application/commons/services/request_cache.py @@ -0,0 +1,107 @@ +from collections import OrderedDict +from threading import Lock +from typing import Any, Callable, Optional, TypeVar + +from django.core.cache.backends.base import BaseCache +from django.core.cache.backends.locmem import LocMemCache +from django.utils.deprecation import MiddlewareMixin +from rest_framework.request import Request + +from application.commons.services.global_request import get_current_request + +T = TypeVar("T") + +# Attribution 1: This code has been taken from https://github.com/anexia-it/django-request-cache, which has +# been published under the MIT License. Since this project hasn't been updated for several years, +# the code has been copied to SecObserve, to be able to fix issues ourselves. + +# Attribution 2: RequestCache and RequestCacheMiddleware are from a source code snippet on StackOverflow +# https://stackoverflow.com/questions/3151469/per-request-cache-in-django/37015573#37015573 +# created by coredumperror https://stackoverflow.com/users/464318/coredumperror +# Original Question was posted by https://stackoverflow.com/users/7679/chase-seibert +# at https://stackoverflow.com/questions/3151469/per-request-cache-in-django +# copied on 2017-Dec-20 + + +class RequestCache(LocMemCache): + """ + RequestCache is a customized LocMemCache which stores its data cache as an instance attribute, rather than + a global. It's designed to live only as long as the request object that RequestCacheMiddleware attaches it to. + """ + + def __init__(self) -> None: # pylint: disable=super-init-not-called) + # We explicitly do not call super() here, because while we want BaseCache.__init__() to run, we *don't* + # want LocMemCache.__init__() to run, because that would store our caches in its globals. + BaseCache.__init__(self, params={}) # pylint: disable=non-parent-init-called + + self._cache: dict[Any, Any] = OrderedDict() + self._expire_info: dict[Any, Any] = {} + self._lock = Lock() + + +class RequestCacheMiddleware(MiddlewareMixin): + """ + For every request, a fresh cache instance is stored in ``request.cache``. + The cache instance lives only as long as request does. 
+ """ + + def process_request(self, request: Request) -> None: + setattr(request, "cache", RequestCache()) + # request.cache = RequestCache() + + +def cache_for_request(fn: Callable[..., T]) -> Callable[..., T]: + """ + Decorator that allows to cache a function call with parameters and its result only for the current request + The result is stored in the memory of the current process + As soon as the request is destroyed, the cache is destroyed + :param fn: + :return: + """ + + def wrapper(*args: Any, **kwargs: Any) -> Any: + cache = _get_request_cache() + + if not cache: + # no cache found -> directly execute function without caching + return fn(*args, **kwargs) + + # cache found -> check if a result is already available for this function call + key = _cache_calculate_key(fn.__name__, *args, **kwargs) + + try: + result = getattr(cache, key) + except AttributeError: + # no result available -> execute function + result = fn(*args, **kwargs) + setattr(cache, key, result) + + return result + + return wrapper + + +def _get_request_cache() -> Optional[RequestCache]: + """ + Return the current requests cache + :return: + """ + return getattr(get_current_request(), "cache", None) + + +cache_args_kwargs_marker = object() # marker for separating args from kwargs (needs to be global) + + +def _cache_calculate_key(*args: Any, **kwargs: Any) -> str: + """ + Calculate the cache key of a function call with args and kwargs + Taken from lru_cache + :param args: + :param kwargs: + :return: the calculated key for the function call + :rtype: basestring + """ + # combine args with kwargs, separated by the cache_args_kwargs_marker + key = (*args, cache_args_kwargs_marker, *tuple(sorted(kwargs.items()))) + # return as a string + return str(key) diff --git a/backend/application/commons/services/security_headers.py b/backend/application/commons/services/security_headers.py index 610c347c6..ca9bd1b0d 100644 --- a/backend/application/commons/services/security_headers.py +++ b/backend/application/commons/services/security_headers.py @@ -1,11 +1,16 @@ +from typing import Any + +from rest_framework.request import Request +from rest_framework.response import Response + # see https://adamj.eu/tech/2021/05/01/how-to-set-coep-coop-corp-security-headers-in-django/ class SecurityHeadersMiddleware: - def __init__(self, get_response): + def __init__(self, get_response: Any) -> None: self.get_response = get_response - def __call__(self, request): + def __call__(self, request: Request) -> Response: response = self.get_response(request) response["Cross-Origin-Embedder-Policy"] = "require-corp" response["Cross-Origin-Opener-Policy"] = "same-origin" diff --git a/backend/application/commons/signals.py b/backend/application/commons/signals.py deleted file mode 100644 index 0085af8a7..000000000 --- a/backend/application/commons/signals.py +++ /dev/null @@ -1,15 +0,0 @@ -from django.db.models.signals import post_save -from django.dispatch import receiver - -from application.commons.models import Settings -from application.core.models import Product -from application.core.services.security_gate import check_security_gate - - -@receiver(post_save, sender=Settings) -def settings_post_save( # pylint: disable=unused-argument - sender, instance, created, **kwargs -) -> None: - # parameters are needed according to Django documentation - for product in Product.objects.filter(is_product_group=False): - check_security_gate(product) diff --git a/backend/application/commons/types.py b/backend/application/commons/types.py index 
b90f479d5..f137a3224 100644 --- a/backend/application/commons/types.py +++ b/backend/application/commons/types.py @@ -1,3 +1,6 @@ +from typing import Optional + + class Age_Choices: AGE_DAY = "Today" AGE_WEEK = "Past 7 days" @@ -14,7 +17,7 @@ class Age_Choices: ] @classmethod - def get_days_from_age(cls, value): + def get_days_from_age(cls, value: "Age_Choices") -> Optional[int]: if value == cls.AGE_DAY: days = 0 elif value == cls.AGE_WEEK: @@ -28,3 +31,13 @@ def get_days_from_age(cls, value): else: days = None return days + + +class VEX_Justification_Styles: + STYLE_CSAF_OPENVEX = "CSAF/OpenVEX" + STYLE_CYCLONEDX = "CycloneDX" + + STYLE_CHOICES = [ + (STYLE_CSAF_OPENVEX, STYLE_CSAF_OPENVEX), + (STYLE_CYCLONEDX, STYLE_CYCLONEDX), + ] diff --git a/backend/application/commons/views.py b/backend/application/commons/views.py index b102f9a89..41fae0daa 100644 --- a/backend/application/commons/views.py +++ b/backend/application/commons/views.py @@ -1,5 +1,7 @@ -from django.http import HttpResponse +from django.http import HttpRequest, HttpResponse +from django.views.decorators.http import require_GET -def empty_view(request): +@require_GET +def empty_view(request: HttpRequest) -> HttpResponse: return HttpResponse(status=204) diff --git a/backend/application/constance/migrations/0001_initial.py b/backend/application/constance/migrations/0001_initial.py index aa9c667d6..a98f0f6f5 100644 --- a/backend/application/constance/migrations/0001_initial.py +++ b/backend/application/constance/migrations/0001_initial.py @@ -26,9 +26,7 @@ class Migration(migrations.Migration): ("key", models.CharField(max_length=255, unique=True)), ( "value", - picklefield.fields.PickledObjectField( - blank=True, editable=False, null=True - ), + picklefield.fields.PickledObjectField(blank=True, editable=False, null=True), ), ], options={ diff --git a/backend/application/constance/models.py b/backend/application/constance/models.py index c6891cd99..26f24e3c6 100644 --- a/backend/application/constance/models.py +++ b/backend/application/constance/models.py @@ -16,5 +16,5 @@ class Meta: ("view_config", "Can view config"), ] - def __str__(self): + def __str__(self) -> str: return self.key diff --git a/backend/application/core/api/filters.py b/backend/application/core/api/filters.py index 55584055c..143894cfc 100644 --- a/backend/application/core/api/filters.py +++ b/backend/application/core/api/filters.py @@ -1,5 +1,7 @@ from datetime import timedelta +from typing import Any, Optional +from django.db.models import Q, QuerySet from django.utils import timezone from django_filters import ( BooleanFilter, @@ -7,12 +9,15 @@ ChoiceFilter, FilterSet, ModelChoiceFilter, + MultipleChoiceFilter, OrderingFilter, ) +from application.commons.api.extended_ordering_filter import ExtendedOrderingFilter from application.commons.types import Age_Choices from application.core.models import ( Branch, + Component, Evidence, Observation, Observation_Log, @@ -22,7 +27,8 @@ Product_Member, Service, ) -from application.core.types import Status +from application.core.types import Severity, Status +from application.licenses.models import License_Component class ProductGroupFilter(FilterSet): @@ -30,7 +36,7 @@ class ProductGroupFilter(FilterSet): ordering = OrderingFilter( # tuple-mapping retains order - fields=(("name", "name")), + fields=(("name", "name"),), ) class Meta: @@ -40,9 +46,7 @@ class Meta: class ProductFilter(FilterSet): name = CharFilter(field_name="name", lookup_expr="icontains") - age = ChoiceFilter( - field_name="age", method="get_age", 
choices=Age_Choices.AGE_CHOICES - ) + age = ChoiceFilter(field_name="age", method="get_age", choices=Age_Choices.AGE_CHOICES) ordering = OrderingFilter( # tuple-mapping retains order @@ -54,7 +58,12 @@ class ProductFilter(FilterSet): ), ) - def get_age(self, queryset, field_name, value): # pylint: disable=unused-argument + def get_age( + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: # field_name is used as a positional argument days = Age_Choices.get_days_from_age(value) @@ -106,6 +115,64 @@ class Meta: class BranchFilter(FilterSet): + for_observations = BooleanFilter( + field_name="for_observations", + method="get_for_observations", + ) + for_license_components = BooleanFilter( + field_name="for_license_components", + method="get_for_license_components", + ) + + def get_for_observations( + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: + # field_name is used as a positional argument + if value: + product_data = self.data.get("product") + if product_data: + product_id = int(product_data) + observation_branches = ( + Observation.objects.filter(product_id=product_id, branch__isnull=False) + .values("branch_id") + .distinct() + ) + product_default_branches = ( + Product.objects.filter(id=product_id, repository_default_branch__isnull=False) + .values("repository_default_branch") + .distinct() + ) + return queryset.filter(Q(id__in=observation_branches) | Q(id__in=product_default_branches)) + + return queryset + + def get_for_license_components( + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: + # field_name is used as a positional argument + if value: + product_data = self.data.get("product") + if product_data: + product_id = int(product_data) + license_component_branches = ( + License_Component.objects.filter(product_id=product_id, branch__isnull=False) + .values("branch_id") + .distinct() + ) + product_default_branches = ( + Product.objects.filter(id=product_id, repository_default_branch__isnull=False) + .values("repository_default_branch") + .distinct() + ) + return queryset.filter(Q(id__in=license_component_branches) | Q(id__in=product_default_branches)) + + return queryset ordering = OrderingFilter( # tuple-mapping retains order @@ -125,7 +192,7 @@ class Meta: class ServiceFilter(FilterSet): ordering = OrderingFilter( # tuple-mapping retains order - fields=(("name", "name")), + fields=(("name", "name"),), ) class Meta: @@ -135,60 +202,30 @@ class Meta: class ObservationFilter(FilterSet): title = CharFilter(field_name="title", lookup_expr="icontains") - origin_component_name_version = CharFilter( - field_name="origin_component_name_version", lookup_expr="icontains" - ) + current_severity = MultipleChoiceFilter(field_name="current_severity", choices=Severity.SEVERITY_CHOICES) + current_status = MultipleChoiceFilter(field_name="current_status", choices=Status.STATUS_CHOICES) + branch_name = CharFilter(field_name="branch__name", lookup_expr="icontains") + origin_service_name = CharFilter(field_name="origin_service__name", lookup_expr="icontains") + origin_component_name_version = CharFilter(field_name="origin_component_name_version", lookup_expr="icontains") origin_docker_image_name_tag_short = CharFilter( field_name="origin_docker_image_name_tag_short", lookup_expr="icontains" ) - origin_service_name = CharFilter( - field_name="origin_service_name", lookup_expr="icontains" - ) - origin_endpoint_hostname = CharFilter( 
- field_name="origin_endpoint_hostname", lookup_expr="icontains" - ) - origin_source_file = CharFilter( - field_name="origin_source_file", lookup_expr="icontains" - ) - origin_cloud_qualified_resource = CharFilter( - field_name="origin_cloud_qualified_resource", lookup_expr="icontains" - ) + origin_service_name = CharFilter(field_name="origin_service_name", lookup_expr="icontains") + origin_endpoint_hostname = CharFilter(field_name="origin_endpoint_hostname", lookup_expr="icontains") + origin_source_file = CharFilter(field_name="origin_source_file", lookup_expr="icontains") + origin_cloud_qualified_resource = CharFilter(field_name="origin_cloud_qualified_resource", lookup_expr="icontains") origin_kubernetes_qualified_resource = CharFilter( field_name="origin_kubernetes_qualified_resource", lookup_expr="icontains" ) scanner = CharFilter(field_name="scanner", lookup_expr="icontains") - age = ChoiceFilter( - field_name="age", method="get_age", choices=Age_Choices.AGE_CHOICES - ) + age = ChoiceFilter(field_name="age", method="get_age", choices=Age_Choices.AGE_CHOICES) product_group = ModelChoiceFilter( field_name="product__product_group", queryset=Product.objects.filter(is_product_group=True), ) + cve_known_exploited = BooleanFilter(field_name="cve_known_exploited", method="get_cve_known_exploited") - has_pending_assessment = BooleanFilter( - field_name="has_pending_assessment", - method="get_has_pending_assessment", - ) - - def get_has_pending_assessment( - self, queryset, field_name, value - ): # pylint: disable=unused-argument - # field_name is used as a positional argument - - if value: - return queryset.filter( - id__in=Observation_Log.objects.filter( - assessment_status="Needs approval" - ).values("observation_id") - ) - - return queryset.exclude( - id__in=Observation_Log.objects.filter( - assessment_status="Needs approval" - ).values("observation_id") - ) - - ordering = OrderingFilter( + ordering = ExtendedOrderingFilter( # tuple-mapping retains order fields=( ("id", "id"), @@ -196,8 +233,9 @@ def get_has_pending_assessment( ("product__product_group__name", "product_data.product_group_name"), ("branch__name", "branch_name"), ("title", "title"), - ("numerical_severity", "current_severity"), + (("numerical_severity", "id"), "current_severity"), ("current_status", "current_status"), + ("current_priority", "current_priority"), ("origin_component_name_version", "origin_component_name_version"), ( "origin_docker_image_name_tag_short", @@ -206,6 +244,7 @@ def get_has_pending_assessment( ("origin_service_name", "origin_service_name"), ("origin_endpoint_hostname", "origin_endpoint_hostname"), ("origin_source_file", "origin_source_file"), + ("origin_source_file", "origin_source_file_short"), ("origin_cloud_qualified_resource", "origin_cloud_qualified_resource"), ( "origin_kubernetes_qualified_resource", @@ -218,6 +257,8 @@ def get_has_pending_assessment( ("epss_score", "epss_score"), ("has_potential_duplicates", "has_potential_duplicates"), ("origin_component_purl_type", "origin_component_purl_type"), + ("update_impact_score", "update_impact_score"), + ("fix_available", "fix_available"), ), ) @@ -236,9 +277,19 @@ class Meta: # pylint: disable=duplicate-code "origin_service", "has_potential_duplicates", "origin_component_purl_type", + "origin_component_purl", + "origin_component_cpe", + "origin_component_cyclonedx_bom_link", + "update_impact_score", + "fix_available", ] - def get_age(self, queryset, field_name, value): # pylint: disable=unused-argument + def get_age( + self, + queryset: QuerySet, 
+ name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: # field_name is used as a positional argument days = Age_Choices.get_days_from_age(value) @@ -250,11 +301,22 @@ def get_age(self, queryset, field_name, value): # pylint: disable=unused-argume time_threshold = today - timedelta(days=int(days)) return queryset.filter(last_observation_log__gte=time_threshold) + def get_cve_known_exploited( + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Optional[bool], + ) -> QuerySet: + # name is used as a positional argument + if value is True: + return queryset.exclude(cve_found_in="") + if value is False: + return queryset.filter(cve_found_in="") + return queryset + class ObservationLogFilter(FilterSet): - age = ChoiceFilter( - field_name="age", method="get_age", choices=Age_Choices.AGE_CHOICES - ) + age = ChoiceFilter(field_name="age", method="get_age", choices=Age_Choices.AGE_CHOICES) product = ModelChoiceFilter( field_name="observation__product", queryset=Product.objects.all(), @@ -267,12 +329,10 @@ class ObservationLogFilter(FilterSet): field_name="observation__title", lookup_expr="icontains", ) - branch_name = CharFilter( - field_name="observation__branch__name", lookup_expr="icontains" - ) - branch = ModelChoiceFilter( - field_name="observation__branch", queryset=Branch.objects.all() - ) + branch_name = CharFilter(field_name="observation__branch__name", lookup_expr="icontains") + branch = ModelChoiceFilter(field_name="observation__branch", queryset=Branch.objects.all()) + origin_service_name = CharFilter(field_name="observation__origin_service__name", lookup_expr="icontains") + origin_service = ModelChoiceFilter(field_name="observation__origin_service", queryset=Service.objects.all()) origin_component_name_version = CharFilter( field_name="observation__origin_component_name_version", lookup_expr="icontains" ) @@ -280,12 +340,8 @@ class ObservationLogFilter(FilterSet): field_name="observation__origin_docker_image_name_tag_short", lookup_expr="icontains", ) - origin_endpoint_hostname = CharFilter( - field_name="observation__origin_endpoint_hostname", lookup_expr="icontains" - ) - origin_source_file = CharFilter( - field_name="observation__origin_source_file", lookup_expr="icontains" - ) + origin_endpoint_hostname = CharFilter(field_name="observation__origin_endpoint_hostname", lookup_expr="icontains") + origin_source_file = CharFilter(field_name="observation__origin_source_file", lookup_expr="icontains") origin_cloud_qualified_resource = CharFilter( field_name="observation__origin_cloud_qualified_resource", lookup_expr="icontains", @@ -330,6 +386,7 @@ class ObservationLogFilter(FilterSet): ), ("severity", "severity"), ("status", "status"), + ("priority", "priority"), ("comment", "comment"), ("created", "created"), ("assessment_status", "assessment_status"), @@ -343,7 +400,12 @@ class Meta: model = Observation_Log fields = ["observation", "user", "assessment_status", "status", "severity"] - def get_age(self, queryset, field_name, value): # pylint: disable=unused-argument + def get_age( + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: # field_name is used as a positional argument days = Age_Choices.get_days_from_age(value) @@ -432,3 +494,40 @@ class PotentialDuplicateFilter(FilterSet): class Meta: model = Potential_Duplicate fields = ["observation"] + + +class ComponentFilter(FilterSet): + component_name_version = CharFilter(field_name="component_name_version", 
lookup_expr="icontains") + product_group = ModelChoiceFilter( + field_name="product__product_group", + queryset=Product.objects.filter(is_product_group=True), + ) + branch_name = CharFilter(field_name="branch__name", lookup_expr="icontains") + origin_service_name = CharFilter(field_name="origin_service__name", lookup_expr="icontains") + + ordering = ExtendedOrderingFilter( + # tuple-mapping retains order + fields=( + ("id", "id"), + (("product__name", "branch__name", "component_name_version"), "product_name"), + (("product__product_group__name", "branch__name", "component_name_version"), "product_group_name"), + (("branch__name", "product__name", "component_name_version"), "branch_name"), + (("component_name_version", "product__name", "branch__name"), "component_name_version_type"), + ( + ("origin_service__name", "product__name", "branch__name", "component_name_version"), + "origin_service_name", + ), + (("has_observations", "product__name", "branch__name", "component_name_version"), "has_observations"), + ), + ) + + class Meta: # pylint: disable=duplicate-code + model = Component + fields = [ + "product", + "branch", + "component_name_version", + "component_purl_type", + "origin_service", + "has_observations", + ] diff --git a/backend/application/core/api/permissions.py b/backend/application/core/api/permissions.py index 7064f6f27..12f6118a1 100644 --- a/backend/application/core/api/permissions.py +++ b/backend/application/core/api/permissions.py @@ -1,23 +1,31 @@ -from rest_framework.exceptions import ValidationError +from typing import Any + +from django.contrib.auth.models import AnonymousUser +from rest_framework.exceptions import PermissionDenied, ValidationError from rest_framework.permissions import BasePermission +from rest_framework.request import Request +from rest_framework.views import APIView -from application.access_control.api.permissions_base import ( +from application.authorization.api.permissions_base import ( check_object_permission, check_post_permission, ) -from application.access_control.services.authorization import get_highest_user_role -from application.access_control.services.roles_permissions import Permissions, Roles +from application.authorization.services.authorization import get_highest_user_role +from application.authorization.services.roles_permissions import Permissions, Roles from application.core.models import Product class UserHasProductPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "POST": + if isinstance(request.user, AnonymousUser): + raise PermissionDenied("You must be authenticated to create a Product") + return not request.user.is_external return True - def has_object_permission(self, request, view, obj): + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: return check_object_permission( request=request, object_to_check=obj, @@ -28,13 +36,16 @@ def has_object_permission(self, request, view, obj): class UserHasProductGroupPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "POST": + if isinstance(request.user, AnonymousUser): + raise PermissionDenied("You must be authenticated to create a Product Group") + return not request.user.is_external return True - def has_object_permission(self, request, view, obj): + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: 
return check_object_permission( request=request, object_to_check=obj, @@ -45,17 +56,11 @@ def has_object_permission(self, request, view, obj): class UserHasProductMemberPermission(BasePermission): - def has_permission(self, request, view): - return check_post_permission( - request, Product, "product", Permissions.Product_Member_Create - ) + def has_permission(self, request: Request, view: APIView) -> bool: + return check_post_permission(request, Product, "product", Permissions.Product_Member_Create) - def has_object_permission(self, request, view, obj): - if ( - request.method == "DELETE" - and obj.role == Roles.Owner - and not request.user.is_superuser - ): + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: + if request.method == "DELETE" and obj.role == Roles.Owner and not request.user.is_superuser: _check_delete_owner(request, obj) return check_object_permission( @@ -68,7 +73,7 @@ def has_object_permission(self, request, view, obj): class UserHasProductAuthorizationGroupMemberPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: return check_post_permission( request, Product, @@ -76,12 +81,8 @@ def has_permission(self, request, view): Permissions.Product_Authorization_Group_Member_Create, ) - def has_object_permission(self, request, view, obj): - if ( - request.method == "DELETE" - and obj.role == Roles.Owner - and not request.user.is_superuser - ): + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: + if request.method == "DELETE" and obj.role == Roles.Owner and not request.user.is_superuser: _check_delete_owner(request, obj) return check_object_permission( @@ -93,7 +94,10 @@ def has_object_permission(self, request, view, obj): ) -def _check_delete_owner(request, obj) -> bool: +def _check_delete_owner(request: Request, obj: Any) -> bool: + if isinstance(request.user, AnonymousUser): + raise PermissionDenied("You must be authenticated to delete an Owner") + if get_highest_user_role(obj.product, request.user) == Roles.Owner: return True @@ -101,12 +105,10 @@ def _check_delete_owner(request, obj) -> bool: class UserHasBranchPermission(BasePermission): - def has_permission(self, request, view): - return check_post_permission( - request, Product, "product", Permissions.Branch_Create - ) + def has_permission(self, request: Request, view: APIView) -> bool: + return check_post_permission(request, Product, "product", Permissions.Branch_Create) - def has_object_permission(self, request, view, obj): + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: return check_object_permission( request=request, object_to_check=obj, @@ -117,26 +119,27 @@ def has_object_permission(self, request, view, obj): class UserHasServicePermission(BasePermission): - def has_object_permission(self, request, view, obj): + def has_permission(self, request: Request, view: APIView) -> bool: + return check_post_permission(request, Product, "product", Permissions.Service_Create) + + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: return check_object_permission( request=request, object_to_check=obj, get_permission=Permissions.Service_View, - put_permission=None, + put_permission=Permissions.Serice_Edit, delete_permission=Permissions.Service_Delete, ) class UserHasObservationPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> 
bool: if request.path.endswith("/bulk_assessment/"): return True - return check_post_permission( - request, Product, "product", Permissions.Observation_Create - ) + return check_post_permission(request, Product, "product", Permissions.Observation_Create) - def has_object_permission(self, request, view, obj): + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: return check_object_permission( request=request, object_to_check=obj, diff --git a/backend/application/core/api/serializers_component.py b/backend/application/core/api/serializers_component.py new file mode 100644 index 000000000..cf57a3a9f --- /dev/null +++ b/backend/application/core/api/serializers_component.py @@ -0,0 +1,50 @@ +from rest_framework.serializers import ( + ModelSerializer, + SerializerMethodField, +) + +from application.core.models import Component + + +class ComponentSerializer(ModelSerializer): + product_name = SerializerMethodField() + product_group_name = SerializerMethodField() + branch_name = SerializerMethodField() + component_name_version_type = SerializerMethodField() + origin_service_name = SerializerMethodField() + + def get_product_name(self, obj: Component) -> str: + return obj.product.name + + def get_product_group_name(self, obj: Component) -> str: + if obj.product.product_group: + return obj.product.product_group.name + return "" + + def get_branch_name(self, obj: Component) -> str: + if obj.branch: + return obj.branch.name + return "" + + def get_component_name_version_type(self, obj: Component) -> str: + if obj.component_name_version: + component_name_version_type = obj.component_name_version + if obj.component_purl_type: + component_name_version_type += f" ({obj.component_purl_type})" + return component_name_version_type + return "" + + def get_origin_service_name(self, obj: Component) -> str: + if obj.origin_service: + return obj.origin_service.name + return "" + + class Meta: + model = Component + fields = "__all__" + + +class ComponentNameSerializer(ModelSerializer): + class Meta: + model = Component + fields = ["id", "component_name_version"] diff --git a/backend/application/core/api/serializers_helpers.py b/backend/application/core/api/serializers_helpers.py index 651256aa4..9d239af52 100644 --- a/backend/application/core/api/serializers_helpers.py +++ b/backend/application/core/api/serializers_helpers.py @@ -1,6 +1,7 @@ import re from decimal import Decimal from typing import Optional +from urllib.parse import urlsplit import validators from cvss import CVSS3, CVSS4, CVSSError @@ -31,16 +32,22 @@ def get_origin_component_name_version(observation: Observation) -> str: return "" origin_component_name_version_with_type = observation.origin_component_name_version - if observation.origin_component_purl: - purl = PackageURL.from_string(observation.origin_component_purl) - if purl.type: - origin_component_name_version_with_type += f" ({purl.type})" + if observation.origin_component_purl_type: + origin_component_name_version_with_type += f" ({observation.origin_component_purl_type})" return origin_component_name_version_with_type def validate_url(url: str) -> str: - if url and not validators.url(url): + if not url: + return url + + hostname = urlsplit(url).hostname + simple_host = hostname == "localhost" + + scheme = urlsplit(url).scheme + + if not (validators.url(url, simple_host=simple_host) and scheme in ["http", "https"]): raise ValidationError("Not a valid URL") return url @@ -70,7 +77,7 @@ def validate_cvss4_vector(cvss4_vector: str) -> str: return cvss4_vector 
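# --- Illustrative aside (not part of the diff) ----------------------------------------
# Expected behaviour of the reworked validate_url() above (sample URLs are made up):
# http/https URLs accepted by the validators library are returned unchanged, a bare
# "localhost" host is allowed via the simple_host option, and anything else raises
# ValidationError.
from rest_framework.exceptions import ValidationError

from application.core.api.serializers_helpers import validate_url

validate_url("https://github.com/MaibornWolff/SecObserve")  # returned unchanged
validate_url("http://localhost:8000/api/status")            # accepted as a simple host
try:
    validate_url("ftp://example.com/report.pdf")             # non-http(s) scheme
except ValidationError:
    pass
# ----------------------------------------------------------------------------------------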
-def validate_cvss_and_severity(attrs): +def validate_cvss_and_severity(attrs: dict) -> None: cvss3_severity = _validate_cvss3(attrs) cvss4_severity = _validate_cvss4(attrs) @@ -88,9 +95,7 @@ def validate_cvss_and_severity(attrs): ) else: if not cvss_severity: - raise ValidationError( - "Either Severity, CVSS3/4 score or CVSS3/4 vector has to be set" - ) + raise ValidationError("Either Severity, CVSS3/4 score or CVSS3/4 vector has to be set") def _validate_cvss3(attrs: dict) -> Optional[str]: diff --git a/backend/application/core/api/serializers_observation.py b/backend/application/core/api/serializers_observation.py index a1e7eead5..d2ad88da5 100644 --- a/backend/application/core/api/serializers_observation.py +++ b/backend/application/core/api/serializers_observation.py @@ -1,9 +1,7 @@ from typing import Optional from urllib.parse import urlparse -import validators from django.utils import timezone -from packageurl import PackageURL from rest_framework.serializers import ( CharField, ChoiceField, @@ -16,7 +14,8 @@ ValidationError, ) -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user +from application.commons.services.functions import get_comma_separated_as_list from application.core.api.serializers_helpers import ( get_branch_name, get_origin_component_name_version, @@ -24,6 +23,7 @@ validate_cvss3_vector, validate_cvss4_vector, validate_cvss_and_severity, + validate_url, ) from application.core.api.serializers_product import ( NestedProductListSerializer, @@ -41,8 +41,13 @@ ) from application.core.queries.observation import get_current_observation_log from application.core.services.observation_log import create_observation_log -from application.core.services.security_gate import check_security_gate -from application.core.types import Assessment_Status, Severity, Status, VexJustification +from application.core.services.security_gate import check_security_gate_observation +from application.core.types import ( + Assessment_Status, + Severity, + Status, + VEX_Justification, +) from application.import_observations.api.serializers import ParserSerializer from application.import_observations.models import Parser from application.import_observations.types import Parser_Type @@ -82,16 +87,16 @@ class ObservationSerializer(ModelSerializer): references = NestedReferenceSerializer(many=True) evidences = NestedEvidenceSerializer(many=True) origin_source_file_url = SerializerMethodField() - origin_component_purl_type = SerializerMethodField() - origin_component_purl_namespace = SerializerMethodField() issue_tracker_issue_url = SerializerMethodField() assessment_needs_approval = SerializerMethodField() + vulnerability_id_aliases = SerializerMethodField() + cve_found_in = SerializerMethodField() class Meta: model = Observation - exclude = ["numerical_severity", "issue_tracker_jira_initial_status"] + exclude = ["numerical_severity", "issue_tracker_jira_initial_status", "origin_source_file_link"] - def to_representation(self, instance): + def to_representation(self, instance: Observation) -> dict: response = super().to_representation(instance) response["evidences"] = sorted(response["evidences"], key=lambda x: x["name"]) return response @@ -100,94 +105,14 @@ def get_branch_name(self, observation: Observation) -> str: return get_branch_name(observation) def get_origin_source_file_url(self, observation: Observation) -> Optional[str]: - origin_source_file_url = None - - if 
observation.product.repository_prefix and observation.origin_source_file:
-            if not validators.url(observation.product.repository_prefix):
-                return None
-
-            parsed_url = urlparse(observation.product.repository_prefix)
-            if parsed_url.scheme not in ["http", "https"]:
-                return None
-
-            origin_source_file_url = observation.product.repository_prefix
-            if origin_source_file_url.endswith("/"):
-                origin_source_file_url = origin_source_file_url[:-1]
-            if parsed_url.netloc == "dev.azure.com":
-                origin_source_file_url = self._create_azure_devops_url(
-                    observation, origin_source_file_url
-                )
-            else:
-                origin_source_file_url = self._create_common_url(
-                    observation, origin_source_file_url
-                )
-
-        return origin_source_file_url
-
-    def get_origin_component_purl_type(self, observation: Observation) -> str:
-        if observation.origin_component_purl:
-            try:
-                purl = PackageURL.from_string(observation.origin_component_purl)
-                return purl.type
-            except ValueError:
-                return ""
-
-        return ""
-
-    def get_origin_component_purl_namespace(
-        self, observation: Observation
-    ) -> Optional[str]:
-        if observation.origin_component_purl:
-            try:
-                purl = PackageURL.from_string(observation.origin_component_purl)
-                return purl.namespace
-            except ValueError:
-                return ""
-        return ""
-
-    def _create_azure_devops_url(
-        self, observation: Observation, origin_source_file_url: str
-    ) -> str:
-        origin_source_file_url += f"?path={observation.origin_source_file}"
-        if observation.branch:
-            origin_source_file_url += f"&version=GB{observation.branch.name}"
-        if observation.origin_source_line_start:
-            origin_source_file_url += f"&line={observation.origin_source_line_start}"
-            origin_source_file_url += "&lineStartColumn=1&lineEndColumn=1"
-            if observation.origin_source_line_end:
-                origin_source_file_url += (
-                    f"&lineEnd={observation.origin_source_line_end+1}"
-                )
-            else:
-                origin_source_file_url += (
-                    f"&lineEnd={observation.origin_source_line_start+1}"
-                )
-
-        return origin_source_file_url
-
-    def _create_common_url(
-        self, observation: Observation, origin_source_file_url: str
-    ) -> str:
-        if observation.branch:
-            origin_source_file_url += f"/{observation.branch.name}"
-        origin_source_file_url += f"/{observation.origin_source_file}"
-        if observation.origin_source_line_start:
-            origin_source_file_url += "#L" + str(observation.origin_source_line_start)
-            if observation.origin_source_line_end:
-                origin_source_file_url += "-" + str(observation.origin_source_line_end)
-
-        return origin_source_file_url
+        return _get_origin_source_file_url(observation)
 
     def get_issue_tracker_issue_url(self, observation: Observation) -> Optional[str]:
         issue_url = None
         if observation.issue_tracker_issue_id:
-            issue_tracker = issue_tracker_factory(
-                observation.product, with_communication=False
-            )
-            issue_url = issue_tracker.get_frontend_issue_url(
-                observation.product, observation.issue_tracker_issue_id
-            )
+            issue_tracker = issue_tracker_factory(observation.product, with_communication=False)
+            issue_url = issue_tracker.get_frontend_issue_url(observation.product, observation.issue_tracker_issue_id)
 
         return issue_url
 
@@ -195,12 +120,17 @@ def get_assessment_needs_approval(self, observation: Observation) -> Optional[in
         current_observation_log = get_current_observation_log(observation)
         if (
             current_observation_log
-            and current_observation_log.assessment_status
-            == Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL
+            and current_observation_log.assessment_status == Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL
         ):
             return current_observation_log.pk
 
         return None
 
+    def get_vulnerability_id_aliases(self, observation: Observation) -> list[dict[str, str]]:
+        return _get_vulnerability_id_aliases(observation)
+
+    def get_cve_found_in(self, observation: Observation) -> list[dict[str, str]]:
+        return _get_cve_found_in_sources(observation)
+
     def validate_product(self, product: Product) -> Product:
         if product and product.is_product_group:
             raise ValidationError("Product must not be a product group")
@@ -220,6 +150,10 @@ class ObservationListSerializer(ModelSerializer):
     parser_data = ParserSerializer(source="parser")
     scanner_name = SerializerMethodField()
    origin_component_name_version = SerializerMethodField()
+    origin_source_file_short = SerializerMethodField()
+    origin_source_file_url = SerializerMethodField()
+    vulnerability_id_aliases = SerializerMethodField()
+    cve_found_in = SerializerMethodField()
 
     class Meta:
         model = Observation
@@ -227,6 +161,7 @@ class Meta:
             "numerical_severity",
             "issue_tracker_jira_initial_status",
             "origin_component_dependencies",
+            "origin_source_file_link",
         ]
 
     def get_branch_name(self, observation: Observation) -> str:
@@ -238,9 +173,94 @@ def get_scanner_name(self, observation: Observation) -> str:
     def get_origin_component_name_version(self, observation: Observation) -> str:
         return get_origin_component_name_version(observation)
 
+    def get_origin_source_file_short(self, observation: Observation) -> Optional[str]:
+        if observation.origin_source_file:
+            source_file_parts = observation.origin_source_file.split("/")
+            if len(source_file_parts) > 2:
+                return f"{source_file_parts[0]}/.../{source_file_parts[-1]}"
+        return observation.origin_source_file
+
+    def get_origin_source_file_url(self, observation: Observation) -> Optional[str]:
+        return _get_origin_source_file_url(observation)
+
+    def get_vulnerability_id_aliases(self, observation: Observation) -> list[dict[str, str]]:
+        return _get_vulnerability_id_aliases(observation)
+
+    def get_cve_found_in(self, observation: Observation) -> list[dict[str, str]]:
+        return _get_cve_found_in_sources(observation)
+
+
+def _get_origin_source_file_url(observation: Observation) -> Optional[str]:
+    origin_source_file_url = None
+
+    if observation.origin_source_file_link:
+        return observation.origin_source_file_link
+
+    if observation.product.repository_prefix and observation.origin_source_file:
+        if not validate_url(observation.product.repository_prefix):
+            return None
+
+        parsed_url = urlparse(observation.product.repository_prefix)
+
+        origin_source_file_url = observation.product.repository_prefix
+        if origin_source_file_url.endswith("/"):
+            origin_source_file_url = origin_source_file_url[:-1]
+        if parsed_url.netloc == "dev.azure.com":
+            origin_source_file_url = _create_azure_devops_url(observation, origin_source_file_url)
+        else:
+            origin_source_file_url = _create_common_url(observation, origin_source_file_url)
+
+    return origin_source_file_url
+
+
+def _create_azure_devops_url(observation: Observation, origin_source_file_url: str) -> str:
+    origin_source_file_url += f"?path={observation.origin_source_file}"
+    if observation.branch:
+        origin_source_file_url += f"&version=GB{observation.branch.name}"
+    if observation.origin_source_line_start:
+        origin_source_file_url += f"&line={observation.origin_source_line_start}"
+        origin_source_file_url += "&lineStartColumn=1&lineEndColumn=1"
+        if observation.origin_source_line_end:
+            origin_source_file_url += f"&lineEnd={observation.origin_source_line_end+1}"
+        else:
+            origin_source_file_url += f"&lineEnd={observation.origin_source_line_start+1}"
+
+
return origin_source_file_url + + +def _create_common_url(observation: Observation, origin_source_file_url: str) -> str: + if observation.branch: + origin_source_file_url += f"/{observation.branch.name}" + origin_source_file_url += f"/{observation.origin_source_file}" + if observation.origin_source_line_start: + origin_source_file_url += "#L" + str(observation.origin_source_line_start) + if ( + observation.origin_source_line_end + and observation.origin_source_line_start != observation.origin_source_line_end + ): + origin_source_file_url += "-L" + str(observation.origin_source_line_end) + + return origin_source_file_url + + +def _get_vulnerability_id_aliases(observation: Observation) -> list[dict[str, str]]: + aliases_list = get_comma_separated_as_list(observation.vulnerability_id_aliases) + return_list = [] + for alias in aliases_list: + return_list.append({"alias": alias}) + return return_list + + +def _get_cve_found_in_sources(observation: Observation) -> list[dict[str, str]]: + sources_list = get_comma_separated_as_list(observation.cve_found_in) + return_list = [] + for source in sources_list: + return_list.append({"source": source}) + return return_list + class ObservationUpdateSerializer(ModelSerializer): - def validate(self, attrs: dict): + def validate(self, attrs: dict) -> dict: self.instance: Observation if self.instance and self.instance.parser.type != Parser_Type.TYPE_MANUAL: raise ValidationError("Only manual observations can be updated") @@ -253,17 +273,13 @@ def validate(self, attrs: dict): def validate_branch(self, branch: Branch) -> Branch: if branch and branch.product != self.instance.product: - raise ValidationError( - "Branch does not belong to the same product as the observation" - ) + raise ValidationError("Branch does not belong to the same product as the observation") return branch def validate_origin_service(self, service: Service) -> Service: if service and service.product != self.instance.product: - raise ValidationError( - "Service does not belong to the same product as the observation" - ) + raise ValidationError("Service does not belong to the same product as the observation") return service @@ -273,7 +289,7 @@ def validate_cvss3_vector(self, cvss3_vector: str) -> str: def validate_cvss4_vector(self, cvss4_vector: str) -> str: return validate_cvss4_vector(cvss4_vector) - def update(self, instance: Observation, validated_data: dict): + def update(self, instance: Observation, validated_data: dict) -> Observation: actual_severity = instance.current_severity actual_status = instance.current_status actual_vex_justification = instance.current_vex_justification @@ -294,17 +310,9 @@ def update(self, instance: Observation, validated_data: dict): observation: Observation = super().update(instance, validated_data) - log_severity = ( - observation.current_severity - if actual_severity != observation.current_severity - else "" - ) + log_severity = observation.current_severity if actual_severity != observation.current_severity else "" - log_status = ( - observation.current_status - if actual_status != observation.current_status - else "" - ) + log_status = observation.current_status if actual_status != observation.current_status else "" log_vex_justification = ( observation.current_vex_justification @@ -314,17 +322,11 @@ def update(self, instance: Observation, validated_data: dict): log_risk_acceptance_expiry_date = ( observation.risk_acceptance_expiry_date - if actual_risk_acceptance_expiry_date - != observation.risk_acceptance_expiry_date + if 
actual_risk_acceptance_expiry_date != observation.risk_acceptance_expiry_date else None ) - if ( - log_severity - or log_status - or log_vex_justification - or log_risk_acceptance_expiry_date - ): + if log_severity or log_status or log_vex_justification or log_risk_acceptance_expiry_date: create_observation_log( observation=observation, severity=log_severity, @@ -335,7 +337,7 @@ def update(self, instance: Observation, validated_data: dict): risk_acceptance_expiry_date=log_risk_acceptance_expiry_date, ) - check_security_gate(observation.product) + check_security_gate_observation(observation) push_observation_to_issue_tracker(observation, get_current_user()) if observation.branch: observation.branch.last_import = timezone.now() @@ -343,7 +345,7 @@ def update(self, instance: Observation, validated_data: dict): return observation - def to_representation(self, instance): + def to_representation(self, instance: Observation) -> dict: serializer = ObservationSerializer(instance) return serializer.data @@ -387,22 +389,16 @@ class Meta: class ObservationCreateSerializer(ModelSerializer): - def validate(self, attrs): + def validate(self, attrs: dict) -> dict: attrs["parser"] = Parser.objects.get(type=Parser_Type.TYPE_MANUAL) attrs["scanner"] = Parser_Type.TYPE_MANUAL attrs["import_last_seen"] = timezone.now() - if attrs.get("branch"): - if attrs["branch"].product != attrs["product"]: - raise ValidationError( - "Branch does not belong to the same product as the observation" - ) + if attrs.get("branch") and attrs["branch"].product != attrs["product"]: + raise ValidationError("Branch does not belong to the same product as the observation") - if attrs.get("service"): - if attrs["service"].product != attrs["product"]: - raise ValidationError( - "Service does not belong to the same product as the observation" - ) + if attrs.get("service") and attrs["service"].product != attrs["product"]: + raise ValidationError("Service does not belong to the same product as the observation") validate_cvss_and_severity(attrs) @@ -414,7 +410,7 @@ def validate_cvss3_vector(self, cvss3_vector: str) -> str: def validate_cvss4_vector(self, cvss4_vector: str) -> str: return validate_cvss4_vector(cvss4_vector) - def create(self, validated_data): + def create(self, validated_data: dict) -> Observation: if validated_data.get("origin_service"): service = Service.objects.get(pk=validated_data["origin_service"].id) validated_data["origin_service_name"] = service.name @@ -433,7 +429,7 @@ def create(self, validated_data): risk_acceptance_expiry_date=observation.risk_acceptance_expiry_date, ) - check_security_gate(observation.product) + check_security_gate_observation(observation) push_observation_to_issue_tracker(observation, get_current_user()) if observation.branch: observation.branch.last_import = timezone.now() @@ -441,7 +437,7 @@ def create(self, validated_data): return observation - def to_representation(self, instance): + def to_representation(self, instance: Observation) -> dict: serializer = ObservationSerializer(instance) return serializer.data @@ -489,7 +485,7 @@ class ObservationAssessmentSerializer(Serializer): severity = ChoiceField(choices=Severity.SEVERITY_CHOICES, required=False) status = ChoiceField(choices=Status.STATUS_CHOICES, required=False) vex_justification = ChoiceField( - choices=VexJustification.VEX_JUSTIFICATION_CHOICES, + choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, required=False, allow_blank=True, ) @@ -502,20 +498,16 @@ class ObservationRemoveAssessmentSerializer(Serializer): class 
ObservationBulkDeleteSerializer(Serializer): - observations = ListField( - child=IntegerField(min_value=1), min_length=0, max_length=100, required=True - ) + observations = ListField(child=IntegerField(min_value=1), min_length=0, max_length=250, required=True) class ObservationBulkAssessmentSerializer(Serializer): severity = ChoiceField(choices=Severity.SEVERITY_CHOICES, required=False) status = ChoiceField(choices=Status.STATUS_CHOICES, required=False) comment = CharField(max_length=4096, required=True) - observations = ListField( - child=IntegerField(min_value=1), min_length=0, max_length=100, required=True - ) + observations = ListField(child=IntegerField(min_value=1), min_length=0, max_length=250, required=True) vex_justification = ChoiceField( - choices=VexJustification.VEX_JUSTIFICATION_CHOICES, + choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, required=False, allow_blank=True, ) @@ -524,18 +516,17 @@ class ObservationBulkAssessmentSerializer(Serializer): class ObservationBulkMarkDuplicatesSerializer(Serializer): observation_id = IntegerField(min_value=1, required=True) - potential_duplicates = ListField( - child=IntegerField(min_value=1), min_length=0, max_length=100, required=True - ) + potential_duplicates = ListField(child=IntegerField(min_value=1), min_length=0, max_length=250, required=True) class NestedObservationSerializer(ModelSerializer): scanner_name = SerializerMethodField() origin_component_name_version = SerializerMethodField() + cve_found_in = SerializerMethodField() class Meta: model = Observation - exclude = ["numerical_severity", "issue_tracker_jira_initial_status"] + exclude = ["numerical_severity", "issue_tracker_jira_initial_status", "origin_source_file_link"] def get_scanner_name(self, observation: Observation) -> str: return get_scanner_name(observation) @@ -543,6 +534,9 @@ def get_scanner_name(self, observation: Observation) -> str: def get_origin_component_name_version(self, observation: Observation) -> str: return get_origin_component_name_version(observation) + def get_cve_found_in(self, observation: Observation) -> list[dict[str, str]]: + return _get_cve_found_in_sources(observation) + class ObservationLogSerializer(ModelSerializer): observation_data = ObservationSerializer(source="observation") @@ -589,20 +583,18 @@ class Meta: class ObservationLogApprovalSerializer(Serializer): - assessment_status = ChoiceField( - choices=Assessment_Status.ASSESSMENT_STATUS_CHOICES_APPROVAL, required=False - ) + assessment_status = ChoiceField(choices=Assessment_Status.ASSESSMENT_STATUS_CHOICES_APPROVAL, required=False) approval_remark = CharField(max_length=255, required=True) class ObservationLogBulkApprovalSerializer(Serializer): - assessment_status = ChoiceField( - choices=Assessment_Status.ASSESSMENT_STATUS_CHOICES_APPROVAL, required=False - ) + assessment_status = ChoiceField(choices=Assessment_Status.ASSESSMENT_STATUS_CHOICES_APPROVAL, required=False) approval_remark = CharField(max_length=255, required=True) - observation_logs = ListField( - child=IntegerField(min_value=1), min_length=0, max_length=100, required=True - ) + observation_logs = ListField(child=IntegerField(min_value=1), min_length=0, max_length=250, required=True) + + +class ObservationLogBulkDeleteSerializer(Serializer): + observation_logs = ListField(child=IntegerField(min_value=1), min_length=0, max_length=250, required=True) class PotentialDuplicateSerializer(ModelSerializer): diff --git a/backend/application/core/api/serializers_product.py 
b/backend/application/core/api/serializers_product.py index f6b84bce2..51a685ecb 100644 --- a/backend/application/core/api/serializers_product.py +++ b/backend/application/core/api/serializers_product.py @@ -1,9 +1,14 @@ from datetime import date from typing import Optional +from django.core.validators import MaxValueValidator, MinValueValidator from rest_framework.serializers import ( + CharField, + DateField, IntegerField, + ListField, ModelSerializer, + Serializer, SerializerMethodField, ValidationError, ) @@ -12,13 +17,14 @@ NestedAuthorizationGroupSerializer, UserListSerializer, ) -from application.access_control.services.authorization import get_highest_user_role -from application.access_control.services.roles_permissions import ( +from application.access_control.services.current_user import get_current_user +from application.authorization.services.authorization import get_highest_user_role +from application.authorization.services.roles_permissions import ( Permissions, Roles, get_permissions_for_role, ) -from application.commons.services.global_request import get_current_user +from application.commons.models import Settings from application.core.api.serializers_helpers import ( validate_cpe23, validate_purl, @@ -37,17 +43,13 @@ get_product_authorization_group_member, get_product_member, ) -from application.core.services.product import ( - get_product_group_license_count, - get_product_group_observation_count, -) from application.core.services.risk_acceptance_expiry import ( calculate_risk_acceptance_expiry_date, ) -from application.core.types import Assessment_Status, Severity, Status +from application.core.types import Assessment_Status, Status +from application.import_observations.models import Api_Configuration from application.issue_tracker.types import Issue_Tracker from application.licenses.models import License_Component -from application.licenses.types import License_Policy_Evaluation_Result from application.rules.models import Rule from application.rules.types import Rule_Status @@ -55,35 +57,41 @@ class ProductCoreSerializer(ModelSerializer): permissions = SerializerMethodField() - def get_permissions(self, obj: Product) -> list[Permissions]: + def get_permissions(self, obj: Product) -> Optional[set[Permissions]]: return get_permissions_for_role(get_highest_user_role(obj)) class Meta: model = Product fields = "__all__" - def validate(self, attrs: dict): - if attrs.get("repository_branch_housekeeping_active"): + def validate(self, attrs: dict) -> dict: + settings = Settings.load() + + if attrs.get("repository_branch_housekeeping_active") is True: if not attrs.get("repository_branch_housekeeping_keep_inactive_days"): - attrs["repository_branch_housekeeping_keep_inactive_days"] = 1 - else: + attrs["repository_branch_housekeeping_keep_inactive_days"] = ( + settings.branch_housekeeping_keep_inactive_days + ) + + if attrs.get("repository_branch_housekeeping_active") is False: attrs["repository_branch_housekeeping_keep_inactive_days"] = None attrs["repository_branch_housekeeping_exempt_branches"] = "" - if attrs.get("security_gate_active"): + if attrs.get("security_gate_active") is True: if not attrs.get("security_gate_threshold_critical"): - attrs["security_gate_threshold_critical"] = 0 + attrs["security_gate_threshold_critical"] = settings.security_gate_threshold_critical if not attrs.get("security_gate_threshold_high"): - attrs["security_gate_threshold_high"] = 0 + attrs["security_gate_threshold_high"] = settings.security_gate_threshold_high if not 
attrs.get("security_gate_threshold_medium"): - attrs["security_gate_threshold_medium"] = 0 + attrs["security_gate_threshold_medium"] = settings.security_gate_threshold_medium if not attrs.get("security_gate_threshold_low"): - attrs["security_gate_threshold_low"] = 0 + attrs["security_gate_threshold_low"] = settings.security_gate_threshold_low if not attrs.get("security_gate_threshold_none"): - attrs["security_gate_threshold_none"] = 0 + attrs["security_gate_threshold_none"] = settings.security_gate_threshold_none if not attrs.get("security_gate_threshold_unknown"): - attrs["security_gate_threshold_unknown"] = 0 - else: + attrs["security_gate_threshold_unknown"] = settings.security_gate_threshold_unknown + + if attrs.get("security_gate_active") is False: attrs["security_gate_threshold_critical"] = None attrs["security_gate_threshold_high"] = None attrs["security_gate_threshold_medium"] = None @@ -95,63 +103,21 @@ def validate(self, attrs: dict): class ProductGroupSerializer(ProductCoreSerializer): - open_critical_observation_count = SerializerMethodField() - open_high_observation_count = SerializerMethodField() - open_medium_observation_count = SerializerMethodField() - open_low_observation_count = SerializerMethodField() - open_none_observation_count = SerializerMethodField() - open_unknown_observation_count = SerializerMethodField() - forbidden_licenses_count = SerializerMethodField() - review_required_licenses_count = SerializerMethodField() - unknown_licenses_count = SerializerMethodField() - allowed_licenses_count = SerializerMethodField() - ignored_licenses_count = SerializerMethodField() + active_critical_observation_count = IntegerField(read_only=True) + active_high_observation_count = IntegerField(read_only=True) + active_medium_observation_count = IntegerField(read_only=True) + active_low_observation_count = IntegerField(read_only=True) + active_none_observation_count = IntegerField(read_only=True) + active_unknown_observation_count = IntegerField(read_only=True) + forbidden_licenses_count = IntegerField(read_only=True) + review_required_licenses_count = IntegerField(read_only=True) + unknown_licenses_count = IntegerField(read_only=True) + allowed_licenses_count = IntegerField(read_only=True) + ignored_licenses_count = IntegerField(read_only=True) + products_count = SerializerMethodField() product_rule_approvals = SerializerMethodField() - def get_open_critical_observation_count(self, obj: Product) -> int: - return get_product_group_observation_count(obj, Severity.SEVERITY_CRITICAL) - - def get_open_high_observation_count(self, obj: Product) -> int: - return get_product_group_observation_count(obj, Severity.SEVERITY_HIGH) - - def get_open_medium_observation_count(self, obj: Product) -> int: - return get_product_group_observation_count(obj, Severity.SEVERITY_MEDIUM) - - def get_open_low_observation_count(self, obj: Product) -> int: - return get_product_group_observation_count(obj, Severity.SEVERITY_LOW) - - def get_open_none_observation_count(self, obj: Product) -> int: - return get_product_group_observation_count(obj, Severity.SEVERITY_NONE) - - def get_open_unknown_observation_count(self, obj: Product) -> int: - return get_product_group_observation_count(obj, Severity.SEVERITY_UNKNOWN) - - def get_forbidden_licenses_count(self, obj: Product) -> int: - return get_product_group_license_count( - obj, License_Policy_Evaluation_Result.RESULT_FORBIDDEN - ) - - def get_review_required_licenses_count(self, obj: Product) -> int: - return get_product_group_license_count( - obj, 
License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED - ) - - def get_unknown_licenses_count(self, obj: Product) -> int: - return get_product_group_license_count( - obj, License_Policy_Evaluation_Result.RESULT_UNKNOWN - ) - - def get_allowed_licenses_count(self, obj: Product) -> int: - return get_product_group_license_count( - obj, License_Policy_Evaluation_Result.RESULT_ALLOWED - ) - - def get_ignored_licenses_count(self, obj: Product) -> int: - return get_product_group_license_count( - obj, License_Policy_Evaluation_Result.RESULT_IGNORED - ) - def get_products_count(self, obj: Product) -> int: return obj.products.count() @@ -159,9 +125,7 @@ def get_product_rule_approvals(self, obj: Product) -> int: if not obj.product_rules_need_approval: return 0 - return Rule.objects.filter( - product=obj, approval_status=Rule_Status.RULE_STATUS_NEEDS_APPROVAL - ).count() + return Rule.objects.filter(product=obj, approval_status=Rule_Status.RULE_STATUS_NEEDS_APPROVAL).count() class Meta: model = Product @@ -171,12 +135,12 @@ class Meta: "description", "products_count", "permissions", - "open_critical_observation_count", - "open_high_observation_count", - "open_medium_observation_count", - "open_low_observation_count", - "open_none_observation_count", - "open_unknown_observation_count", + "active_critical_observation_count", + "active_high_observation_count", + "active_medium_observation_count", + "active_low_observation_count", + "active_none_observation_count", + "active_unknown_observation_count", "repository_branch_housekeeping_active", "repository_branch_housekeeping_keep_inactive_days", "repository_branch_housekeeping_exempt_branches", @@ -224,16 +188,13 @@ class Meta: fields = ["id", "name"] -class ProductSerializer( - ProductCoreSerializer -): # pylint: disable=too-many-public-methods - # all these methods are needed - open_critical_observation_count = IntegerField(read_only=True) - open_high_observation_count = IntegerField(read_only=True) - open_medium_observation_count = IntegerField(read_only=True) - open_low_observation_count = IntegerField(read_only=True) - open_none_observation_count = IntegerField(read_only=True) - open_unknown_observation_count = IntegerField(read_only=True) +class ProductListSerializer(ProductCoreSerializer): + active_critical_observation_count = IntegerField(read_only=True) + active_high_observation_count = IntegerField(read_only=True) + active_medium_observation_count = IntegerField(read_only=True) + active_low_observation_count = IntegerField(read_only=True) + active_none_observation_count = IntegerField(read_only=True) + active_unknown_observation_count = IntegerField(read_only=True) forbidden_licenses_count = IntegerField(read_only=True) review_required_licenses_count = IntegerField(read_only=True) unknown_licenses_count = IntegerField(read_only=True) @@ -241,10 +202,28 @@ class ProductSerializer( ignored_licenses_count = IntegerField(read_only=True) product_group_name = SerializerMethodField() + repository_default_branch_name = SerializerMethodField() + + class Meta: + model = Product + exclude = ["is_product_group", "members", "authorization_group_members"] + + def get_product_group_name(self, obj: Product) -> str: + if not obj.product_group: + return "" + return obj.product_group.name + + def get_repository_default_branch_name(self, obj: Product) -> str: + if not obj.repository_default_branch: + return "" + return obj.repository_default_branch.name + + +class ProductSerializer(ProductListSerializer): # pylint: disable=too-many-public-methods + # all these 
methods are needed product_group_repository_branch_housekeeping_active = SerializerMethodField() product_group_security_gate_active = SerializerMethodField() product_group_assessments_need_approval = SerializerMethodField() - repository_default_branch_name = SerializerMethodField() observation_reviews = SerializerMethodField() observation_log_approvals = SerializerMethodField() has_services = SerializerMethodField() @@ -255,19 +234,16 @@ class ProductSerializer( has_branches = SerializerMethodField() has_licenses = SerializerMethodField() product_group_license_policy = SerializerMethodField() + has_api_configurations = SerializerMethodField() + has_branch_osv_linux_distribution = SerializerMethodField() + has_concluded_comments = SerializerMethodField() class Meta: model = Product + read_only_fields = ["repository_default_branch"] exclude = ["is_product_group", "members", "authorization_group_members"] - def get_product_group_name(self, obj: Product) -> str: - if not obj.product_group: - return "" - return obj.product_group.name - - def get_product_group_repository_branch_housekeeping_active( - self, obj: Product - ) -> Optional[bool]: + def get_product_group_repository_branch_housekeeping_active(self, obj: Product) -> Optional[bool]: if not obj.product_group: return None return obj.product_group.repository_branch_housekeeping_active @@ -282,22 +258,12 @@ def get_product_group_assessments_need_approval(self, obj: Product) -> bool: return False return obj.product_group.assessments_need_approval - def get_repository_default_branch_name(self, obj: Product) -> str: - if not obj.repository_default_branch: - return "" - return obj.repository_default_branch.name - def get_observation_reviews(self, obj: Product) -> int: - return Observation.objects.filter( - product=obj, current_status=Status.STATUS_IN_REVIEW - ).count() + return Observation.objects.filter(product=obj, current_status=Status.STATUS_IN_REVIEW).count() def get_observation_log_approvals(self, obj: Product) -> int: if obj.product_group: - if ( - not obj.product_group.assessments_need_approval - and not obj.assessments_need_approval - ): + if not obj.product_group.assessments_need_approval and not obj.assessments_need_approval: return 0 else: if not obj.assessments_need_approval: @@ -318,22 +284,15 @@ def get_product_group_product_rules_need_approval(self, obj: Product) -> bool: def get_product_rule_approvals(self, obj: Product) -> int: if obj.product_group: - if ( - not obj.product_group.product_rules_need_approval - and not obj.product_rules_need_approval - ): + if not obj.product_group.product_rules_need_approval and not obj.product_rules_need_approval: return 0 else: if not obj.product_rules_need_approval: return 0 - return Rule.objects.filter( - product=obj, approval_status=Rule_Status.RULE_STATUS_NEEDS_APPROVAL - ).count() + return Rule.objects.filter(product=obj, approval_status=Rule_Status.RULE_STATUS_NEEDS_APPROVAL).count() - def get_risk_acceptance_expiry_date_calculated( - self, obj: Product - ) -> Optional[date]: + def get_risk_acceptance_expiry_date_calculated(self, obj: Product) -> Optional[date]: return calculate_risk_acceptance_expiry_date(obj) def get_product_group_new_observations_in_review(self, obj: Product) -> bool: @@ -352,7 +311,16 @@ def get_product_group_license_policy(self, obj: Product) -> Optional[int]: return None return obj.product_group.license_policy.id - def validate(self, attrs: dict): # pylint: disable=too-many-branches + def get_has_api_configurations(self, obj: Product) -> bool: + return 
Api_Configuration.objects.filter(product=obj).exists() + + def get_has_branch_osv_linux_distribution(self, obj: Product) -> bool: + return Branch.objects.filter(product=obj).exclude(osv_linux_distribution="").exists() + + def get_has_concluded_comments(self, obj: Product) -> bool: + return License_Component.objects.filter(product=obj).exclude(manual_concluded_comment="").exists() + + def validate(self, attrs: dict) -> dict: # pylint: disable=too-many-branches # There are quite a lot of branches, but at least they are not nested too much if attrs.get("issue_tracker_type") == Issue_Tracker.ISSUE_TRACKER_GITHUB: attrs["issue_tracker_base_url"] = "https://api.github.com" @@ -368,45 +336,29 @@ def validate(self, attrs: dict): # pylint: disable=too-many-branches and not attrs.get("issue_tracker_api_key") and not attrs.get("issue_tracker_project_id") ): - raise ValidationError( - "Either all or none of the issue tracker fields must be set" - ) + raise ValidationError("Either all or none of the issue tracker fields must be set") if attrs.get("issue_tracker_active") and not attrs.get("issue_tracker_type"): - raise ValidationError( - "Issue tracker data must be set when issue tracking is active" - ) + raise ValidationError("Issue tracker data must be set when issue tracking is active") if attrs.get("issue_tracker_type") == Issue_Tracker.ISSUE_TRACKER_JIRA: if not attrs.get("issue_tracker_username"): - raise ValidationError( - "Username must be set when issue tracker type is Jira" - ) + raise ValidationError("Username must be set when issue tracker type is Jira") if not attrs.get("issue_tracker_issue_type"): - raise ValidationError( - "Issue type must be set when issue tracker type is Jira" - ) + raise ValidationError("Issue type must be set when issue tracker type is Jira") if not attrs.get("issue_tracker_status_closed"): - raise ValidationError( - "Closed status must be set when issue tracker type is Jira" - ) + raise ValidationError("Closed status must be set when issue tracker type is Jira") - if ( - attrs.get("issue_tracker_type") - and attrs.get("issue_tracker_type") != Issue_Tracker.ISSUE_TRACKER_JIRA - ): + if attrs.get("issue_tracker_type") and attrs.get("issue_tracker_type") != Issue_Tracker.ISSUE_TRACKER_JIRA: if attrs.get("issue_tracker_username"): - raise ValidationError( - "Username must not be set when issue tracker type is not Jira" - ) + raise ValidationError("Username must not be set when issue tracker type is not Jira") if attrs.get("issue_tracker_issue_type"): - raise ValidationError( - "Isse type must not be set when issue tracker type is not Jira" - ) + raise ValidationError("Isse type must not be set when issue tracker type is not Jira") if attrs.get("issue_tracker_status_closed"): - raise ValidationError( - "Closed status must not be set when issue tracker type is not Jira" - ) + raise ValidationError("Closed status must not be set when issue tracker type is not Jira") + + if attrs.get("osv_linux_release") and not attrs.get("osv_linux_distribution"): + raise ValidationError("osv_linux_release cannot be set without osv_linux_distribution") return super().validate(attrs) @@ -442,7 +394,7 @@ class Meta: model = Product exclude = ["members", "authorization_group_members"] - def get_permissions(self, product: Product) -> list[Permissions]: + def get_permissions(self, product: Product) -> Optional[set[Permissions]]: return get_permissions_for_role(get_highest_user_role(product)) def get_product_group_assessments_need_approval(self, obj: Product) -> bool: @@ -455,9 +407,7 @@ def 
get_product_group_product_rules_need_approval(self, obj: Product) -> bool: return False return obj.product_group.product_rules_need_approval - def get_risk_acceptance_expiry_date_calculated( - self, obj: Product - ) -> Optional[date]: + def get_risk_acceptance_expiry_date_calculated(self, obj: Product) -> Optional[date]: return calculate_risk_acceptance_expiry_date(obj) @@ -493,72 +443,62 @@ class Meta: model = Product_Member fields = "__all__" - def validate(self, attrs: dict): + def validate(self, attrs: dict) -> dict: self.instance: Product_Member data_product: Optional[Product] = attrs.get("product") data_user = attrs.get("user") - if self.instance is not None and ( - (data_product and data_product != self.instance.product) - or (data_user and data_user != self.instance.user) - ): - raise ValidationError("Product and user cannot be changed") + current_user = get_current_user() if self.instance is None: + if data_product is None: + raise ValidationError("Product must be set") + if data_user is None: + raise ValidationError("User must be set") + product_member = get_product_member(data_product, data_user) if product_member: - raise ValidationError( - f"Product member {data_product} / {data_user} already exists" - ) + raise ValidationError(f"Product member {data_product} / {data_user} already exists") - current_user = get_current_user() - if self.instance is not None: - highest_user_role = get_highest_user_role( - self.instance.product, current_user - ) - else: highest_user_role = get_highest_user_role(data_product, current_user) + else: + if (data_product and data_product != self.instance.product) or ( + data_user and data_user != self.instance.user + ): + raise ValidationError("Product and user cannot be changed") - if highest_user_role != Roles.Owner and not ( - current_user and current_user.is_superuser - ): + highest_user_role = get_highest_user_role(self.instance.product, current_user) + + if highest_user_role != Roles.Owner and not (current_user and current_user.is_superuser): if attrs.get("role") == Roles.Owner: raise ValidationError("You are not permitted to add a member as Owner") - if ( - attrs.get("role") != Roles.Owner - and self.instance is not None - and self.instance.role == Roles.Owner - ): + if attrs.get("role") != Roles.Owner and self.instance is not None and self.instance.role == Roles.Owner: raise ValidationError("You are not permitted to change the Owner role") return attrs class ProductAuthorizationGroupMemberSerializer(ModelSerializer): - authorization_group_data = NestedAuthorizationGroupSerializer( - source="authorization_group", read_only=True - ) + authorization_group_data = NestedAuthorizationGroupSerializer(source="authorization_group", read_only=True) product_data = NestedProductSerializerSmall(source="product", read_only=True) class Meta: model = Product_Authorization_Group_Member fields = "__all__" - def validate(self, attrs: dict): + def validate(self, attrs: dict) -> dict: self.instance: Product_Authorization_Group_Member data_product: Optional[Product] = attrs.get("product") data_authorization_group = attrs.get("authorization_group") - if self.instance is not None and ( - (data_product and data_product != self.instance.product) - or ( - data_authorization_group - and data_authorization_group != self.instance.authorization_group - ) - ): - raise ValidationError("Product and authorization group cannot be changed") + current_user = get_current_user() if self.instance is None: + if data_product is None: + raise ValidationError("Product must be set") + if 
data_authorization_group is None: + raise ValidationError("Authorization group must be set") + product_authorization_group_member = get_product_authorization_group_member( data_product, data_authorization_group ) @@ -566,48 +506,44 @@ def validate(self, attrs: dict): raise ValidationError( f"Product authorization group member {data_product} / {data_authorization_group} already exists" ) - - current_user = get_current_user() - if self.instance is not None: - highest_user_role = get_highest_user_role( - self.instance.product, current_user - ) - else: highest_user_role = get_highest_user_role(data_product, current_user) + else: + if (data_product and data_product != self.instance.product) or ( + data_authorization_group and data_authorization_group != self.instance.authorization_group + ): + raise ValidationError("Product and authorization group cannot be changed") + highest_user_role = get_highest_user_role(self.instance.product, current_user) - if highest_user_role != Roles.Owner and not ( - current_user and current_user.is_superuser - ): + if highest_user_role != Roles.Owner and not (current_user and current_user.is_superuser): if attrs.get("role") == Roles.Owner: raise ValidationError("You are not permitted to add a member as Owner") - if ( - attrs.get("role") != Roles.Owner - and self.instance is not None - and self.instance.role == Roles.Owner - ): + if attrs.get("role") != Roles.Owner and self.instance is not None and self.instance.role == Roles.Owner: raise ValidationError("You are not permitted to change the Owner role") return attrs +class ProductApiTokenSerializer(Serializer): + id = IntegerField(read_only=True) + product = IntegerField(validators=[MinValueValidator(1)]) + role = IntegerField(validators=[MinValueValidator(1), MaxValueValidator(5)]) + name = CharField(max_length=32) + expiration_date = DateField(required=False, allow_null=True) + + class BranchSerializer(ModelSerializer): name_with_product = SerializerMethodField() - is_default_branch = SerializerMethodField() - open_critical_observation_count = SerializerMethodField() - open_high_observation_count = SerializerMethodField() - open_medium_observation_count = SerializerMethodField() - open_low_observation_count = SerializerMethodField() - open_none_observation_count = SerializerMethodField() - open_unknown_observation_count = SerializerMethodField() - forbidden_licenses_count = SerializerMethodField() - review_required_licenses_count = SerializerMethodField() - unknown_licenses_count = SerializerMethodField() - allowed_licenses_count = SerializerMethodField() - ignored_licenses_count = SerializerMethodField() - - class Meta: - model = Branch - fields = "__all__" + active_critical_observation_count = IntegerField(read_only=True) + active_high_observation_count = IntegerField(read_only=True) + active_medium_observation_count = IntegerField(read_only=True) + active_low_observation_count = IntegerField(read_only=True) + active_none_observation_count = IntegerField(read_only=True) + active_unknown_observation_count = IntegerField(read_only=True) + forbidden_licenses_count = IntegerField(read_only=True) + review_required_licenses_count = IntegerField(read_only=True) + unknown_licenses_count = IntegerField(read_only=True) + allowed_licenses_count = IntegerField(read_only=True) + ignored_licenses_count = IntegerField(read_only=True) def validate_purl(self, purl: str) -> str: return validate_purl(purl) @@ -618,41 +554,15 @@ def validate_cpe23(self, cpe23: str) -> str: def get_name_with_product(self, obj: Service) -> str: 
return f"{obj.name} ({obj.product.name})" - def get_is_default_branch(self, obj: Branch) -> bool: - return obj.product.repository_default_branch == obj - - def get_open_critical_observation_count(self, obj: Branch) -> int: - return obj.open_critical_observation_count - - def get_open_high_observation_count(self, obj: Branch) -> int: - return obj.open_high_observation_count - - def get_open_medium_observation_count(self, obj: Branch) -> int: - return obj.open_medium_observation_count - - def get_open_low_observation_count(self, obj: Branch) -> int: - return obj.open_low_observation_count - - def get_open_none_observation_count(self, obj: Branch) -> int: - return obj.open_none_observation_count - - def get_open_unknown_observation_count(self, obj: Branch) -> int: - return obj.open_unknown_observation_count - - def get_forbidden_licenses_count(self, obj: Branch) -> int: - return obj.forbidden_licenses_count - - def get_review_required_licenses_count(self, obj: Branch) -> int: - return obj.review_required_licenses_count - - def get_unknown_licenses_count(self, obj: Branch) -> int: - return obj.unknown_licenses_count + class Meta: + model = Branch + fields = "__all__" - def get_allowed_licenses_count(self, obj: Branch) -> int: - return obj.allowed_licenses_count + def validate(self, attrs: dict) -> dict: # pylint: disable=too-many-branches + if attrs.get("osv_linux_release") and not attrs.get("osv_linux_distribution"): + raise ValidationError("osv_linux_release cannot be set without osv_linux_distribution") - def get_ignored_licenses_count(self, obj: Branch) -> int: - return obj.ignored_licenses_count + return super().validate(attrs) class BranchNameSerializer(ModelSerializer): @@ -662,18 +572,23 @@ class Meta: model = Branch fields = ["id", "name", "name_with_product"] - def get_name_with_product(self, obj: Service) -> str: + def get_name_with_product(self, obj: Branch) -> str: return f"{obj.name} ({obj.product.name})" class ServiceSerializer(ModelSerializer): name_with_product = SerializerMethodField() - open_critical_observation_count = SerializerMethodField() - open_high_observation_count = SerializerMethodField() - open_medium_observation_count = SerializerMethodField() - open_low_observation_count = SerializerMethodField() - open_none_observation_count = SerializerMethodField() - open_unknown_observation_count = SerializerMethodField() + active_critical_observation_count = IntegerField(read_only=True) + active_high_observation_count = IntegerField(read_only=True) + active_medium_observation_count = IntegerField(read_only=True) + active_low_observation_count = IntegerField(read_only=True) + active_none_observation_count = IntegerField(read_only=True) + active_unknown_observation_count = IntegerField(read_only=True) + forbidden_licenses_count = IntegerField(read_only=True) + review_required_licenses_count = IntegerField(read_only=True) + unknown_licenses_count = IntegerField(read_only=True) + allowed_licenses_count = IntegerField(read_only=True) + ignored_licenses_count = IntegerField(read_only=True) class Meta: model = Service @@ -682,20 +597,23 @@ class Meta: def get_name_with_product(self, obj: Service) -> str: return f"{obj.name} ({obj.product.name})" - def get_open_critical_observation_count(self, obj: Service) -> int: - return obj.open_critical_observation_count - def get_open_high_observation_count(self, obj: Service) -> int: - return obj.open_high_observation_count +class ServiceNameSerializer(ModelSerializer): + name_with_product = SerializerMethodField() + + class Meta: + model = 
Branch + fields = ["id", "name", "name_with_product"] + + def get_name_with_product(self, obj: Service) -> str: + return f"{obj.name} ({obj.product.name})" - def get_open_medium_observation_count(self, obj: Service) -> int: - return obj.open_medium_observation_count - def get_open_low_observation_count(self, obj: Service) -> int: - return obj.open_low_observation_count +class PURLTypeElementSerializer(Serializer): + id = CharField() + name = CharField() - def get_open_none_observation_count(self, obj: Service) -> int: - return obj.open_none_observation_count - def get_open_unknown_observation_count(self, obj: Service) -> int: - return obj.open_unknown_observation_count +class PURLTypeSerializer(Serializer): + count = IntegerField() + results = ListField(child=PURLTypeElementSerializer()) diff --git a/backend/application/core/api/views.py b/backend/application/core/api/views.py index 2cef74b9b..90b5b3d4e 100644 --- a/backend/application/core/api/views.py +++ b/backend/application/core/api/views.py @@ -1,6 +1,9 @@ +import logging +import re from tempfile import NamedTemporaryFile from typing import Any +from django.db.models import QuerySet from django.http import HttpResponse from django.utils import timezone from django_filters.rest_framework import DjangoFilterBackend @@ -8,18 +11,34 @@ from rest_framework.decorators import action from rest_framework.exceptions import NotFound, ValidationError from rest_framework.filters import SearchFilter -from rest_framework.mixins import DestroyModelMixin, ListModelMixin, RetrieveModelMixin +from rest_framework.mixins import ListModelMixin, RetrieveModelMixin from rest_framework.permissions import IsAuthenticated from rest_framework.request import Request from rest_framework.response import Response -from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT -from rest_framework.viewsets import GenericViewSet, ModelViewSet +from rest_framework.serializers import BaseSerializer +from rest_framework.status import ( + HTTP_200_OK, + HTTP_201_CREATED, + HTTP_204_NO_CONTENT, + HTTP_404_NOT_FOUND, +) +from rest_framework.views import APIView +from rest_framework.viewsets import GenericViewSet, ModelViewSet, ViewSet -from application.access_control.services.authorization import user_has_permission_or_403 -from application.access_control.services.roles_permissions import Permissions -from application.commons.services.global_request import get_current_user +from application.access_control.api.serializers import ( + ApiTokenCreateResponseSerializer, +) +from application.access_control.queries.api_token import get_api_token_by_id +from application.access_control.services.current_user import get_current_user +from application.authorization.services.authorization import ( + user_has_permission, + user_has_permission_or_403, +) +from application.authorization.services.roles_permissions import Permissions +from application.commons.services.log_message import format_log_message from application.core.api.filters import ( BranchFilter, + ComponentFilter, EvidenceFilter, ObservationFilter, ObservationLogFilter, @@ -39,6 +58,10 @@ UserHasProductPermission, UserHasServicePermission, ) +from application.core.api.serializers_component import ( + ComponentNameSerializer, + ComponentSerializer, +) from application.core.api.serializers_observation import ( CountSerializer, EvidenceSerializer, @@ -50,6 +73,7 @@ ObservationListSerializer, ObservationLogApprovalSerializer, ObservationLogBulkApprovalSerializer, + ObservationLogBulkDeleteSerializer, 
ObservationLogListSerializer, ObservationLogSerializer, ObservationRemoveAssessmentSerializer, @@ -61,15 +85,21 @@ from application.core.api.serializers_product import ( BranchNameSerializer, BranchSerializer, + ProductApiTokenSerializer, ProductAuthorizationGroupMemberSerializer, ProductGroupSerializer, + ProductListSerializer, ProductMemberSerializer, ProductNameSerializer, ProductSerializer, + PURLTypeElementSerializer, + PURLTypeSerializer, + ServiceNameSerializer, ServiceSerializer, ) from application.core.models import ( Branch, + Component, Evidence, Observation, Observation_Log, @@ -80,6 +110,7 @@ Service, ) from application.core.queries.branch import get_branches +from application.core.queries.component import get_components from application.core.queries.observation import ( get_current_observation_log, get_evidences, @@ -113,10 +144,15 @@ from application.core.services.potential_duplicates import ( set_potential_duplicate_both_ways, ) +from application.core.services.product_api_token import ( + create_product_api_token, + get_product_api_tokens, + revoke_product_api_token, +) +from application.core.services.purl_type import get_purl_type, get_purl_types from application.core.services.security_gate import check_security_gate from application.core.types import Assessment_Status, Status from application.issue_tracker.services.issue_tracker import ( - push_deleted_observation_to_issue_tracker, push_observations_to_issue_tracker, ) from application.licenses.api.serializers import LicenseComponentBulkDeleteSerializer @@ -129,6 +165,8 @@ ) from application.rules.services.rule_engine import Rule_Engine +logger = logging.getLogger("secobserve.core") + class ProductGroupViewSet(ModelViewSet): serializer_class = ProductGroupSerializer @@ -138,8 +176,8 @@ class ProductGroupViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] - def get_queryset(self): - return get_products(is_product_group=True) + def get_queryset(self) -> QuerySet[Product]: + return get_products(is_product_group=True, with_annotations=True) class ProductGroupNameViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): @@ -150,7 +188,7 @@ class ProductGroupNameViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] - def get_queryset(self): + def get_queryset(self) -> QuerySet[Product]: return get_products(is_product_group=True) @@ -162,13 +200,20 @@ class ProductViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] - def get_queryset(self): + def get_queryset(self) -> QuerySet[Product]: return ( get_products(is_product_group=False, with_annotations=True) .select_related("product_group") + .select_related("product_group__license_policy") .select_related("repository_default_branch") ) + def get_serializer_class(self) -> type[BaseSerializer[Any]]: + if self.action == "list": + return ProductListSerializer + + return super().get_serializer_class() + @extend_schema( methods=["GET"], responses={200: None}, @@ -177,19 +222,18 @@ def get_queryset(self): ], ) @action(detail=True, methods=["get"]) - def export_observations_excel(self, request, pk=None): + def export_observations_excel(self, request: Request, pk: int) -> HttpResponse: product = self.__get_product(pk) - status = self.request.query_params.get("status") - if status and (status, status) not in Status.STATUS_CHOICES: - raise ValidationError(f"Status {status} is not a valid choice") + statuses = 
self.request.query_params.getlist("status") + for status in statuses: + if status and (status, status) not in Status.STATUS_CHOICES: + raise ValidationError(f"Status {status} is not a valid choice") - workbook = export_observations_excel(product, status) + workbook = export_observations_excel(product, statuses) with NamedTemporaryFile() as tmp: - workbook.save( - tmp.name # nosemgrep: python.lang.correctness.tempfile.flush.tempfile-without-flush - ) + workbook.save(tmp.name) # nosemgrep: python.lang.correctness.tempfile.flush.tempfile-without-flush # export works fine without .flush() tmp.seek(0) stream = tmp.read() @@ -210,30 +254,29 @@ def export_observations_excel(self, request, pk=None): ], ) @action(detail=True, methods=["get"]) - def export_observations_csv(self, request, pk=None): + def export_observations_csv(self, request: Request, pk: int) -> HttpResponse: product = self.__get_product(pk) - status = self.request.query_params.get("status") - if status and (status, status) not in Status.STATUS_CHOICES: - raise ValidationError(f"Status {status} is not a valid choice") + statuses = self.request.query_params.getlist("status") + for status in statuses: + if status and (status, status) not in Status.STATUS_CHOICES: + raise ValidationError(f"Status {status} is not a valid choice") response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = "attachment; filename=observations.csv" - export_observations_csv(response, product, status) + export_observations_csv(response, product, statuses) return response @action(detail=True, methods=["get"]) - def export_license_components_excel(self, request, pk=None): + def export_license_components_excel(self, request: Request, pk: int) -> HttpResponse: product = self.__get_product(pk) workbook = export_license_components_excel(product) with NamedTemporaryFile() as tmp: - workbook.save( - tmp.name # nosemgrep: python.lang.correctness.tempfile.flush.tempfile-without-flush - ) + workbook.save(tmp.name) # nosemgrep: python.lang.correctness.tempfile.flush.tempfile-without-flush # export works fine without .flush() tmp.seek(0) stream = tmp.read() @@ -251,13 +294,11 @@ def export_license_components_excel(self, request, pk=None): responses={200: None}, ) @action(detail=True, methods=["get"]) - def export_license_components_csv(self, request, pk=None): + def export_license_components_csv(self, request: Request, pk: int) -> HttpResponse: product = self.__get_product(pk) response = HttpResponse(content_type="text/csv") - response["Content-Disposition"] = ( - "attachment; filename=license_observations.csv" - ) + response["Content-Disposition"] = "attachment; filename=license_observations.csv" export_license_components_csv(response, product) @@ -269,7 +310,7 @@ def export_license_components_csv(self, request, pk=None): responses={HTTP_204_NO_CONTENT: None}, ) @action(detail=True, methods=["post"]) - def apply_rules(self, request, pk=None): + def apply_rules(self, request: Request, pk: int) -> Response: product = self.__get_product(pk) user_has_permission_or_403(product, Permissions.Product_Rule_Apply) @@ -284,7 +325,7 @@ def apply_rules(self, request, pk=None): responses={HTTP_204_NO_CONTENT: None}, ) @action(detail=True, methods=["post"]) - def observations_bulk_assessment(self, request, pk=None): + def observations_bulk_assessment(self, request: Request, pk: int) -> Response: product = self.__get_product(pk) user_has_permission_or_403(product, Permissions.Observation_Assessment) @@ -298,12 +339,8 @@ def observations_bulk_assessment(self, 
request, pk=None): new_status=request_serializer.validated_data.get("status"), comment=request_serializer.validated_data.get("comment"), observation_ids=request_serializer.validated_data.get("observations"), - new_vex_justification=request_serializer.validated_data.get( - "vex_justification" - ), - new_risk_acceptance_expiry_date=request_serializer.validated_data.get( - "risk_acceptance_expiry_date" - ), + new_vex_justification=request_serializer.validated_data.get("vex_justification"), + new_risk_acceptance_expiry_date=request_serializer.validated_data.get("risk_acceptance_expiry_date"), ) return Response(status=HTTP_204_NO_CONTENT) @@ -313,7 +350,7 @@ def observations_bulk_assessment(self, request, pk=None): responses={HTTP_204_NO_CONTENT: None}, ) @action(detail=True, methods=["post"]) - def observations_bulk_mark_duplicates(self, request, pk=None): + def observations_bulk_mark_duplicates(self, request: Request, pk: int) -> Response: product = self.__get_product(pk) user_has_permission_or_403(product, Permissions.Observation_Assessment) @@ -334,7 +371,7 @@ def observations_bulk_mark_duplicates(self, request, pk=None): responses={HTTP_204_NO_CONTENT: None}, ) @action(detail=True, methods=["post"]) - def observations_bulk_delete(self, request, pk): + def observations_bulk_delete(self, request: Request, pk: int) -> Response: product = self.__get_product(pk) user_has_permission_or_403(product, Permissions.Observation_Delete) @@ -342,9 +379,7 @@ def observations_bulk_delete(self, request, pk): if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) - observations_bulk_delete( - product, request_serializer.validated_data.get("observations") - ) + observations_bulk_delete(product, request_serializer.validated_data.get("observations")) return Response(status=HTTP_204_NO_CONTENT) @extend_schema( @@ -353,7 +388,7 @@ def observations_bulk_delete(self, request, pk): responses={HTTP_204_NO_CONTENT: None}, ) @action(detail=True, methods=["post"]) - def license_components_bulk_delete(self, request, pk): + def license_components_bulk_delete(self, request: Request, pk: int) -> Response: product = self.__get_product(pk) user_has_permission_or_403(product, Permissions.License_Component_Delete) @@ -361,9 +396,7 @@ def license_components_bulk_delete(self, request, pk): if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) - license_components_bulk_delete( - product, request_serializer.validated_data.get("components") - ) + license_components_bulk_delete(product, request_serializer.validated_data.get("components")) return Response(status=HTTP_204_NO_CONTENT) @extend_schema( @@ -372,7 +405,7 @@ def license_components_bulk_delete(self, request, pk): responses={HTTP_204_NO_CONTENT: None}, ) @action(detail=True, methods=["post"]) - def synchronize_issues(self, request, pk): + def synchronize_issues(self, request: Request, pk: int) -> Response: product = self.__get_product(pk) user_has_permission_or_403(product, Permissions.Product_Edit) @@ -381,7 +414,7 @@ def synchronize_issues(self, request, pk): return Response(status=HTTP_204_NO_CONTENT) - def __get_product(self, pk) -> Product: + def __get_product(self, pk: int) -> Product: if not pk: raise ValidationError("No id provided") @@ -402,7 +435,7 @@ class ProductNameViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] - def get_queryset(self): + def get_queryset(self) -> QuerySet[Product]: return 
get_products(is_product_group=False) @@ -413,7 +446,7 @@ class ProductMemberViewSet(ModelViewSet): queryset = Product_Member.objects.none() filter_backends = [DjangoFilterBackend] - def get_queryset(self): + def get_queryset(self) -> QuerySet[Product_Member]: return get_product_members().select_related("product").select_related("user") @@ -427,12 +460,8 @@ class ProductAuthorizationGroupMemberViewSet(ModelViewSet): queryset = Product_Authorization_Group_Member.objects.none() filter_backends = [DjangoFilterBackend] - def get_queryset(self): - return ( - get_product_authorization_group_members() - .select_related("product") - .select_related("authorization_group") - ) + def get_queryset(self) -> QuerySet[Product_Authorization_Group_Member]: + return get_product_authorization_group_members().select_related("product").select_related("authorization_group") class BranchViewSet(ModelViewSet): @@ -443,12 +472,12 @@ class BranchViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] - def get_queryset(self): - return get_branches().select_related("product") + def get_queryset(self) -> QuerySet[Branch]: + return get_branches(with_annotations=True).select_related("product") def destroy(self, request: Request, *args: Any, **kwargs: Any) -> Response: instance: Branch = self.get_object() - if instance == instance.product.repository_default_branch: + if instance.is_default_branch: raise ValidationError("You cannot delete the default branch of a product.") return super().destroy(request, *args, **kwargs) @@ -462,13 +491,11 @@ class BranchNameViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] - def get_queryset(self): + def get_queryset(self) -> QuerySet[Branch]: return get_branches().select_related("product") -class ServiceViewSet( - GenericViewSet, ListModelMixin, RetrieveModelMixin, DestroyModelMixin -): +class ServiceViewSet(ModelViewSet): serializer_class = ServiceSerializer filterset_class = ServiceFilter permission_classes = (IsAuthenticated, UserHasServicePermission) @@ -476,7 +503,19 @@ class ServiceViewSet( filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] - def get_queryset(self): + def get_queryset(self) -> QuerySet[Service]: + return get_services(with_annotations=True).select_related("product") + + +class ServiceNameViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): + serializer_class = ServiceNameSerializer + filterset_class = ServiceFilter + permission_classes = (IsAuthenticated, UserHasServicePermission) + queryset = Service.objects.none() + filter_backends = [SearchFilter, DjangoFilterBackend] + search_fields = ["name"] + + def get_queryset(self) -> QuerySet[Service]: return get_services().select_related("product") @@ -488,7 +527,7 @@ class ObservationViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["title"] - def get_serializer_class(self): + def get_serializer_class(self) -> type[BaseSerializer]: if self.action == "list": return ObservationListSerializer @@ -500,21 +539,25 @@ def get_serializer_class(self): return super().get_serializer_class() - def get_queryset(self): + def get_queryset(self) -> QuerySet[Observation]: return ( get_observations() .select_related("product") .select_related("product__product_group") .select_related("branch") .select_related("parser") + .select_related("origin_service") ) def perform_destroy(self, instance: Observation) -> None: product = 
instance.product - issue_id = instance.issue_tracker_issue_id + super().perform_destroy(instance) - check_security_gate(product) - push_deleted_observation_to_issue_tracker(product, issue_id, get_current_user()) + if (instance.branch and instance.branch.is_default_branch) or ( + not instance.branch and not instance.product.repository_default_branch + ): + check_security_gate(product) + product.last_observation_change = timezone.now() product.save() @@ -524,7 +567,7 @@ def perform_destroy(self, instance: Observation) -> None: responses={200: None}, ) @action(detail=True, methods=["patch"]) - def assessment(self, request, pk=None): + def assessment(self, request: Request, pk: int) -> Response: request_serializer = ObservationAssessmentSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) @@ -538,22 +581,15 @@ def assessment(self, request, pk=None): current_observation_log = get_current_observation_log(observation) if ( current_observation_log - and current_observation_log.assessment_status - == Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL + and current_observation_log.assessment_status == Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL ): - raise ValidationError( - "Cannot create new assessment while last assessment still needs approval" - ) + raise ValidationError("Cannot create new assessment while last assessment still needs approval") new_severity = request_serializer.validated_data.get("severity") new_status = request_serializer.validated_data.get("status") comment = request_serializer.validated_data.get("comment") - new_vex_justification = request_serializer.validated_data.get( - "vex_justification" - ) - new_risk_acceptance_expiry_date = request_serializer.validated_data.get( - "risk_acceptance_expiry_date" - ) + new_vex_justification = request_serializer.validated_data.get("vex_justification") + new_risk_acceptance_expiry_date = request_serializer.validated_data.get("risk_acceptance_expiry_date") save_assessment( observation=observation, @@ -573,7 +609,7 @@ def assessment(self, request, pk=None): responses={200: None}, ) @action(detail=True, methods=["patch"]) - def remove_assessment(self, request, pk=None): + def remove_assessment(self, request: Request, pk: int) -> Response: request_serializer = ObservationRemoveAssessmentSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) @@ -587,12 +623,9 @@ def remove_assessment(self, request, pk=None): current_observation_log = get_current_observation_log(observation) if ( current_observation_log - and current_observation_log.assessment_status - == Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL + and current_observation_log.assessment_status == Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL ): - raise ValidationError( - "Cannot remove assessment while last assessment still needs approval" - ) + raise ValidationError("Cannot remove assessment while last assessment still needs approval") comment = request_serializer.validated_data.get("comment") @@ -607,7 +640,7 @@ def remove_assessment(self, request, pk=None): responses={HTTP_204_NO_CONTENT: None}, ) @action(detail=False, methods=["post"]) - def bulk_assessment(self, request): + def bulk_assessment(self, request: Request) -> Response: request_serializer = ObservationBulkAssessmentSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) @@ -618,12 +651,8 @@ def bulk_assessment(self, request): 
new_status=request_serializer.validated_data.get("status"), comment=request_serializer.validated_data.get("comment"), observation_ids=request_serializer.validated_data.get("observations"), - new_vex_justification=request_serializer.validated_data.get( - "vex_justification" - ), - new_risk_acceptance_expiry_date=request_serializer.validated_data.get( - "risk_acceptance_expiry_date" - ), + new_vex_justification=request_serializer.validated_data.get("vex_justification"), + new_risk_acceptance_expiry_date=request_serializer.validated_data.get("risk_acceptance_expiry_date"), ) return Response(status=HTTP_204_NO_CONTENT) @@ -633,10 +662,8 @@ def bulk_assessment(self, request): responses={HTTP_200_OK: CountSerializer}, ) @action(detail=False, methods=["get"]) - def count_reviews(self, request): - count = ( - get_observations().filter(current_status=Status.STATUS_IN_REVIEW).count() - ) + def count_reviews(self, request: Request) -> Response: + count = get_observations().filter(current_status=Status.STATUS_IN_REVIEW).count() return Response(status=HTTP_200_OK, data={"count": count}) @@ -648,7 +675,7 @@ class ObservationTitleViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["title"] - def get_queryset(self): + def get_queryset(self) -> QuerySet[Observation]: return get_observations() @@ -658,15 +685,22 @@ class ObservationLogViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): queryset = Observation_Log.objects.all() filter_backends = [SearchFilter, DjangoFilterBackend] - def get_serializer_class(self): + def get_serializer_class( + self, + ) -> type[ObservationLogListSerializer] | type[BaseSerializer]: if self.action == "list": return ObservationLogListSerializer return super().get_serializer_class() - def get_queryset(self): + def get_queryset(self) -> QuerySet[Observation_Log]: return ( - get_observation_logs().select_related("observation").select_related("user") + get_observation_logs() + .select_related("observation") + .select_related("observation__product") + .select_related("observation__branch") + .select_related("observation__parser") + .select_related("user") ) @extend_schema( @@ -675,7 +709,7 @@ def get_queryset(self): responses={200: None}, ) @action(detail=True, methods=["patch"]) - def approval(self, request, pk=None): + def approval(self, request: Request, pk: int) -> Response: request_serializer = ObservationLogApprovalSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) @@ -684,9 +718,7 @@ def approval(self, request, pk=None): if not observation_log: raise NotFound(f"Observation Log {pk} not found") - user_has_permission_or_403( - observation_log, Permissions.Observation_Log_Approval - ) + user_has_permission_or_403(observation_log, Permissions.Observation_Log_Approval) assessment_status = request_serializer.validated_data.get("assessment_status") approval_remark = request_serializer.validated_data.get("approval_remark") @@ -702,7 +734,7 @@ def approval(self, request, pk=None): responses={HTTP_204_NO_CONTENT: None}, ) @action(detail=False, methods=["post"]) - def bulk_approval(self, request): + def bulk_approval(self, request: Request) -> Response: request_serializer = ObservationLogBulkApprovalSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) @@ -720,23 +752,40 @@ def bulk_approval(self, request): responses={HTTP_200_OK: CountSerializer}, ) @action(detail=False, 
methods=["get"]) - def count_approvals(self, request): + def count_approvals(self, request: Request) -> Response: count = ( - get_observation_logs() - .filter( - assessment_status=Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL - ) - .count() + get_observation_logs().filter(assessment_status=Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL).count() ) return Response(status=HTTP_200_OK, data={"count": count}) + @extend_schema( + methods=["DELETE"], + request=ObservationLogBulkApprovalSerializer, + responses={HTTP_204_NO_CONTENT: None}, + ) + @action(detail=False, methods=["delete"]) + def bulk_delete(self, request: Request) -> Response: + request_serializer = ObservationLogBulkDeleteSerializer(data=request.data) + if not request_serializer.is_valid(): + raise ValidationError(request_serializer.errors) + + result = Observation_Log.objects.filter( + id__in=request_serializer.validated_data.get("observation_logs"), + user=get_current_user(), + ).delete() + + if result[0] == 0: + raise ValidationError("No assessments were deleted. You can only delete your own assessments.") + + return Response({"count": result[0]}, status=HTTP_200_OK) + class EvidenceViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): serializer_class = EvidenceSerializer filterset_class = EvidenceFilter queryset = Evidence.objects.none() - def get_queryset(self): + def get_queryset(self) -> QuerySet[Evidence]: return get_evidences().select_related("observation__product") @@ -745,5 +794,163 @@ class PotentialDuplicateViewSet(GenericViewSet, ListModelMixin): filterset_class = PotentialDuplicateFilter queryset = Potential_Duplicate.objects.none() - def get_queryset(self): + def get_queryset(self) -> QuerySet[Potential_Duplicate]: return get_potential_duplicates() + + +class PURLTypeOneView(APIView): + @extend_schema( + methods=["GET"], + request=None, + responses={HTTP_200_OK: PURLTypeSerializer}, + ) + @action(detail=True, methods=["get"]) + def get(self, request: Request, purl_type_id: str) -> Response: + purl_type = get_purl_type(purl_type_id) + if purl_type: + response_serializer = PURLTypeElementSerializer(purl_type) + return Response( + status=HTTP_200_OK, + data=response_serializer.data, + ) + + return Response(status=HTTP_404_NOT_FOUND) + + +class PURLTypeManyView(APIView): + @extend_schema( + methods=["GET"], + request=None, + responses={HTTP_200_OK: PURLTypeSerializer}, + ) + @action(detail=False, methods=["get"]) + def get(self, request: Request) -> Response: + product_id = request.query_params.get("product") + if not product_id: + return Response(status=HTTP_404_NOT_FOUND) + product = get_product_by_id(int(product_id)) + if not product: + return Response(status=HTTP_404_NOT_FOUND) + if not user_has_permission(product, Permissions.Product_View): + return Response(status=HTTP_404_NOT_FOUND) + + for_observations = bool(request.query_params.get("for_observations")) + for_license_components = bool(request.query_params.get("for_license_components")) + purl_types = get_purl_types(product, for_observations, for_license_components) + + response_serializer = PURLTypeSerializer(purl_types) + return Response( + status=HTTP_200_OK, + data=response_serializer.data, + ) + + +class ProductApiTokenViewset(ViewSet): + serializer_class = ProductApiTokenSerializer + + @extend_schema( + parameters=[ + OpenApiParameter(name="product", location=OpenApiParameter.QUERY, required=True, type=int), + ], + ) + def list(self, request: Request) -> Response: + product_id = str(request.query_params.get("product", "")) + if not 
product_id: + raise ValidationError("Product is required") + if not product_id.isdigit(): + raise ValidationError("Product id must be an integer") + + product = _get_product(int(str(product_id))) + user_has_permission_or_403(product, Permissions.Product_View) + tokens = get_product_api_tokens(product) + serializer = ProductApiTokenSerializer(tokens, many=True) + response_data = {"results": serializer.data} + return Response(response_data) + + @extend_schema( + request=ProductApiTokenSerializer, + responses={HTTP_200_OK: ApiTokenCreateResponseSerializer}, + ) + def create(self, request: Request) -> Response: + request_serializer = ProductApiTokenSerializer(data=request.data) + if not request_serializer.is_valid(): + raise ValidationError(request_serializer.errors) + + product = _get_product(request_serializer.validated_data.get("product")) + + user_has_permission_or_403(product, Permissions.Product_Api_Token_Create) + + token = create_product_api_token( + product, + request_serializer.validated_data.get("role"), + request_serializer.validated_data.get("name"), + request_serializer.validated_data.get("expiration_date"), + ) + + response = Response({"token": token}, status=HTTP_201_CREATED) + logger.info(format_log_message(message="Product API token created", response=response)) + return response + + @extend_schema( + responses={HTTP_204_NO_CONTENT: None}, + ) + def destroy(self, request: Request, pk: int) -> Response: + API_TOKEN_NOT_VALID = "API token not valid" + + api_token = get_api_token_by_id(pk) + if not api_token: + raise ValidationError(API_TOKEN_NOT_VALID) + + if not re.match("-product-(\\d)*(-.*)?-api_token-", api_token.user.username): + raise ValidationError(API_TOKEN_NOT_VALID) + + product_member = Product_Member.objects.filter(user=api_token.user).first() + if not product_member: + raise ValidationError(API_TOKEN_NOT_VALID) + + product = _get_product(product_member.product.pk) + user_has_permission_or_403(product, Permissions.Product_Api_Token_Revoke) + + revoke_product_api_token(product, api_token) + + response = Response(status=HTTP_204_NO_CONTENT) + logger.info(format_log_message(message="Product API token revoked", response=response)) + return response + + +class ComponentViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): + serializer_class = ComponentSerializer + filterset_class = ComponentFilter + permission_classes = (IsAuthenticated,) + queryset = Component.objects.none() + filter_backends = [SearchFilter, DjangoFilterBackend] + search_fields = ["component_name_version"] + + def get_queryset(self) -> QuerySet[Component]: + return ( + get_components() + .select_related("product") + .select_related("product__product_group") + .select_related("branch") + .select_related("origin_service") + ) + + +class ComponentNameViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): + serializer_class = ComponentNameSerializer + filterset_class = ComponentFilter + permission_classes = (IsAuthenticated,) + queryset = Component.objects.none() + filter_backends = [SearchFilter, DjangoFilterBackend] + search_fields = ["component_name_version"] + + def get_queryset(self) -> QuerySet[Component]: + return get_components() + + +def _get_product(product_id: int) -> Product: + product = get_product_by_id(product_id) + if not product: + raise ValidationError(f"Product {product_id} does not exist") + + return product diff --git a/backend/application/core/apps.py b/backend/application/core/apps.py index 2ca5c2787..10e1c9301 100644 --- a/backend/application/core/apps.py +++ 
b/backend/application/core/apps.py @@ -5,7 +5,7 @@ class CoreConfig(AppConfig): name = "application.core" verbose_name = "Core" - def ready(self): + def ready(self) -> None: try: import application.core.signals # noqa F401 pylint: disable=import-outside-toplevel, unused-import except ImportError: diff --git a/backend/application/core/migrations/0001_initial.py b/backend/application/core/migrations/0001_initial.py index 59f6f9bfd..e143f44e2 100644 --- a/backend/application/core/migrations/0001_initial.py +++ b/backend/application/core/migrations/0001_initial.py @@ -375,9 +375,7 @@ class Migration(migrations.Migration): ), ( "product", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="core.product" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.product"), ), ( "user", @@ -467,15 +465,11 @@ class Migration(migrations.Migration): migrations.AddField( model_name="observation", name="parser", - field=models.ForeignKey( - on_delete=django.db.models.deletion.PROTECT, to="core.parser" - ), + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to="core.parser"), ), migrations.AddField( model_name="observation", name="product", - field=models.ForeignKey( - on_delete=django.db.models.deletion.PROTECT, to="core.product" - ), + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to="core.product"), ), ] diff --git a/backend/application/core/migrations/0005_last_observation_log_data.py b/backend/application/core/migrations/0005_last_observation_log_data.py index dd2ee1c30..43d2c72a0 100644 --- a/backend/application/core/migrations/0005_last_observation_log_data.py +++ b/backend/application/core/migrations/0005_last_observation_log_data.py @@ -12,9 +12,7 @@ def update_last_observation_log(apps, schema_editor): Observation_Log = apps.get_model("core", "Observation_Log") for observation in Observation.objects.all(): try: - observation_log = Observation_Log.objects.filter( - observation=observation - ).latest("created") + observation_log = Observation_Log.objects.filter(observation=observation).latest("created") observation.last_observation_log = observation_log.created observation.save() except Exception as e: diff --git a/backend/application/core/migrations/0011_observation_initialize_short_names.py b/backend/application/core/migrations/0011_observation_initialize_short_names.py index e319e1629..e54b4af65 100644 --- a/backend/application/core/migrations/0011_observation_initialize_short_names.py +++ b/backend/application/core/migrations/0011_observation_initialize_short_names.py @@ -7,14 +7,10 @@ def touch_observations(apps, schema_editor): Observation = apps.get_model("core", "Observation") for observation in Observation.objects.all(): if observation.origin_docker_image_name_tag: - origin_docker_image_name_tag_parts = ( - observation.origin_docker_image_name_tag.split("/") - ) - observation.origin_docker_image_name_tag_short = ( - origin_docker_image_name_tag_parts[ - len(origin_docker_image_name_tag_parts) - 1 - ].strip() - ) + origin_docker_image_name_tag_parts = observation.origin_docker_image_name_tag.split("/") + observation.origin_docker_image_name_tag_short = origin_docker_image_name_tag_parts[ + len(origin_docker_image_name_tag_parts) - 1 + ].strip() else: observation.origin_docker_image_name_tag_short = "" diff --git a/backend/application/core/migrations/0015_branch_observation_branch_and_more.py b/backend/application/core/migrations/0015_branch_observation_branch_and_more.py index 4f1456c0a..f15e689ae 100644 
--- a/backend/application/core/migrations/0015_branch_observation_branch_and_more.py +++ b/backend/application/core/migrations/0015_branch_observation_branch_and_more.py @@ -25,9 +25,7 @@ class Migration(migrations.Migration): ("name", models.CharField(max_length=255, unique=True)), ( "product", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="core.product" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.product"), ), ], options={ @@ -37,9 +35,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name="observation", name="branch", - field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.PROTECT, to="core.branch" - ), + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to="core.branch"), ), migrations.AddField( model_name="product", diff --git a/backend/application/core/migrations/0019_indices.py b/backend/application/core/migrations/0019_indices.py index 3c9ff5ba4..742815e3a 100644 --- a/backend/application/core/migrations/0019_indices.py +++ b/backend/application/core/migrations/0019_indices.py @@ -23,9 +23,7 @@ class Migration(migrations.Migration): ), migrations.AddIndex( model_name="observation", - index=models.Index( - fields=["product", "branch"], name="core_observ_product_2b0c8c_idx" - ), + index=models.Index(fields=["product", "branch"], name="core_observ_product_2b0c8c_idx"), ), migrations.AddIndex( model_name="observation", @@ -33,27 +31,19 @@ class Migration(migrations.Migration): ), migrations.AddIndex( model_name="observation", - index=models.Index( - fields=["current_severity"], name="core_observ_current_ac86ae_idx" - ), + index=models.Index(fields=["current_severity"], name="core_observ_current_ac86ae_idx"), ), migrations.AddIndex( model_name="observation", - index=models.Index( - fields=["numerical_severity"], name="core_observ_numeric_0b8309_idx" - ), + index=models.Index(fields=["numerical_severity"], name="core_observ_numeric_0b8309_idx"), ), migrations.AddIndex( model_name="observation", - index=models.Index( - fields=["current_status"], name="core_observ_current_ad6d8c_idx" - ), + index=models.Index(fields=["current_status"], name="core_observ_current_ad6d8c_idx"), ), migrations.AddIndex( model_name="observation", - index=models.Index( - fields=["vulnerability_id"], name="core_observ_vulnera_e3405d_idx" - ), + index=models.Index(fields=["vulnerability_id"], name="core_observ_vulnera_e3405d_idx"), ), migrations.AddIndex( model_name="observation", @@ -71,9 +61,7 @@ class Migration(migrations.Migration): ), migrations.AddIndex( model_name="observation", - index=models.Index( - fields=["origin_service_name"], name="core_observ_origin__43587c_idx" - ), + index=models.Index(fields=["origin_service_name"], name="core_observ_origin__43587c_idx"), ), migrations.AddIndex( model_name="observation", @@ -84,27 +72,19 @@ class Migration(migrations.Migration): ), migrations.AddIndex( model_name="observation", - index=models.Index( - fields=["origin_source_file"], name="core_observ_origin__e87800_idx" - ), + index=models.Index(fields=["origin_source_file"], name="core_observ_origin__e87800_idx"), ), migrations.AddIndex( model_name="observation", - index=models.Index( - fields=["last_observation_log"], name="core_observ_last_ob_49e4ca_idx" - ), + index=models.Index(fields=["last_observation_log"], name="core_observ_last_ob_49e4ca_idx"), ), migrations.AddIndex( model_name="observation", - index=models.Index( - fields=["epss_score"], 
name="core_observ_epss_sc_6b5f31_idx" - ), + index=models.Index(fields=["epss_score"], name="core_observ_epss_sc_6b5f31_idx"), ), migrations.AddIndex( model_name="observation", - index=models.Index( - fields=["scanner"], name="core_observ_scanner_d77e31_idx" - ), + index=models.Index(fields=["scanner"], name="core_observ_scanner_d77e31_idx"), ), migrations.AddIndex( model_name="observation_log", @@ -115,9 +95,7 @@ class Migration(migrations.Migration): ), migrations.AddIndex( model_name="observation_log", - index=models.Index( - fields=["-created"], name="core_observ_created_8b59b9_idx" - ), + index=models.Index(fields=["-created"], name="core_observ_created_8b59b9_idx"), ), migrations.AddIndex( model_name="parser", diff --git a/backend/application/core/migrations/0022_branch_housekeeping.py b/backend/application/core/migrations/0022_branch_housekeeping.py index 83864b71d..d43979b1f 100644 --- a/backend/application/core/migrations/0022_branch_housekeeping.py +++ b/backend/application/core/migrations/0022_branch_housekeeping.py @@ -45,8 +45,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="observation", name="branch", - field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.CASCADE, to="core.branch" - ), + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="core.branch"), ), ] diff --git a/backend/application/core/migrations/0023_service_observation_origin_service_and_more.py b/backend/application/core/migrations/0023_service_observation_origin_service_and_more.py index fda52a2bc..bfcb3aaeb 100644 --- a/backend/application/core/migrations/0023_service_observation_origin_service_and_more.py +++ b/backend/application/core/migrations/0023_service_observation_origin_service_and_more.py @@ -25,9 +25,7 @@ class Migration(migrations.Migration): ("name", models.CharField(max_length=255)), ( "product", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="core.product" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.product"), ), ], ), diff --git a/backend/application/core/migrations/0024_initialize_branches.py b/backend/application/core/migrations/0024_initialize_branches.py index b8e0c019c..9077c2428 100644 --- a/backend/application/core/migrations/0024_initialize_branches.py +++ b/backend/application/core/migrations/0024_initialize_branches.py @@ -13,13 +13,9 @@ def initialize_branches(apps, schema_editor): for observation in Observation.objects.exclude(origin_service_name=""): try: try: - service = Service.objects.get( - product=observation.product, name=observation.origin_service_name - ) + service = Service.objects.get(product=observation.product, name=observation.origin_service_name) except Service.DoesNotExist: - service = Service.objects.create( - product=observation.product, name=observation.origin_service_name - ) + service = Service.objects.create(product=observation.product, name=observation.origin_service_name) observation.origin_service = service observation.save() except Exception as e: diff --git a/backend/application/core/migrations/0037_product_authorization_group_member.py b/backend/application/core/migrations/0037_product_authorization_group_member.py index 827ef94da..7883e68f8 100644 --- a/backend/application/core/migrations/0037_product_authorization_group_member.py +++ b/backend/application/core/migrations/0037_product_authorization_group_member.py @@ -43,9 +43,7 @@ class Migration(migrations.Migration): ), ( "product", - models.ForeignKey( - 
on_delete=django.db.models.deletion.CASCADE, to="core.product" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.product"), ), ], options={ diff --git a/backend/application/core/migrations/0045_populate_observation_purl_type.py b/backend/application/core/migrations/0045_populate_observation_purl_type.py index 9b5290505..3a67f0ab7 100644 --- a/backend/application/core/migrations/0045_populate_observation_purl_type.py +++ b/backend/application/core/migrations/0045_populate_observation_purl_type.py @@ -3,7 +3,7 @@ from django.core.paginator import Paginator from django.db import migrations -from application.core.services.observation import normalize_origin_component +from application.core.services.observation import _normalize_origin_component logger = logging.getLogger("secobserve.migration") @@ -11,9 +11,7 @@ def populate_purl_type(apps, schema_editor): Observation = apps.get_model("core", "Observation") observations = ( - Observation.objects.exclude(origin_component_purl="") - .filter(origin_component_purl_type="") - .order_by("id") + Observation.objects.exclude(origin_component_purl="").filter(origin_component_purl_type="").order_by("id") ) paginator = Paginator(observations, 1000) @@ -22,7 +20,7 @@ def populate_purl_type(apps, schema_editor): updates = [] for observation in page.object_list: - normalize_origin_component(observation) + _normalize_origin_component(observation) updates.append(observation) Observation.objects.bulk_update(updates, ["origin_component_purl_type"]) diff --git a/backend/application/core/migrations/0048_populate_product_checkboxes.py b/backend/application/core/migrations/0048_populate_product_checkboxes.py index 206b38585..d0ed3d12b 100644 --- a/backend/application/core/migrations/0048_populate_product_checkboxes.py +++ b/backend/application/core/migrations/0048_populate_product_checkboxes.py @@ -19,34 +19,22 @@ def populate_product_flags(apps, schema_editor): for product in page.object_list: product.has_cloud_resource = ( - Observation.objects.filter(product=product) - .exclude(origin_cloud_qualified_resource="") - .exists() + Observation.objects.filter(product=product).exclude(origin_cloud_qualified_resource="").exists() ) product.has_component = ( - Observation.objects.filter(product=product) - .exclude(origin_component_name_version="") - .exists() + Observation.objects.filter(product=product).exclude(origin_component_name_version="").exists() ) product.has_docker_image = ( - Observation.objects.filter(product=product) - .exclude(origin_docker_image_name_tag_short="") - .exists() + Observation.objects.filter(product=product).exclude(origin_docker_image_name_tag_short="").exists() ) product.has_endpoint = ( - Observation.objects.filter(product=product) - .exclude(origin_endpoint_hostname="") - .exists() + Observation.objects.filter(product=product).exclude(origin_endpoint_hostname="").exists() ) - product.has_source = ( - Observation.objects.filter(product=product) - .exclude(origin_source_file="") - .exists() - ) + product.has_source = Observation.objects.filter(product=product).exclude(origin_source_file="").exists() product.has_potential_duplicates = Observation.objects.filter( product=product, has_potential_duplicates=True @@ -73,7 +61,5 @@ class Migration(migrations.Migration): ] operations = [ - migrations.RunPython( - populate_product_flags, reverse_code=migrations.RunPython.noop - ), + migrations.RunPython(populate_product_flags, reverse_code=migrations.RunPython.noop), ] diff --git 
a/backend/application/core/migrations/0051_convert_origin_component_dependencies.py b/backend/application/core/migrations/0051_convert_origin_component_dependencies.py index 744e98fdc..e6157ccb0 100644 --- a/backend/application/core/migrations/0051_convert_origin_component_dependencies.py +++ b/backend/application/core/migrations/0051_convert_origin_component_dependencies.py @@ -14,9 +14,7 @@ def convert_origin_component_dependencies(apps, schema_editor): Observation = apps.get_model("core", "Observation") - observations = Observation.objects.exclude( - origin_component_dependencies="" - ).order_by("id") + observations = Observation.objects.exclude(origin_component_dependencies="").order_by("id") paginator = Paginator(observations, 1000) for page_number in paginator.page_range: @@ -29,9 +27,7 @@ def convert_origin_component_dependencies(apps, schema_editor): dependencies = list(dict.fromkeys(dependencies)) mermaid_dependencies = _parse_mermaid_graph_content(dependencies) - observation.origin_component_dependencies = ( - _generate_dependency_list_as_text(mermaid_dependencies) - ) + observation.origin_component_dependencies = _generate_dependency_list_as_text(mermaid_dependencies) updates.append(observation) diff --git a/backend/application/core/migrations/0054_convert_unknown_data.py b/backend/application/core/migrations/0054_convert_unknown_data.py index 7ce57b425..92d60de53 100644 --- a/backend/application/core/migrations/0054_convert_unknown_data.py +++ b/backend/application/core/migrations/0054_convert_unknown_data.py @@ -65,9 +65,7 @@ def convert_unknown_data(apps, schema_editor): ) Product = apps.get_model("core", "Product") - products = Product.objects.filter(issue_tracker_minimum_severity="Unkown").order_by( - "id" - ) + products = Product.objects.filter(issue_tracker_minimum_severity="Unkown").order_by("id") paginator = Paginator(products, 1000) for page_number in paginator.page_range: diff --git a/backend/application/core/migrations/0056_correct_risk_acceptance_expiry_date.py b/backend/application/core/migrations/0056_correct_risk_acceptance_expiry_date.py index ed199c637..9bfe89610 100644 --- a/backend/application/core/migrations/0056_correct_risk_acceptance_expiry_date.py +++ b/backend/application/core/migrations/0056_correct_risk_acceptance_expiry_date.py @@ -17,9 +17,7 @@ def correct_risk_acceptance_expiry_date(apps, schema_editor): Observation = apps.get_model("core", "Observation") Observation_Log = apps.get_model("core", "Observation_Log") - observations = Observation.objects.filter( - current_status=Status.STATUS_RISK_ACCEPTED - ).order_by("id") + observations = Observation.objects.filter(current_status=Status.STATUS_RISK_ACCEPTED).order_by("id") paginator = Paginator(observations, 1000) for page_number in paginator.page_range: @@ -30,39 +28,25 @@ def correct_risk_acceptance_expiry_date(apps, schema_editor): risk_acceptance_expiry_date_found = False most_recent_risk_acceptance: Optional[date] = None - observation_logs = Observation_Log.objects.filter( - observation=observation - ).order_by("-created") + observation_logs = Observation_Log.objects.filter(observation=observation).order_by("-created") for observation_log in observation_logs: - if ( - observation_log.status == Status.STATUS_RISK_ACCEPTED - and not most_recent_risk_acceptance - ): + if observation_log.status == Status.STATUS_RISK_ACCEPTED and not most_recent_risk_acceptance: most_recent_risk_acceptance = observation_log.created.date() if observation_log.risk_acceptance_expiry_date: - 
observation.risk_acceptance_expiry_date = ( - observation_log.risk_acceptance_expiry_date - ) + observation.risk_acceptance_expiry_date = observation_log.risk_acceptance_expiry_date risk_acceptance_expiry_date_found = True break - if ( - not risk_acceptance_expiry_date_found - and observation.risk_acceptance_expiry_date - ): - new_risk_acceptance_expiry_date = calculate_risk_acceptance_expiry_date( - observation.product - ) + if not risk_acceptance_expiry_date_found and observation.risk_acceptance_expiry_date: + new_risk_acceptance_expiry_date = calculate_risk_acceptance_expiry_date(observation.product) if most_recent_risk_acceptance: days_between = (date.today() - most_recent_risk_acceptance).days - observation.risk_acceptance_expiry_date = ( - new_risk_acceptance_expiry_date - timedelta(days=days_between) + observation.risk_acceptance_expiry_date = new_risk_acceptance_expiry_date - timedelta( + days=days_between ) else: - observation.risk_acceptance_expiry_date = ( - new_risk_acceptance_expiry_date - ) + observation.risk_acceptance_expiry_date = new_risk_acceptance_expiry_date updates.append(observation) diff --git a/backend/application/core/migrations/0058_observation_vulnerability_id_aliases.py b/backend/application/core/migrations/0058_observation_vulnerability_id_aliases.py new file mode 100644 index 000000000..38f9b4a36 --- /dev/null +++ b/backend/application/core/migrations/0058_observation_vulnerability_id_aliases.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.5 on 2025-01-23 06:47 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0057_observation_cvss4_score_observation_cvss4_vector"), + ] + + operations = [ + migrations.AddField( + model_name="observation", + name="vulnerability_id_aliases", + field=models.CharField(blank=True, max_length=512), + ), + ] diff --git a/backend/application/core/migrations/0059_branch_osv_linux_distribution_and_more.py b/backend/application/core/migrations/0059_branch_osv_linux_distribution_and_more.py new file mode 100644 index 000000000..d539cc6b6 --- /dev/null +++ b/backend/application/core/migrations/0059_branch_osv_linux_distribution_and_more.py @@ -0,0 +1,68 @@ +# Generated by Django 5.1.5 on 2025-01-29 07:00 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0058_observation_vulnerability_id_aliases"), + ] + + operations = [ + migrations.AddField( + model_name="branch", + name="osv_linux_distribution", + field=models.CharField( + blank=True, + choices=[ + ("AlmaLinux", "AlmaLinux"), + ("Alpine", "Alpine"), + ("Debian", "Debian"), + ("Mageia", "Mageia"), + ("openSUSE", "openSUSE"), + ("Photon OS", "Photon OS"), + ("Red Hat", "Red Hat"), + ("Rocky Linux", "Rocky Linux"), + ("SUSE", "SUSE"), + ("Ubuntu", "Ubuntu"), + ], + max_length=12, + ), + ), + migrations.AddField( + model_name="branch", + name="osv_linux_release", + field=models.CharField(blank=True, max_length=255), + ), + migrations.AddField( + model_name="product", + name="osv_enabled", + field=models.BooleanField(default=False), + ), + migrations.AddField( + model_name="product", + name="osv_linux_distribution", + field=models.CharField( + blank=True, + choices=[ + ("AlmaLinux", "AlmaLinux"), + ("Alpine", "Alpine"), + ("Debian", "Debian"), + ("Mageia", "Mageia"), + ("openSUSE", "openSUSE"), + ("Photon OS", "Photon OS"), + ("Red Hat", "Red Hat"), + ("Rocky Linux", "Rocky Linux"), + ("SUSE", "SUSE"), + ("Ubuntu", "Ubuntu"), + ], + max_length=12, + ), 
+ ), + migrations.AddField( + model_name="product", + name="osv_linux_release", + field=models.CharField(blank=True, max_length=255), + ), + ] diff --git a/backend/application/core/migrations/0060_product_automatic_osv_scanning_enabled.py b/backend/application/core/migrations/0060_product_automatic_osv_scanning_enabled.py new file mode 100644 index 000000000..db90f3e3c --- /dev/null +++ b/backend/application/core/migrations/0060_product_automatic_osv_scanning_enabled.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.5 on 2025-02-02 16:05 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0059_branch_osv_linux_distribution_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="product", + name="automatic_osv_scanning_enabled", + field=models.BooleanField(default=False), + ), + ] diff --git a/backend/application/core/migrations/0061_observation_cve_found_in_and_more.py b/backend/application/core/migrations/0061_observation_cve_found_in_and_more.py new file mode 100644 index 000000000..f7d4662ef --- /dev/null +++ b/backend/application/core/migrations/0061_observation_cve_found_in_and_more.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.6 on 2025-03-04 05:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0060_product_automatic_osv_scanning_enabled"), + ] + + operations = [ + migrations.AddField( + model_name="observation", + name="cve_found_in", + field=models.CharField(blank=True, max_length=255), + ), + ] diff --git a/backend/application/core/migrations/0062_alter_branch_osv_linux_distribution_and_more.py b/backend/application/core/migrations/0062_alter_branch_osv_linux_distribution_and_more.py new file mode 100644 index 000000000..542d2a33d --- /dev/null +++ b/backend/application/core/migrations/0062_alter_branch_osv_linux_distribution_and_more.py @@ -0,0 +1,57 @@ +# Generated by Django 5.1.7 on 2025-03-26 16:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0061_observation_cve_found_in_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="branch", + name="osv_linux_distribution", + field=models.CharField( + blank=True, + choices=[ + ("AlmaLinux", "AlmaLinux"), + ("Alpine", "Alpine"), + ("Chainguard", "Chainguard"), + ("Debian", "Debian"), + ("Mageia", "Mageia"), + ("openSUSE", "openSUSE"), + ("Photon OS", "Photon OS"), + ("Red Hat", "Red Hat"), + ("Rocky Linux", "Rocky Linux"), + ("SUSE", "SUSE"), + ("Ubuntu", "Ubuntu"), + ("Wolfi", "Wolfi"), + ], + max_length=12, + ), + ), + migrations.AlterField( + model_name="product", + name="osv_linux_distribution", + field=models.CharField( + blank=True, + choices=[ + ("AlmaLinux", "AlmaLinux"), + ("Alpine", "Alpine"), + ("Chainguard", "Chainguard"), + ("Debian", "Debian"), + ("Mageia", "Mageia"), + ("openSUSE", "openSUSE"), + ("Photon OS", "Photon OS"), + ("Red Hat", "Red Hat"), + ("Rocky Linux", "Rocky Linux"), + ("SUSE", "SUSE"), + ("Ubuntu", "Ubuntu"), + ("Wolfi", "Wolfi"), + ], + max_length=12, + ), + ), + ] diff --git a/backend/application/core/migrations/0063_observation_origin_source_file_link.py b/backend/application/core/migrations/0063_observation_origin_source_file_link.py new file mode 100644 index 000000000..7b05d8323 --- /dev/null +++ b/backend/application/core/migrations/0063_observation_origin_source_file_link.py @@ -0,0 +1,18 @@ +# Generated by Django 5.2.1 on 2025-05-26 20:07 + +from 
django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0062_alter_branch_osv_linux_distribution_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="observation", + name="origin_source_file_link", + field=models.CharField(blank=True, max_length=2048), + ), + ] diff --git a/backend/application/core/migrations/0064_product_description_markdown.py b/backend/application/core/migrations/0064_product_description_markdown.py new file mode 100644 index 000000000..d79fbb2b2 --- /dev/null +++ b/backend/application/core/migrations/0064_product_description_markdown.py @@ -0,0 +1,39 @@ +import logging + +from django.core.paginator import Paginator +from django.db import migrations +from html_to_markdown import convert_to_markdown + +logger = logging.getLogger("secobserve.migration") + + +def convert_product_description_to_markdown(apps, schema_editor): + Product = apps.get_model("core", "Product") + products = Product.objects.exclude(description__exact="").order_by("id") + + paginator = Paginator(products, 1000) + for page_number in paginator.page_range: + page = paginator.page(page_number) + updates = [] + + for product in page.object_list: + product.description = convert_to_markdown(product.description) + updates.append(product) + + Product.objects.bulk_update(updates, ["description"]) + + +class Migration(migrations.Migration): + dependencies = [ + ( + "core", + "0063_observation_origin_source_file_link", + ), + ] + + operations = [ + migrations.RunPython( + convert_product_description_to_markdown, + reverse_code=migrations.RunPython.noop, + ), + ] diff --git a/backend/application/core/migrations/0065_observation_origin_component_cyclonedx_bom_link_and_more.py b/backend/application/core/migrations/0065_observation_origin_component_cyclonedx_bom_link_and_more.py new file mode 100644 index 000000000..3241186a9 --- /dev/null +++ b/backend/application/core/migrations/0065_observation_origin_component_cyclonedx_bom_link_and_more.py @@ -0,0 +1,25 @@ +# Generated by Django 5.2.5 on 2025-08-21 06:11 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0064_product_description_markdown"), + ("import_observations", "0013_alter_vulnerability_check_unique_together_and_more"), + ("rules", "0015_convert_unknown_data"), + ("vex", "0009_vex_statement_component_cyclonedx_bom_link_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="observation", + name="origin_component_cyclonedx_bom_link", + field=models.CharField(blank=True, max_length=512), + ), + migrations.AddIndex( + model_name="observation", + index=models.Index(fields=["origin_component_cyclonedx_bom_link"], name="core_observ_origin__952ded_idx"), + ), + ] diff --git a/backend/application/core/migrations/0066_alter_observation_assessment_vex_justification_and_more.py b/backend/application/core/migrations/0066_alter_observation_assessment_vex_justification_and_more.py new file mode 100644 index 000000000..40fa0003b --- /dev/null +++ b/backend/application/core/migrations/0066_alter_observation_assessment_vex_justification_and_more.py @@ -0,0 +1,175 @@ +# Generated by Django 5.2.5 on 2025-08-26 19:17 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0065_observation_origin_component_cyclonedx_bom_link_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="observation", + name="assessment_vex_justification", + 
field=models.CharField( + blank=True, + choices=[ + ("component_not_present", "component_not_present"), + ("vulnerable_code_not_present", "vulnerable_code_not_present"), + ( + "vulnerable_code_cannot_be_controlled_by_adversary", + "vulnerable_code_cannot_be_controlled_by_adversary", + ), + ("vulnerable_code_not_in_execute_path", "vulnerable_code_not_in_execute_path"), + ("inline_mitigations_already_exist", "inline_mitigations_already_exist"), + ("code_not_present", "code_not_present"), + ("code_not_reachable", "code_not_reachable"), + ("requires_configuration", "requires_configuration"), + ("requires_dependency", "requires_dependency"), + ("requires_environment", "requires_environment"), + ("protected_by_compiler", "protected_by_compiler"), + ("protected_at_runtime", "protected_at_runtime"), + ("protected_at_perimeter", "protected_at_perimeter"), + ("protected_by_mitigating_control", "protected_by_mitigating_control"), + ], + max_length=64, + ), + ), + migrations.AlterField( + model_name="observation", + name="current_vex_justification", + field=models.CharField( + blank=True, + choices=[ + ("component_not_present", "component_not_present"), + ("vulnerable_code_not_present", "vulnerable_code_not_present"), + ( + "vulnerable_code_cannot_be_controlled_by_adversary", + "vulnerable_code_cannot_be_controlled_by_adversary", + ), + ("vulnerable_code_not_in_execute_path", "vulnerable_code_not_in_execute_path"), + ("inline_mitigations_already_exist", "inline_mitigations_already_exist"), + ("code_not_present", "code_not_present"), + ("code_not_reachable", "code_not_reachable"), + ("requires_configuration", "requires_configuration"), + ("requires_dependency", "requires_dependency"), + ("requires_environment", "requires_environment"), + ("protected_by_compiler", "protected_by_compiler"), + ("protected_at_runtime", "protected_at_runtime"), + ("protected_at_perimeter", "protected_at_perimeter"), + ("protected_by_mitigating_control", "protected_by_mitigating_control"), + ], + max_length=64, + ), + ), + migrations.AlterField( + model_name="observation", + name="parser_vex_justification", + field=models.CharField( + blank=True, + choices=[ + ("component_not_present", "component_not_present"), + ("vulnerable_code_not_present", "vulnerable_code_not_present"), + ( + "vulnerable_code_cannot_be_controlled_by_adversary", + "vulnerable_code_cannot_be_controlled_by_adversary", + ), + ("vulnerable_code_not_in_execute_path", "vulnerable_code_not_in_execute_path"), + ("inline_mitigations_already_exist", "inline_mitigations_already_exist"), + ("code_not_present", "code_not_present"), + ("code_not_reachable", "code_not_reachable"), + ("requires_configuration", "requires_configuration"), + ("requires_dependency", "requires_dependency"), + ("requires_environment", "requires_environment"), + ("protected_by_compiler", "protected_by_compiler"), + ("protected_at_runtime", "protected_at_runtime"), + ("protected_at_perimeter", "protected_at_perimeter"), + ("protected_by_mitigating_control", "protected_by_mitigating_control"), + ], + max_length=64, + ), + ), + migrations.AlterField( + model_name="observation", + name="rule_vex_justification", + field=models.CharField( + blank=True, + choices=[ + ("component_not_present", "component_not_present"), + ("vulnerable_code_not_present", "vulnerable_code_not_present"), + ( + "vulnerable_code_cannot_be_controlled_by_adversary", + "vulnerable_code_cannot_be_controlled_by_adversary", + ), + ("vulnerable_code_not_in_execute_path", "vulnerable_code_not_in_execute_path"), + 
("inline_mitigations_already_exist", "inline_mitigations_already_exist"), + ("code_not_present", "code_not_present"), + ("code_not_reachable", "code_not_reachable"), + ("requires_configuration", "requires_configuration"), + ("requires_dependency", "requires_dependency"), + ("requires_environment", "requires_environment"), + ("protected_by_compiler", "protected_by_compiler"), + ("protected_at_runtime", "protected_at_runtime"), + ("protected_at_perimeter", "protected_at_perimeter"), + ("protected_by_mitigating_control", "protected_by_mitigating_control"), + ], + max_length=64, + ), + ), + migrations.AlterField( + model_name="observation", + name="vex_vex_justification", + field=models.CharField( + blank=True, + choices=[ + ("component_not_present", "component_not_present"), + ("vulnerable_code_not_present", "vulnerable_code_not_present"), + ( + "vulnerable_code_cannot_be_controlled_by_adversary", + "vulnerable_code_cannot_be_controlled_by_adversary", + ), + ("vulnerable_code_not_in_execute_path", "vulnerable_code_not_in_execute_path"), + ("inline_mitigations_already_exist", "inline_mitigations_already_exist"), + ("code_not_present", "code_not_present"), + ("code_not_reachable", "code_not_reachable"), + ("requires_configuration", "requires_configuration"), + ("requires_dependency", "requires_dependency"), + ("requires_environment", "requires_environment"), + ("protected_by_compiler", "protected_by_compiler"), + ("protected_at_runtime", "protected_at_runtime"), + ("protected_at_perimeter", "protected_at_perimeter"), + ("protected_by_mitigating_control", "protected_by_mitigating_control"), + ], + max_length=64, + ), + ), + migrations.AlterField( + model_name="observation_log", + name="vex_justification", + field=models.CharField( + blank=True, + choices=[ + ("component_not_present", "component_not_present"), + ("vulnerable_code_not_present", "vulnerable_code_not_present"), + ( + "vulnerable_code_cannot_be_controlled_by_adversary", + "vulnerable_code_cannot_be_controlled_by_adversary", + ), + ("vulnerable_code_not_in_execute_path", "vulnerable_code_not_in_execute_path"), + ("inline_mitigations_already_exist", "inline_mitigations_already_exist"), + ("code_not_present", "code_not_present"), + ("code_not_reachable", "code_not_reachable"), + ("requires_configuration", "requires_configuration"), + ("requires_dependency", "requires_dependency"), + ("requires_environment", "requires_environment"), + ("protected_by_compiler", "protected_by_compiler"), + ("protected_at_runtime", "protected_at_runtime"), + ("protected_at_perimeter", "protected_at_perimeter"), + ("protected_by_mitigating_control", "protected_by_mitigating_control"), + ], + max_length=64, + ), + ), + ] diff --git a/backend/application/core/migrations/0067_components_view.py b/backend/application/core/migrations/0067_components_view.py new file mode 100644 index 000000000..876c4707a --- /dev/null +++ b/backend/application/core/migrations/0067_components_view.py @@ -0,0 +1,104 @@ +from django.db import migrations + +CREATE_SQL = """ +DROP VIEW IF EXISTS core_component; +CREATE VIEW core_component AS +WITH CombinedData AS ( + SELECT + product_id as product_id, + branch_id as branch_id, + origin_service_id as origin_service_id, + origin_component_name AS component_name, + origin_component_version AS component_version, + origin_component_name_version AS component_name_version, + origin_component_purl AS component_purl, + origin_component_purl_type AS component_purl_type, + origin_component_cpe AS component_cpe, + origin_component_dependencies AS 
component_dependencies, + origin_component_cyclonedx_bom_link AS component_cyclonedx_bom_link + FROM core_observation + WHERE origin_component_name_version != '' + + UNION + + SELECT + product_id as product_id, + branch_id as branch_id, + origin_service_id as origin_service_id, + component_name AS component_name, + component_version AS component_version, + component_name_version AS component_name_version, + component_purl AS component_purl, + component_purl_type AS component_purl_type, + component_cpe AS component_cpe, + component_dependencies AS component_dependencies, + component_cyclonedx_bom_link AS component_cyclonedx_bom_link + FROM licenses_license_component +), +ObservationFlag AS ( + SELECT DISTINCT + product_id, + branch_id, + origin_service_id, + origin_component_name_version AS component_name_version, + origin_component_purl_type AS component_purl_type, + TRUE AS has_observation + FROM core_observation +) +SELECT + MD5( + CONCAT( + CAST(COALESCE(cd.product_id, 111) as CHAR(255)), + CAST(COALESCE(cd.branch_id, 222) as CHAR(255)), + CAST(COALESCE(cd.origin_service_id, 333) as CHAR(255)), + COALESCE(cd.component_name_version, 'no_name_version'), + COALESCE(cd.component_purl_type, 'no_purl_type'), + COALESCE(cd.component_dependencies, 'no_dependencies') + ) + ) AS id, + cd.product_id as product_id, + cd.branch_id as branch_id, + cd.origin_service_id as origin_service_id, + cd.component_name AS component_name, + cd.component_version AS component_version, + cd.component_name_version AS component_name_version, + cd.component_purl AS component_purl, + cd.component_purl_type AS component_purl_type, + cd.component_cpe AS component_cpe, + cd.component_dependencies AS component_dependencies, + cd.component_cyclonedx_bom_link AS component_cyclonedx_bom_link, + COALESCE(ObservationFlag.has_observation, FALSE) AS has_observations +FROM CombinedData cd +LEFT JOIN ObservationFlag ON + cd.product_id = ObservationFlag.product_id + AND ( + (cd.branch_id = ObservationFlag.branch_id) IS TRUE OR + (cd.branch_id IS NULL AND ObservationFlag.branch_id IS NULL) + ) + AND ( + (cd.origin_service_id = ObservationFlag.origin_service_id) IS TRUE OR + (cd.origin_service_id IS NULL AND ObservationFlag.origin_service_id IS NULL) + ) + AND cd.component_name_version = ObservationFlag.component_name_version + AND ( + (cd.component_purl_type = ObservationFlag.component_purl_type) IS TRUE OR + (cd.component_purl_type IS NULL AND ObservationFlag.component_purl_type IS NULL) + ) +; +""" + +DROP_SQL = "DROP VIEW IF EXISTS core_component;" + + +class Migration(migrations.Migration): + dependencies = [ + ("licenses", "0020_license_component_component_cyclonedx_bom_link"), + ("core", "0066_alter_observation_assessment_vex_justification_and_more"), + ] + + operations = [ + migrations.RunSQL( + sql=CREATE_SQL, + reverse_sql=DROP_SQL, + ), + ] diff --git a/backend/application/core/migrations/0068_component.py b/backend/application/core/migrations/0068_component.py new file mode 100644 index 000000000..ee7336705 --- /dev/null +++ b/backend/application/core/migrations/0068_component.py @@ -0,0 +1,32 @@ +# Generated by Django 5.2.6 on 2025-09-21 15:19 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0067_components_view"), + ] + + operations = [ + migrations.CreateModel( + name="Component", + fields=[ + ("id", models.CharField(max_length=32, primary_key=True, serialize=False)), + ("component_name", models.CharField(max_length=255)), + ("component_version", 
models.CharField(blank=True, max_length=255)), + ("component_name_version", models.CharField(blank=True, max_length=513)), + ("component_purl", models.CharField(blank=True, max_length=255)), + ("component_purl_type", models.CharField(blank=True, max_length=16)), + ("component_cpe", models.CharField(blank=True, max_length=255)), + ("component_dependencies", models.TextField(blank=True, max_length=32768)), + ("component_cyclonedx_bom_link", models.CharField(blank=True, max_length=512)), + ("has_observations", models.BooleanField()), + ], + options={ + "db_table": "core_component", + "managed": False, + }, + ), + ] diff --git a/backend/application/core/migrations/0069_components_view_fix.py b/backend/application/core/migrations/0069_components_view_fix.py new file mode 100644 index 000000000..91fbcb7a6 --- /dev/null +++ b/backend/application/core/migrations/0069_components_view_fix.py @@ -0,0 +1,108 @@ +from django.db import migrations + +CREATE_SQL = """ +DROP VIEW IF EXISTS core_component; +CREATE VIEW core_component AS +WITH CombinedData AS ( + SELECT + product_id as product_id, + branch_id as branch_id, + origin_service_id as origin_service_id, + origin_component_name AS component_name, + origin_component_version AS component_version, + origin_component_name_version AS component_name_version, + origin_component_purl AS component_purl, + origin_component_purl_type AS component_purl_type, + origin_component_cpe AS component_cpe, + origin_component_dependencies AS component_dependencies, + origin_component_cyclonedx_bom_link AS component_cyclonedx_bom_link + FROM core_observation + WHERE origin_component_name_version != '' + + UNION + + SELECT + product_id as product_id, + branch_id as branch_id, + origin_service_id as origin_service_id, + component_name AS component_name, + component_version AS component_version, + component_name_version AS component_name_version, + component_purl AS component_purl, + component_purl_type AS component_purl_type, + component_cpe AS component_cpe, + component_dependencies AS component_dependencies, + component_cyclonedx_bom_link AS component_cyclonedx_bom_link + FROM licenses_license_component +), +ObservationFlag AS ( + SELECT DISTINCT + product_id, + branch_id, + origin_service_id, + origin_component_name_version AS component_name_version, + origin_component_purl AS component_purl, + origin_component_cpe AS component_cpe, + origin_component_dependencies AS component_dependencies, + origin_component_cyclonedx_bom_link AS component_cyclonedx_bom_link, + TRUE AS has_observation + FROM core_observation +) +SELECT + MD5( + CONCAT( + CAST(COALESCE(cd.product_id, 111) as CHAR(255)), + CAST(COALESCE(cd.branch_id, 222) as CHAR(255)), + CAST(COALESCE(cd.origin_service_id, 333) as CHAR(255)), + COALESCE(cd.component_name_version, 'no_name_version'), + COALESCE(cd.component_purl, 'no_purl'), + COALESCE(cd.component_cpe, 'no_cpe'), + COALESCE(cd.component_dependencies, 'no_dependencies'), + COALESCE(cd.component_cyclonedx_bom_link, 'component_cyclonedx_bom_link') + ) + ) AS id, + cd.product_id as product_id, + cd.branch_id as branch_id, + cd.origin_service_id as origin_service_id, + cd.component_name AS component_name, + cd.component_version AS component_version, + cd.component_name_version AS component_name_version, + cd.component_purl AS component_purl, + cd.component_purl_type AS component_purl_type, + cd.component_cpe AS component_cpe, + cd.component_dependencies AS component_dependencies, + cd.component_cyclonedx_bom_link AS component_cyclonedx_bom_link, + 
COALESCE(ObservationFlag.has_observation, FALSE) AS has_observations +FROM CombinedData cd +LEFT JOIN ObservationFlag ON + cd.product_id = ObservationFlag.product_id + AND ( + (cd.branch_id = ObservationFlag.branch_id) IS TRUE OR + (cd.branch_id IS NULL AND ObservationFlag.branch_id IS NULL) + ) + AND ( + (cd.origin_service_id = ObservationFlag.origin_service_id) IS TRUE OR + (cd.origin_service_id IS NULL AND ObservationFlag.origin_service_id IS NULL) + ) + AND cd.component_name_version = ObservationFlag.component_name_version + AND cd.component_purl = ObservationFlag.component_purl + AND cd.component_cpe = ObservationFlag.component_cpe + AND cd.component_dependencies = ObservationFlag.component_dependencies + AND cd.component_cyclonedx_bom_link = ObservationFlag.component_cyclonedx_bom_link +; +""" + +DROP_SQL = "DROP VIEW IF EXISTS core_component;" + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0068_component"), + ] + + operations = [ + migrations.RunSQL( + sql=CREATE_SQL, + reverse_sql=DROP_SQL, + ), + ] diff --git a/backend/application/core/migrations/0070_components_view_open_observations.py b/backend/application/core/migrations/0070_components_view_open_observations.py new file mode 100644 index 000000000..5d902a6d4 --- /dev/null +++ b/backend/application/core/migrations/0070_components_view_open_observations.py @@ -0,0 +1,109 @@ +from django.db import migrations + +CREATE_SQL = """ +DROP VIEW IF EXISTS core_component; +CREATE VIEW core_component AS +WITH CombinedData AS ( + SELECT + product_id as product_id, + branch_id as branch_id, + origin_service_id as origin_service_id, + origin_component_name AS component_name, + origin_component_version AS component_version, + origin_component_name_version AS component_name_version, + origin_component_purl AS component_purl, + origin_component_purl_type AS component_purl_type, + origin_component_cpe AS component_cpe, + origin_component_dependencies AS component_dependencies, + origin_component_cyclonedx_bom_link AS component_cyclonedx_bom_link + FROM core_observation + WHERE origin_component_name_version != '' + + UNION + + SELECT + product_id as product_id, + branch_id as branch_id, + origin_service_id as origin_service_id, + component_name AS component_name, + component_version AS component_version, + component_name_version AS component_name_version, + component_purl AS component_purl, + component_purl_type AS component_purl_type, + component_cpe AS component_cpe, + component_dependencies AS component_dependencies, + component_cyclonedx_bom_link AS component_cyclonedx_bom_link + FROM licenses_license_component +), +ObservationFlag AS ( + SELECT DISTINCT + product_id, + branch_id, + origin_service_id, + origin_component_name_version AS component_name_version, + origin_component_purl AS component_purl, + origin_component_cpe AS component_cpe, + origin_component_dependencies AS component_dependencies, + origin_component_cyclonedx_bom_link AS component_cyclonedx_bom_link, + TRUE AS has_observation + FROM core_observation + WHERE current_status = 'Open' +) +SELECT + MD5( + CONCAT( + CAST(COALESCE(cd.product_id, 111) as CHAR(255)), + CAST(COALESCE(cd.branch_id, 222) as CHAR(255)), + CAST(COALESCE(cd.origin_service_id, 333) as CHAR(255)), + COALESCE(cd.component_name_version, 'no_name_version'), + COALESCE(cd.component_purl, 'no_purl'), + COALESCE(cd.component_cpe, 'no_cpe'), + COALESCE(cd.component_dependencies, 'no_dependencies'), + COALESCE(cd.component_cyclonedx_bom_link, 'component_cyclonedx_bom_link') + ) + ) AS 
id, + cd.product_id as product_id, + cd.branch_id as branch_id, + cd.origin_service_id as origin_service_id, + cd.component_name AS component_name, + cd.component_version AS component_version, + cd.component_name_version AS component_name_version, + cd.component_purl AS component_purl, + cd.component_purl_type AS component_purl_type, + cd.component_cpe AS component_cpe, + cd.component_dependencies AS component_dependencies, + cd.component_cyclonedx_bom_link AS component_cyclonedx_bom_link, + COALESCE(ObservationFlag.has_observation, FALSE) AS has_observations +FROM CombinedData cd +LEFT JOIN ObservationFlag ON + cd.product_id = ObservationFlag.product_id + AND ( + (cd.branch_id = ObservationFlag.branch_id) IS TRUE OR + (cd.branch_id IS NULL AND ObservationFlag.branch_id IS NULL) + ) + AND ( + (cd.origin_service_id = ObservationFlag.origin_service_id) IS TRUE OR + (cd.origin_service_id IS NULL AND ObservationFlag.origin_service_id IS NULL) + ) + AND cd.component_name_version = ObservationFlag.component_name_version + AND cd.component_purl = ObservationFlag.component_purl + AND cd.component_cpe = ObservationFlag.component_cpe + AND cd.component_dependencies = ObservationFlag.component_dependencies + AND cd.component_cyclonedx_bom_link = ObservationFlag.component_cyclonedx_bom_link +; +""" + +DROP_SQL = "DROP VIEW IF EXISTS core_component;" + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0069_components_view_fix"), + ] + + operations = [ + migrations.RunSQL( + sql=CREATE_SQL, + reverse_sql=DROP_SQL, + ), + ] diff --git a/backend/application/core/migrations/0071_alter_product_osv_enabled.py b/backend/application/core/migrations/0071_alter_product_osv_enabled.py new file mode 100644 index 000000000..1e6fe6fea --- /dev/null +++ b/backend/application/core/migrations/0071_alter_product_osv_enabled.py @@ -0,0 +1,18 @@ +# Generated by Django 5.2.8 on 2025-11-28 17:03 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0070_components_view_open_observations"), + ] + + operations = [ + migrations.AlterField( + model_name="product", + name="osv_enabled", + field=models.BooleanField(default=True), + ), + ] diff --git a/backend/application/core/migrations/0072_branch_is_default_branch.py b/backend/application/core/migrations/0072_branch_is_default_branch.py new file mode 100644 index 000000000..7a783a65f --- /dev/null +++ b/backend/application/core/migrations/0072_branch_is_default_branch.py @@ -0,0 +1,18 @@ +# Generated by Django 5.2.9 on 2025-12-16 10:45 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0071_alter_product_osv_enabled"), + ] + + operations = [ + migrations.AddField( + model_name="branch", + name="is_default_branch", + field=models.BooleanField(default=False), + ), + ] diff --git a/backend/application/core/migrations/0073_initialize_default_branch.py b/backend/application/core/migrations/0073_initialize_default_branch.py new file mode 100644 index 000000000..7d16b7145 --- /dev/null +++ b/backend/application/core/migrations/0073_initialize_default_branch.py @@ -0,0 +1,41 @@ +from django.core.paginator import Paginator +from django.db import migrations + + +def initialize_default_branch(apps, schema_editor): + Branch = apps.get_model("core", "Branch") + + Product = apps.get_model("core", "Product") + products = ( + Product.objects.exclude(repository_default_branch__isnull=True) + .order_by("id") + 
.select_related("repository_default_branch") + ) + + paginator = Paginator(products, 1000) + for page_number in paginator.page_range: + page = paginator.page(page_number) + updates = [] + + for product in page.object_list: + branch = product.repository_default_branch + branch.is_default_branch = True + updates.append(branch) + + Branch.objects.bulk_update(updates, ["is_default_branch"]) + + +class Migration(migrations.Migration): + dependencies = [ + ( + "core", + "0072_branch_is_default_branch", + ), + ] + + operations = [ + migrations.RunPython( + initialize_default_branch, + reverse_code=migrations.RunPython.noop, + ), + ] diff --git a/backend/application/core/migrations/0074_observation_fix_available_and_more.py b/backend/application/core/migrations/0074_observation_fix_available_and_more.py new file mode 100644 index 000000000..f0527f2f3 --- /dev/null +++ b/backend/application/core/migrations/0074_observation_fix_available_and_more.py @@ -0,0 +1,37 @@ +# Generated by Django 5.2.10 on 2026-01-11 09:27 + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0073_initialize_default_branch"), + ("import_observations", "0016_api_configuration_migrate_names"), + ("rules", "0016_alter_rule_new_vex_justification"), + ("vex", "0010_cyclonedx_cyclonedx_branch_cyclonedx_vulnerability"), + ] + + operations = [ + migrations.AddField( + model_name="observation", + name="fix_available", + field=models.BooleanField(null=True), + ), + migrations.AddField( + model_name="observation", + name="update_impact_score", + field=models.IntegerField( + null=True, + validators=[ + django.core.validators.MinValueValidator(0), + django.core.validators.MaxValueValidator(999999), + ], + ), + ), + migrations.AddIndex( + model_name="observation", + index=models.Index(fields=["update_impact_score"], name="core_observ_update__10b05f_idx"), + ), + ] diff --git a/backend/application/core/migrations/0075_observation_initialize_fix_and_upgrade.py b/backend/application/core/migrations/0075_observation_initialize_fix_and_upgrade.py new file mode 100644 index 000000000..91a15993d --- /dev/null +++ b/backend/application/core/migrations/0075_observation_initialize_fix_and_upgrade.py @@ -0,0 +1,42 @@ +import logging + +from django.core.paginator import Paginator +from django.db import migrations + +from application.core.services.observation import ( + _normalize_update_impact_score_and_fix_available, +) + +logger = logging.getLogger("secobserve.migration") + + +def initialize_fix_and_upgrade(apps, schema_editor): + Observation = apps.get_model("core", "Observation") + observations = Observation.objects.exclude(origin_component_name__exact="").order_by("id") + + paginator = Paginator(observations, 1000) + for page_number in paginator.page_range: + page = paginator.page(page_number) + updates = [] + + for observation in page.object_list: + _normalize_update_impact_score_and_fix_available(observation) + updates.append(observation) + + Observation.objects.bulk_update(updates, ["update_impact_score", "fix_available"]) + + +class Migration(migrations.Migration): + dependencies = [ + ( + "core", + "0074_observation_fix_available_and_more", + ), + ] + + operations = [ + migrations.RunPython( + initialize_fix_and_upgrade, + reverse_code=migrations.RunPython.noop, + ), + ] diff --git a/backend/application/core/migrations/0076_alter_product_notification_ms_teams_webhook_and_more.py 
b/backend/application/core/migrations/0076_alter_product_notification_ms_teams_webhook_and_more.py new file mode 100644 index 000000000..b04bbaf5f --- /dev/null +++ b/backend/application/core/migrations/0076_alter_product_notification_ms_teams_webhook_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.2.11 on 2026-02-04 18:26 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0075_observation_initialize_fix_and_upgrade"), + ] + + operations = [ + migrations.AlterField( + model_name="product", + name="notification_ms_teams_webhook", + field=models.TextField(blank=True, max_length=2048), + ), + migrations.AlterField( + model_name="product", + name="notification_slack_webhook", + field=models.TextField(blank=True, max_length=2048), + ), + ] diff --git a/backend/application/core/migrations/0077_drop_component_view.py b/backend/application/core/migrations/0077_drop_component_view.py new file mode 100644 index 000000000..adba37a32 --- /dev/null +++ b/backend/application/core/migrations/0077_drop_component_view.py @@ -0,0 +1,20 @@ +from django.db import migrations + +# The component view has to be created after all other migrations. Otherwise some alterations of +# observation lead to errors, due to https://www.sqlite.org/lang_altertable.html#caution. +# It will be created before the first query runs. + +DROP_SQL = "DROP VIEW IF EXISTS core_component;" + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0076_alter_product_notification_ms_teams_webhook_and_more"), + ] + + operations = [ + migrations.RunSQL( + sql=DROP_SQL, + reverse_sql=DROP_SQL, + ), + ] diff --git a/backend/application/core/migrations/0078_observation_assessment_priority_and_more.py b/backend/application/core/migrations/0078_observation_assessment_priority_and_more.py new file mode 100644 index 000000000..0e11f683c --- /dev/null +++ b/backend/application/core/migrations/0078_observation_assessment_priority_and_more.py @@ -0,0 +1,145 @@ +# Generated by Django 5.2.11 on 2026-02-11 18:54 + +import django.core.validators +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0077_drop_component_view"), + ("import_observations", "0016_api_configuration_migrate_names"), + ("rules", "0018_rule_rego_module_rule_type"), + ("vex", "0010_cyclonedx_cyclonedx_branch_cyclonedx_vulnerability"), + ] + + operations = [ + migrations.AddField( + model_name="observation", + name="assessment_priority", + field=models.IntegerField( + null=True, + validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(99)], + ), + ), + migrations.AddField( + model_name="observation", + name="current_priority", + field=models.IntegerField( + null=True, + validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(99)], + ), + ), + migrations.AddField( + model_name="observation", + name="general_rule_rego", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="general_rules_rego", + to="rules.rule", + ), + ), + migrations.AddField( + model_name="observation", + name="product_rule_rego", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="product_rules_rego", + to="rules.rule", + ), + ), + migrations.AddField( + model_name="observation", + name="rule_priority", + 
field=models.IntegerField( + null=True, + validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(99)], + ), + ), + migrations.AddField( + model_name="observation", + name="rule_rego_priority", + field=models.IntegerField( + null=True, + validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(99)], + ), + ), + migrations.AddField( + model_name="observation", + name="rule_rego_severity", + field=models.CharField( + blank=True, + choices=[ + ("Unknown", "Unknown"), + ("None", "None"), + ("Low", "Low"), + ("Medium", "Medium"), + ("High", "High"), + ("Critical", "Critical"), + ], + max_length=12, + ), + ), + migrations.AddField( + model_name="observation", + name="rule_rego_status", + field=models.CharField( + blank=True, + choices=[ + ("Open", "Open"), + ("Resolved", "Resolved"), + ("Duplicate", "Duplicate"), + ("False positive", "False positive"), + ("In review", "In review"), + ("Not affected", "Not affected"), + ("Not security", "Not security"), + ("Risk accepted", "Risk accepted"), + ], + max_length=16, + ), + ), + migrations.AddField( + model_name="observation", + name="rule_rego_vex_justification", + field=models.CharField( + blank=True, + choices=[ + ("component_not_present", "component_not_present"), + ("vulnerable_code_not_present", "vulnerable_code_not_present"), + ( + "vulnerable_code_cannot_be_controlled_by_adversary", + "vulnerable_code_cannot_be_controlled_by_adversary", + ), + ("vulnerable_code_not_in_execute_path", "vulnerable_code_not_in_execute_path"), + ("inline_mitigations_already_exist", "inline_mitigations_already_exist"), + ("code_not_present", "code_not_present"), + ("code_not_reachable", "code_not_reachable"), + ("requires_configuration", "requires_configuration"), + ("requires_dependency", "requires_dependency"), + ("requires_environment", "requires_environment"), + ("protected_by_compiler", "protected_by_compiler"), + ("protected_at_runtime", "protected_at_runtime"), + ("protected_at_perimeter", "protected_at_perimeter"), + ("protected_by_mitigating_control", "protected_by_mitigating_control"), + ], + max_length=64, + ), + ), + migrations.AddField( + model_name="observation_log", + name="priority", + field=models.IntegerField( + null=True, + validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(99)], + ), + ), + migrations.AddIndex( + model_name="observation", + index=models.Index(fields=["current_priority"], name="core_observ_current_ba21e4_idx"), + ), + ] diff --git a/backend/application/core/migrations/0079_alter_observation_assessment_status_and_more.py b/backend/application/core/migrations/0079_alter_observation_assessment_status_and_more.py new file mode 100644 index 000000000..7b4fc15f8 --- /dev/null +++ b/backend/application/core/migrations/0079_alter_observation_assessment_status_and_more.py @@ -0,0 +1,145 @@ +# Generated by Django 5.2.11 on 2026-02-15 16:50 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0078_observation_assessment_priority_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="observation", + name="assessment_status", + field=models.CharField( + blank=True, + choices=[ + ("Open", "Open"), + ("Affected", "Affected"), + ("Resolved", "Resolved"), + ("Duplicate", "Duplicate"), + ("False positive", "False positive"), + ("In review", "In review"), + ("Not affected", "Not affected"), + ("Not security", "Not security"), + ("Risk 
accepted", "Risk accepted"), + ], + max_length=16, + ), + ), + migrations.AlterField( + model_name="observation", + name="current_status", + field=models.CharField( + choices=[ + ("Open", "Open"), + ("Affected", "Affected"), + ("Resolved", "Resolved"), + ("Duplicate", "Duplicate"), + ("False positive", "False positive"), + ("In review", "In review"), + ("Not affected", "Not affected"), + ("Not security", "Not security"), + ("Risk accepted", "Risk accepted"), + ], + max_length=16, + ), + ), + migrations.AlterField( + model_name="observation", + name="parser_status", + field=models.CharField( + blank=True, + choices=[ + ("Open", "Open"), + ("Affected", "Affected"), + ("Resolved", "Resolved"), + ("Duplicate", "Duplicate"), + ("False positive", "False positive"), + ("In review", "In review"), + ("Not affected", "Not affected"), + ("Not security", "Not security"), + ("Risk accepted", "Risk accepted"), + ], + max_length=16, + ), + ), + migrations.AlterField( + model_name="observation", + name="rule_rego_status", + field=models.CharField( + blank=True, + choices=[ + ("Open", "Open"), + ("Affected", "Affected"), + ("Resolved", "Resolved"), + ("Duplicate", "Duplicate"), + ("False positive", "False positive"), + ("In review", "In review"), + ("Not affected", "Not affected"), + ("Not security", "Not security"), + ("Risk accepted", "Risk accepted"), + ], + max_length=16, + ), + ), + migrations.AlterField( + model_name="observation", + name="rule_status", + field=models.CharField( + blank=True, + choices=[ + ("Open", "Open"), + ("Affected", "Affected"), + ("Resolved", "Resolved"), + ("Duplicate", "Duplicate"), + ("False positive", "False positive"), + ("In review", "In review"), + ("Not affected", "Not affected"), + ("Not security", "Not security"), + ("Risk accepted", "Risk accepted"), + ], + max_length=16, + ), + ), + migrations.AlterField( + model_name="observation", + name="vex_status", + field=models.CharField( + blank=True, + choices=[ + ("Open", "Open"), + ("Affected", "Affected"), + ("Resolved", "Resolved"), + ("Duplicate", "Duplicate"), + ("False positive", "False positive"), + ("In review", "In review"), + ("Not affected", "Not affected"), + ("Not security", "Not security"), + ("Risk accepted", "Risk accepted"), + ], + max_length=16, + ), + ), + migrations.AlterField( + model_name="observation_log", + name="status", + field=models.CharField( + blank=True, + choices=[ + ("Open", "Open"), + ("Affected", "Affected"), + ("Resolved", "Resolved"), + ("Duplicate", "Duplicate"), + ("False positive", "False positive"), + ("In review", "In review"), + ("Not affected", "Not affected"), + ("Not security", "Not security"), + ("Risk accepted", "Risk accepted"), + ], + max_length=16, + ), + ), + ] diff --git a/backend/application/core/models.py b/backend/application/core/models.py index 46865eee7..618826bde 100644 --- a/backend/application/core/models.py +++ b/backend/application/core/models.py @@ -1,9 +1,11 @@ from decimal import Decimal +from typing import Any -from django.apps import apps +from dirtyfields import DirtyFieldsMixin from django.core.validators import MaxValueValidator, MinValueValidator from django.db.models import ( CASCADE, + DO_NOTHING, PROTECT, SET_NULL, BooleanField, @@ -21,24 +23,22 @@ from django.utils import timezone from application.access_control.models import Authorization_Group, User -from application.core.services.observation import ( - get_identity_hash, - normalize_observation_fields, - set_product_flags, +from application.core.types import ( + Assessment_Status, + 
OSVLinuxDistribution, + Severity, + Status, + VEX_Justification, ) -from application.core.types import Assessment_Status, Severity, Status, VexJustification from application.issue_tracker.types import Issue_Tracker -from application.licenses.types import License_Policy_Evaluation_Result -class Product(Model): +class Product(Model, DirtyFieldsMixin): # pylint: disable=too-many-instance-attributes name = CharField(max_length=255, unique=True) description = TextField(max_length=2048, blank=True) is_product_group = BooleanField(default=False) - product_group = ForeignKey( - "self", on_delete=PROTECT, related_name="products", null=True, blank=True - ) + product_group = ForeignKey("self", on_delete=PROTECT, related_name="products", null=True, blank=True) purl = CharField(max_length=255, blank=True) cpe23 = CharField(max_length=255, blank=True) @@ -53,27 +53,19 @@ class Product(Model): repository_branch_housekeeping_keep_inactive_days = IntegerField( null=True, validators=[MinValueValidator(1), MaxValueValidator(999999)] ) - repository_branch_housekeeping_exempt_branches = CharField( - max_length=255, blank=True - ) + repository_branch_housekeeping_exempt_branches = CharField(max_length=255, blank=True) security_gate_passed = BooleanField(null=True) security_gate_active = BooleanField(null=True) security_gate_threshold_critical = IntegerField( null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] ) - security_gate_threshold_high = IntegerField( - null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] - ) + security_gate_threshold_high = IntegerField(null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)]) security_gate_threshold_medium = IntegerField( null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] ) - security_gate_threshold_low = IntegerField( - null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] - ) - security_gate_threshold_none = IntegerField( - null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] - ) + security_gate_threshold_low = IntegerField(null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)]) + security_gate_threshold_none = IntegerField(null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)]) security_gate_threshold_unknown = IntegerField( null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] ) @@ -90,14 +82,12 @@ class Product(Model): apply_general_rules = BooleanField(default=True) - notification_ms_teams_webhook = CharField(max_length=255, blank=True) - notification_slack_webhook = CharField(max_length=255, blank=True) + notification_ms_teams_webhook = TextField(max_length=2048, blank=True) + notification_slack_webhook = TextField(max_length=2048, blank=True) notification_email_to = CharField(max_length=255, blank=True) issue_tracker_active = BooleanField(default=False) - issue_tracker_type = CharField( - max_length=12, choices=Issue_Tracker.ISSUE_TRACKER_TYPE_CHOICES, blank=True - ) + issue_tracker_type = CharField(max_length=12, choices=Issue_Tracker.ISSUE_TRACKER_TYPE_CHOICES, blank=True) issue_tracker_base_url = CharField(max_length=255, blank=True) issue_tracker_username = CharField(max_length=255, blank=True) issue_tracker_api_key = CharField(max_length=255, blank=True) @@ -105,19 +95,21 @@ class Product(Model): issue_tracker_labels = CharField(max_length=255, blank=True) issue_tracker_issue_type = CharField(max_length=255, blank=True) issue_tracker_status_closed = CharField(max_length=255, blank=True) - 
issue_tracker_minimum_severity = CharField( - max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True - ) + issue_tracker_minimum_severity = CharField(max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True) + last_observation_change = DateTimeField(default=timezone.now) + assessments_need_approval = BooleanField(default=False) new_observations_in_review = BooleanField(default=False) product_rules_need_approval = BooleanField(default=False) + risk_acceptance_expiry_active = BooleanField(null=True) risk_acceptance_expiry_days = IntegerField( null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)], help_text="Days before risk acceptance expires, 0 means no expiry", ) + license_policy = ForeignKey( "licenses.License_Policy", on_delete=PROTECT, @@ -125,6 +117,16 @@ class Product(Model): null=True, blank=True, ) + + osv_enabled = BooleanField(default=True) + osv_linux_distribution = CharField( + max_length=12, + choices=OSVLinuxDistribution.OSV_LINUX_DISTRIBUTION_CHOICES, + blank=True, + ) + osv_linux_release = CharField(max_length=255, blank=True) + automatic_osv_scanning_enabled = BooleanField(default=False) + has_cloud_resource = BooleanField(default=False) has_component = BooleanField(default=False) has_docker_image = BooleanField(default=False) @@ -138,17 +140,40 @@ class Meta: Index(fields=["name"]), ] - def __str__(self): + def __init__(self, *args: Any, **kwargs: Any) -> None: + self.active_critical_observation_count: int | None = None + self.active_high_observation_count: int | None = None + self.active_medium_observation_count: int | None = None + self.active_low_observation_count: int | None = None + self.active_none_observation_count: int | None = None + self.active_unknown_observation_count: int | None = None + + self.forbidden_licenses_count: int | None = None + self.review_required_licenses_count: int | None = None + self.unknown_licenses_count: int | None = None + self.allowed_licenses_count: int | None = None + self.ignored_licenses_count: int | None = None + + super().__init__(*args, **kwargs) + + def __str__(self) -> str: return self.name -class Branch(Model): +class Branch(Model, DirtyFieldsMixin): product = ForeignKey(Product, on_delete=CASCADE) name = CharField(max_length=255) + is_default_branch = BooleanField(default=False) last_import = DateTimeField(null=True) housekeeping_protect = BooleanField(default=False) purl = CharField(max_length=255, blank=True) cpe23 = CharField(max_length=255, blank=True) + osv_linux_distribution = CharField( + max_length=12, + choices=OSVLinuxDistribution.OSV_LINUX_DISTRIBUTION_CHOICES, + blank=True, + ) + osv_linux_release = CharField(max_length=255, blank=True) class Meta: unique_together = ( @@ -159,97 +184,9 @@ class Meta: Index(fields=["name"]), ] - def __str__(self): + def __str__(self) -> str: return self.name - @property - def open_critical_observation_count(self): - return Observation.objects.filter( - branch=self, - current_severity=Severity.SEVERITY_CRITICAL, - current_status=Status.STATUS_OPEN, - ).count() - - @property - def open_high_observation_count(self): - return Observation.objects.filter( - branch=self, - current_severity=Severity.SEVERITY_HIGH, - current_status=Status.STATUS_OPEN, - ).count() - - @property - def open_medium_observation_count(self): - return Observation.objects.filter( - branch=self, - current_severity=Severity.SEVERITY_MEDIUM, - current_status=Status.STATUS_OPEN, - ).count() - - @property - def open_low_observation_count(self): - return Observation.objects.filter( - 
branch=self, - current_severity=Severity.SEVERITY_LOW, - current_status=Status.STATUS_OPEN, - ).count() - - @property - def open_none_observation_count(self): - return Observation.objects.filter( - branch=self, - current_severity=Severity.SEVERITY_NONE, - current_status=Status.STATUS_OPEN, - ).count() - - @property - def open_unknown_observation_count(self): - return Observation.objects.filter( - branch=self, - current_severity=Severity.SEVERITY_UNKNOWN, - current_status=Status.STATUS_OPEN, - ).count() - - @property - def forbidden_licenses_count(self): - License_Component = apps.get_model("licenses", "License_Component") - return License_Component.objects.filter( - branch=self, - evaluation_result=License_Policy_Evaluation_Result.RESULT_FORBIDDEN, - ).count() - - @property - def review_required_licenses_count(self): - License_Component = apps.get_model("licenses", "License_Component") - return License_Component.objects.filter( - branch=self, - evaluation_result=License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED, - ).count() - - @property - def unknown_licenses_count(self): - License_Component = apps.get_model("licenses", "License_Component") - return License_Component.objects.filter( - branch=self, - evaluation_result=License_Policy_Evaluation_Result.RESULT_UNKNOWN, - ).count() - - @property - def allowed_licenses_count(self): - License_Component = apps.get_model("licenses", "License_Component") - return License_Component.objects.filter( - branch=self, - evaluation_result=License_Policy_Evaluation_Result.RESULT_ALLOWED, - ).count() - - @property - def ignored_licenses_count(self): - License_Component = apps.get_model("licenses", "License_Component") - return License_Component.objects.filter( - branch=self, - evaluation_result=License_Policy_Evaluation_Result.RESULT_IGNORED, - ).count() - class Service(Model): product = ForeignKey(Product, on_delete=CASCADE) @@ -264,63 +201,9 @@ class Meta: Index(fields=["name"]), ] - def __str__(self): + def __str__(self) -> str: return self.name - @property - def open_critical_observation_count(self): - return Observation.objects.filter( - origin_service=self, - branch=self.product.repository_default_branch, - current_severity=Severity.SEVERITY_CRITICAL, - current_status=Status.STATUS_OPEN, - ).count() - - @property - def open_high_observation_count(self): - return Observation.objects.filter( - origin_service=self, - branch=self.product.repository_default_branch, - current_severity=Severity.SEVERITY_HIGH, - current_status=Status.STATUS_OPEN, - ).count() - - @property - def open_medium_observation_count(self): - return Observation.objects.filter( - origin_service=self, - branch=self.product.repository_default_branch, - current_severity=Severity.SEVERITY_MEDIUM, - current_status=Status.STATUS_OPEN, - ).count() - - @property - def open_low_observation_count(self): - return Observation.objects.filter( - origin_service=self, - branch=self.product.repository_default_branch, - current_severity=Severity.SEVERITY_LOW, - current_status=Status.STATUS_OPEN, - ).count() - - @property - def open_none_observation_count(self): - return Observation.objects.filter( - origin_service=self, - branch=self.product.repository_default_branch, - current_severity=Severity.SEVERITY_NONE, - current_status=Status.STATUS_OPEN, - ).count() - - @property - def open_unknown_observation_count(self): - return Observation.objects.filter( - origin_service=self, - branch=self.product.repository_default_branch, - current_severity=Severity.SEVERITY_UNKNOWN, - 
current_status=Status.STATUS_OPEN, - ).count() - class Product_Member(Model): product = ForeignKey(Product, on_delete=CASCADE) @@ -333,7 +216,7 @@ class Meta: "user", ) - def __str__(self): + def __str__(self) -> str: return f"{self.product} / {self.user}" @@ -348,7 +231,7 @@ class Meta: "authorization_group", ) - def __str__(self): + def __str__(self) -> str: return f"{self.product} / {self.authorization_group}" @@ -359,28 +242,30 @@ class Observation(Model): title = CharField(max_length=255) description = TextField(max_length=2048, blank=True) recommendation = TextField(max_length=2048, blank=True) + current_severity = CharField(max_length=12, choices=Severity.SEVERITY_CHOICES) - numerical_severity = IntegerField( - validators=[MinValueValidator(1), MaxValueValidator(6)] - ) - parser_severity = CharField( - max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True - ) - rule_severity = CharField( - max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True - ) - assessment_severity = CharField( - max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True - ) + numerical_severity = IntegerField(validators=[MinValueValidator(1), MaxValueValidator(6)]) + parser_severity = CharField(max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True) + rule_severity = CharField(max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True) + rule_rego_severity = CharField(max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True) + assessment_severity = CharField(max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True) + current_status = CharField(max_length=16, choices=Status.STATUS_CHOICES) parser_status = CharField(max_length=16, choices=Status.STATUS_CHOICES, blank=True) vex_status = CharField(max_length=16, choices=Status.STATUS_CHOICES, blank=True) rule_status = CharField(max_length=16, choices=Status.STATUS_CHOICES, blank=True) - assessment_status = CharField( - max_length=16, choices=Status.STATUS_CHOICES, blank=True - ) + rule_rego_status = CharField(max_length=16, choices=Status.STATUS_CHOICES, blank=True) + assessment_status = CharField(max_length=16, choices=Status.STATUS_CHOICES, blank=True) + + current_priority = IntegerField(validators=[MinValueValidator(1), MaxValueValidator(99)], null=True) + rule_priority = IntegerField(validators=[MinValueValidator(1), MaxValueValidator(99)], null=True) + rule_rego_priority = IntegerField(validators=[MinValueValidator(1), MaxValueValidator(99)], null=True) + assessment_priority = IntegerField(validators=[MinValueValidator(1), MaxValueValidator(99)], null=True) + scanner_observation_id = CharField(max_length=255, blank=True) vulnerability_id = CharField(max_length=255, blank=True) + vulnerability_id_aliases = CharField(max_length=512, blank=True) + origin_component_name = CharField(max_length=255, blank=True) origin_component_version = CharField(max_length=255, blank=True) origin_component_name_version = CharField(max_length=513, blank=True) @@ -388,47 +273,52 @@ class Observation(Model): origin_component_purl_type = CharField(max_length=16, blank=True) origin_component_cpe = CharField(max_length=255, blank=True) origin_component_dependencies = TextField(max_length=32768, blank=True) + origin_component_cyclonedx_bom_link = CharField(max_length=512, blank=True) + origin_docker_image_name = CharField(max_length=255, blank=True) origin_docker_image_tag = CharField(max_length=255, blank=True) origin_docker_image_name_tag = CharField(max_length=513, blank=True) origin_docker_image_name_tag_short = CharField(max_length=513, blank=True) 
origin_docker_image_digest = CharField(max_length=255, blank=True) + origin_endpoint_url = TextField(max_length=2048, blank=True) origin_endpoint_scheme = CharField(max_length=255, blank=True) origin_endpoint_hostname = CharField(max_length=255, blank=True) - origin_endpoint_port = IntegerField( - null=True, validators=[MinValueValidator(0), MaxValueValidator(65535)] - ) + origin_endpoint_port = IntegerField(null=True, validators=[MinValueValidator(0), MaxValueValidator(65535)]) origin_endpoint_path = TextField(max_length=2048, blank=True) origin_endpoint_params = TextField(max_length=2048, blank=True) origin_endpoint_query = TextField(max_length=2048, blank=True) origin_endpoint_fragment = TextField(max_length=2048, blank=True) + origin_service_name = CharField(max_length=255, blank=True) origin_service = ForeignKey(Service, on_delete=PROTECT, null=True) + origin_source_file = CharField(max_length=255, blank=True) - origin_source_line_start = IntegerField( - null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] - ) - origin_source_line_end = IntegerField( - null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] - ) + origin_source_line_start = IntegerField(null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)]) + origin_source_line_end = IntegerField(null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)]) + + origin_source_file_link = CharField(max_length=2048, blank=True) + origin_cloud_provider = CharField(max_length=255, blank=True) origin_cloud_account_subscription_project = CharField(max_length=255, blank=True) origin_cloud_resource = CharField(max_length=255, blank=True) origin_cloud_resource_type = CharField(max_length=255, blank=True) origin_cloud_qualified_resource = CharField(max_length=255, blank=True) + origin_kubernetes_cluster = CharField(max_length=255, blank=True) origin_kubernetes_namespace = CharField(max_length=255, blank=True) origin_kubernetes_resource_type = CharField(max_length=255, blank=True) origin_kubernetes_resource_name = CharField(max_length=255, blank=True) origin_kubernetes_qualified_resource = CharField(max_length=255, blank=True) + cvss3_score = DecimalField(max_digits=3, decimal_places=1, null=True) cvss3_vector = CharField(max_length=255, blank=True) cvss4_score = DecimalField(max_digits=3, decimal_places=1, null=True) cvss4_vector = CharField(max_length=255, blank=True) - cwe = IntegerField( - null=True, validators=[MinValueValidator(1), MaxValueValidator(999999)] - ) + cve_found_in = CharField(max_length=255, blank=True) + + cwe = IntegerField(null=True, validators=[MinValueValidator(1), MaxValueValidator(999999)]) + epss_score = DecimalField( max_digits=6, decimal_places=3, @@ -441,6 +331,7 @@ class Observation(Model): null=True, validators=[MinValueValidator(Decimal(0)), MaxValueValidator(Decimal(100))], ) + found = DateField(null=True) scanner = CharField(max_length=255, blank=True) upload_filename = CharField(max_length=255, blank=True) @@ -450,6 +341,7 @@ class Observation(Model): modified = DateTimeField(auto_now=True) last_observation_log = DateTimeField(default=timezone.now) identity_hash = CharField(max_length=64) + general_rule = ForeignKey( "rules.Rule", related_name="general_rules", @@ -464,24 +356,39 @@ class Observation(Model): null=True, on_delete=PROTECT, ) + + general_rule_rego = ForeignKey( + "rules.Rule", + related_name="general_rules_rego", + blank=True, + null=True, + on_delete=PROTECT, + ) + product_rule_rego = ForeignKey( + "rules.Rule", + 
related_name="product_rules_rego", + blank=True, + null=True, + on_delete=PROTECT, + ) + issue_tracker_issue_id = CharField(max_length=255, blank=True) issue_tracker_issue_closed = BooleanField(default=False) issue_tracker_jira_initial_status = CharField(max_length=255, blank=True) + has_potential_duplicates = BooleanField(default=False) + current_vex_justification = CharField( - max_length=64, choices=VexJustification.VEX_JUSTIFICATION_CHOICES, blank=True - ) - parser_vex_justification = CharField( - max_length=64, choices=VexJustification.VEX_JUSTIFICATION_CHOICES, blank=True - ) - vex_vex_justification = CharField( - max_length=64, choices=VexJustification.VEX_JUSTIFICATION_CHOICES, blank=True + max_length=64, choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, blank=True ) - rule_vex_justification = CharField( - max_length=64, choices=VexJustification.VEX_JUSTIFICATION_CHOICES, blank=True + parser_vex_justification = CharField(max_length=64, choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, blank=True) + vex_vex_justification = CharField(max_length=64, choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, blank=True) + rule_vex_justification = CharField(max_length=64, choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, blank=True) + rule_rego_vex_justification = CharField( + max_length=64, choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, blank=True ) assessment_vex_justification = CharField( - max_length=64, choices=VexJustification.VEX_JUSTIFICATION_CHOICES, blank=True + max_length=64, choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, blank=True ) vex_statement = ForeignKey( "vex.VEX_Statement", @@ -490,8 +397,12 @@ class Observation(Model): null=True, on_delete=SET_NULL, ) + risk_acceptance_expiry_date = DateField(null=True) + update_impact_score = IntegerField(null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)]) + fix_available = BooleanField(null=True) + class Meta: indexes = [ Index(fields=["product", "branch"]), @@ -499,8 +410,10 @@ class Meta: Index(fields=["current_severity"]), Index(fields=["numerical_severity"]), Index(fields=["current_status"]), + Index(fields=["current_priority"]), Index(fields=["vulnerability_id"]), Index(fields=["origin_component_name_version"]), + Index(fields=["origin_component_cyclonedx_bom_link"]), Index(fields=["origin_docker_image_name_tag_short"]), Index(fields=["origin_service_name"]), Index(fields=["origin_endpoint_hostname"]), @@ -510,39 +423,28 @@ class Meta: Index(fields=["last_observation_log"]), Index(fields=["epss_score"]), Index(fields=["scanner"]), + Index(fields=["update_impact_score"]), ] - def __str__(self): + def __str__(self) -> str: return f"{self.product} / {self.title}" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.unsaved_references = [] - self.unsaved_evidences = [] - - def save(self, *args, **kwargs) -> None: - normalize_observation_fields(self) - self.identity_hash = get_identity_hash(self) - set_product_flags(self) - - return super().save(*args, **kwargs) + self.unsaved_references: list[str] = [] + self.unsaved_evidences: list[list[str]] = [] class Observation_Log(Model): - observation = ForeignKey( - Observation, related_name="observation_logs", on_delete=CASCADE - ) - user = ForeignKey( - "access_control.User", related_name="observation_logs", on_delete=PROTECT - ) + observation = ForeignKey(Observation, related_name="observation_logs", on_delete=CASCADE) + user = ForeignKey("access_control.User", 
related_name="observation_logs", on_delete=PROTECT) severity = CharField(max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True) status = CharField(max_length=16, choices=Status.STATUS_CHOICES, blank=True) + priority = IntegerField(validators=[MinValueValidator(1), MaxValueValidator(99)], null=True) comment = TextField(max_length=4096) created = DateTimeField(auto_now_add=True) - vex_justification = CharField( - max_length=64, choices=VexJustification.VEX_JUSTIFICATION_CHOICES, blank=True - ) + vex_justification = CharField(max_length=64, choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, blank=True) assessment_status = CharField( max_length=16, choices=Assessment_Status.ASSESSMENT_STATUS_CHOICES, @@ -612,9 +514,7 @@ class Potential_Duplicate(Model): (POTENTIAL_DUPLICATE_TYPE_SOURCE, POTENTIAL_DUPLICATE_TYPE_SOURCE), ] - observation = ForeignKey( - Observation, related_name="potential_duplicates", on_delete=CASCADE - ) + observation = ForeignKey(Observation, related_name="potential_duplicates", on_delete=CASCADE) potential_duplicate_observation = ForeignKey(Observation, on_delete=CASCADE) type = CharField(max_length=12, choices=POTENTIAL_DUPLICATE_TYPES) @@ -623,3 +523,23 @@ class Meta: "observation", "potential_duplicate_observation", ) + + +class Component(Model): + id = CharField(max_length=32, primary_key=True) + product = ForeignKey(Product, related_name="components", on_delete=DO_NOTHING) + branch = ForeignKey(Branch, related_name="components", on_delete=DO_NOTHING, null=True) + origin_service = ForeignKey(Service, on_delete=DO_NOTHING, null=True) + component_name = CharField(max_length=255) + component_version = CharField(max_length=255, blank=True) + component_name_version = CharField(max_length=513, blank=True) + component_purl = CharField(max_length=255, blank=True) + component_purl_type = CharField(max_length=16, blank=True) + component_cpe = CharField(max_length=255, blank=True) + component_dependencies = TextField(max_length=32768, blank=True) + component_cyclonedx_bom_link = CharField(max_length=512, blank=True) + has_observations = BooleanField() + + class Meta: + db_table = "core_component" + managed = False diff --git a/backend/application/core/queries/branch.py b/backend/application/core/queries/branch.py index e0533712c..e8e159947 100644 --- a/backend/application/core/queries/branch.py +++ b/backend/application/core/queries/branch.py @@ -1,15 +1,20 @@ from typing import Optional -from django.db.models import Exists, OuterRef, Q +from django.db.models import Count, Exists, IntegerField, OuterRef, Q, Subquery +from django.db.models.functions import Coalesce from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import ( Branch, + Observation, Product, Product_Authorization_Group_Member, Product_Member, ) +from application.core.types import Severity, Status +from application.licenses.models import License_Component +from application.licenses.types import License_Policy_Evaluation_Result def get_branch_by_id(product: Product, branch_id: int) -> Optional[Branch]: @@ -26,43 +31,34 @@ def get_branch_by_name(product: Product, name: str) -> Optional[Branch]: return None -def get_branches() -> QuerySet[Branch]: +def get_branches(with_annotations: bool = False) -> QuerySet[Branch]: user = get_current_user() if user is None: return Branch.objects.none() branches = Branch.objects.all() + branches = 
_add_annotations(branches, with_annotations) if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("product_id"), user=user - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product__product_group"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, ) branches = branches.annotate( product__member=Exists(product_members), product__product_group__member=Exists(product_group_members), authorization_group_member=Exists(product_authorization_group_members), - product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), ) branches = branches.filter( @@ -75,5 +71,64 @@ def get_branches() -> QuerySet[Branch]: return branches -def get_branches_by_product(product: Product) -> QuerySet[Branch]: - return Branch.objects.filter(product=product) +def _add_annotations(queryset: QuerySet, with_annotations: bool) -> QuerySet: + if not with_annotations: + return queryset + + subquery_active_critical = _get_observation_subquery(Severity.SEVERITY_CRITICAL) + subquery_active_high = _get_observation_subquery(Severity.SEVERITY_HIGH) + subquery_active_medium = _get_observation_subquery(Severity.SEVERITY_MEDIUM) + subquery_active_low = _get_observation_subquery(Severity.SEVERITY_LOW) + subquery_active_none = _get_observation_subquery(Severity.SEVERITY_NONE) + subquery_active_unknown = _get_observation_subquery(Severity.SEVERITY_UNKNOWN) + + subquery_license_forbidden = _get_license_subquery(License_Policy_Evaluation_Result.RESULT_FORBIDDEN) + subquery_license_review_required = _get_license_subquery(License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED) + subquery_license_unknown = _get_license_subquery(License_Policy_Evaluation_Result.RESULT_UNKNOWN) + subquery_license_allowed = _get_license_subquery(License_Policy_Evaluation_Result.RESULT_ALLOWED) + subquery_license_ignored = _get_license_subquery(License_Policy_Evaluation_Result.RESULT_IGNORED) + + queryset = queryset.annotate( + active_critical_observation_count=Coalesce(subquery_active_critical, 0), + active_high_observation_count=Coalesce(subquery_active_high, 0), + active_medium_observation_count=Coalesce(subquery_active_medium, 0), + active_low_observation_count=Coalesce(subquery_active_low, 0), + active_none_observation_count=Coalesce(subquery_active_none, 0), + active_unknown_observation_count=Coalesce(subquery_active_unknown, 0), + forbidden_licenses_count=Coalesce(subquery_license_forbidden, 0), + review_required_licenses_count=Coalesce(subquery_license_review_required, 0), + 
unknown_licenses_count=Coalesce(subquery_license_unknown, 0), + allowed_licenses_count=Coalesce(subquery_license_allowed, 0), + ignored_licenses_count=Coalesce(subquery_license_ignored, 0), + ) + + return queryset + + +def _get_observation_subquery(severity: str) -> Subquery: + return Subquery( + Observation.objects.filter( + branch=OuterRef("pk"), + current_status__in=Status.STATUS_ACTIVE, + current_severity=severity, + ) + .order_by() + .values("branch") + .annotate(count=Count("pk")) + .values("count"), + output_field=IntegerField(), + ) + + +def _get_license_subquery(evaluation_result: str) -> Subquery: + return Subquery( + License_Component.objects.filter( + branch=OuterRef("pk"), + evaluation_result=evaluation_result, + ) + .order_by() + .values("branch") + .annotate(count=Count("pk")) + .values("count"), + output_field=IntegerField(), + ) diff --git a/backend/application/core/queries/component.py b/backend/application/core/queries/component.py new file mode 100644 index 000000000..b38e769ad --- /dev/null +++ b/backend/application/core/queries/component.py @@ -0,0 +1,174 @@ +from typing import Optional + +from django.conf import settings +from django.db import connection +from django.db.models import Exists, OuterRef, Q +from django.db.models.query import QuerySet + +from application.access_control.services.current_user import get_current_user +from application.core.models import ( + Component, + Product_Authorization_Group_Member, + Product_Member, +) + + +def get_component_by_id(component_id: str) -> Optional[Component]: + _create_component_view() + + try: + return Component.objects.get(id=component_id) + except Component.DoesNotExist: + return None + + +def get_components() -> QuerySet[Component]: + _create_component_view() + + user = get_current_user() + + if user is None: + return Component.objects.none() + + components = Component.objects.all() + + if not user.is_superuser: + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) + + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), + authorization_group__users=user, + ) + + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, + ) + + components = components.annotate( + product__member=Exists(product_members), + product__product_group__member=Exists(product_group_members), + authorization_group_member=Exists(product_authorization_group_members), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), + ) + + components = components.filter( + Q(product__member=True) + | Q(product__product_group__member=True) + | Q(authorization_group_member=True) + | Q(product_group_authorization_group_member=True) + ) + + return components + + +# The component view has to be created after all other migrations. Otherwise some alterations of +# observation lead to errors, due to https://www.sqlite.org/lang_altertable.html#caution. +# It will be created here before the first query runs. 
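
Note: because the core_component view is (re)created at runtime rather than by a migration, one way to guard against drift between CREATE_COMPONENT_VIEW and the unmanaged Component model is a small regression test. This is a minimal sketch only, assuming a pytest-django setup; the test name and location are illustrative and not part of this change, and it relies on the _create_component_view() helper defined just below.

import pytest
from django.db import connection

from application.core.models import Component
from application.core.queries.component import _create_component_view


@pytest.mark.django_db
def test_component_view_matches_model():
    # Recreate the view the same way get_components() does before its first query.
    _create_component_view()

    # The view must expose at least every column the unmanaged model declares.
    with connection.cursor() as cursor:
        cursor.execute("SELECT * FROM core_component LIMIT 1")
        view_columns = {description[0] for description in cursor.description}

    model_columns = {field.column for field in Component._meta.fields}
    assert model_columns <= view_columns
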
+ +DROP_COMPONENT_VIEW = "DROP VIEW IF EXISTS core_component;" + +CREATE_COMPONENT_VIEW = """ +CREATE VIEW core_component AS +WITH CombinedData AS ( + SELECT + product_id as product_id, + branch_id as branch_id, + origin_service_id as origin_service_id, + origin_component_name AS component_name, + origin_component_version AS component_version, + origin_component_name_version AS component_name_version, + origin_component_purl AS component_purl, + origin_component_purl_type AS component_purl_type, + origin_component_cpe AS component_cpe, + origin_component_dependencies AS component_dependencies, + origin_component_cyclonedx_bom_link AS component_cyclonedx_bom_link + FROM core_observation + WHERE origin_component_name_version != '' + + UNION + + SELECT + product_id as product_id, + branch_id as branch_id, + origin_service_id as origin_service_id, + component_name AS component_name, + component_version AS component_version, + component_name_version AS component_name_version, + component_purl AS component_purl, + component_purl_type AS component_purl_type, + component_cpe AS component_cpe, + component_dependencies AS component_dependencies, + component_cyclonedx_bom_link AS component_cyclonedx_bom_link + FROM licenses_license_component +), +ObservationFlag AS ( + SELECT DISTINCT + product_id, + branch_id, + origin_service_id, + origin_component_name_version AS component_name_version, + origin_component_purl AS component_purl, + origin_component_cpe AS component_cpe, + origin_component_dependencies AS component_dependencies, + origin_component_cyclonedx_bom_link AS component_cyclonedx_bom_link, + TRUE AS has_observation + FROM core_observation + WHERE current_status IN ('Open', 'Affected', 'In review') +) +SELECT + MD5( + CONCAT( + CAST(COALESCE(cd.product_id, 111) as CHAR(255)), + CAST(COALESCE(cd.branch_id, 222) as CHAR(255)), + CAST(COALESCE(cd.origin_service_id, 333) as CHAR(255)), + COALESCE(cd.component_name_version, 'no_name_version'), + COALESCE(cd.component_purl, 'no_purl'), + COALESCE(cd.component_cpe, 'no_cpe'), + COALESCE(cd.component_dependencies, 'no_dependencies'), + COALESCE(cd.component_cyclonedx_bom_link, 'component_cyclonedx_bom_link') + ) + ) AS id, + cd.product_id as product_id, + cd.branch_id as branch_id, + cd.origin_service_id as origin_service_id, + cd.component_name AS component_name, + cd.component_version AS component_version, + cd.component_name_version AS component_name_version, + cd.component_purl AS component_purl, + cd.component_purl_type AS component_purl_type, + cd.component_cpe AS component_cpe, + cd.component_dependencies AS component_dependencies, + cd.component_cyclonedx_bom_link AS component_cyclonedx_bom_link, + COALESCE(ObservationFlag.has_observation, FALSE) AS has_observations +FROM CombinedData cd +LEFT JOIN ObservationFlag ON + cd.product_id = ObservationFlag.product_id + AND ( + (cd.branch_id = ObservationFlag.branch_id) IS TRUE OR + (cd.branch_id IS NULL AND ObservationFlag.branch_id IS NULL) + ) + AND ( + (cd.origin_service_id = ObservationFlag.origin_service_id) IS TRUE OR + (cd.origin_service_id IS NULL AND ObservationFlag.origin_service_id IS NULL) + ) + AND cd.component_name_version = ObservationFlag.component_name_version + AND cd.component_purl = ObservationFlag.component_purl + AND cd.component_cpe = ObservationFlag.component_cpe + AND cd.component_dependencies = ObservationFlag.component_dependencies + AND cd.component_cyclonedx_bom_link = ObservationFlag.component_cyclonedx_bom_link +; +""" + + +class ComponentView: + created = False + + 
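
Note: the view's synthetic primary key is an MD5 hash over the identifying columns, with fixed fallbacks for NULL values, so the id stays stable whenever the view is dropped and recreated. For reference, the calculation can be mirrored in Python as sketched below; this is illustrative only, since the exact textual rendering of the CAST and CONCAT may differ slightly between database backends, and the helper name is hypothetical.

import hashlib
from typing import Optional


def component_view_id(
    product_id: Optional[int],
    branch_id: Optional[int],
    origin_service_id: Optional[int],
    component_name_version: Optional[str],
    component_purl: Optional[str],
    component_cpe: Optional[str],
    component_dependencies: Optional[str],
    component_cyclonedx_bom_link: Optional[str],
) -> str:
    # Mirrors MD5(CONCAT(COALESCE(...))) in CREATE_COMPONENT_VIEW.
    # COALESCE replaces NULL only, so empty strings are hashed as-is.
    parts = [
        str(product_id) if product_id is not None else "111",
        str(branch_id) if branch_id is not None else "222",
        str(origin_service_id) if origin_service_id is not None else "333",
        component_name_version if component_name_version is not None else "no_name_version",
        component_purl if component_purl is not None else "no_purl",
        component_cpe if component_cpe is not None else "no_cpe",
        component_dependencies if component_dependencies is not None else "no_dependencies",
        component_cyclonedx_bom_link
        if component_cyclonedx_bom_link is not None
        else "component_cyclonedx_bom_link",
    ]
    return hashlib.md5("".join(parts).encode("utf-8")).hexdigest()
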
+def _create_component_view() -> None: + if not ComponentView.created or settings.DATABASES["default"]["ENGINE"] == "django.db.backends.sqlite3": + ComponentView.created = True + with connection.cursor() as cursor: + cursor.execute(DROP_COMPONENT_VIEW) + cursor.execute(CREATE_COMPONENT_VIEW) diff --git a/backend/application/core/queries/observation.py b/backend/application/core/queries/observation.py index d3dee652b..34a9936a3 100644 --- a/backend/application/core/queries/observation.py +++ b/backend/application/core/queries/observation.py @@ -3,7 +3,7 @@ from django.db.models import Exists, OuterRef, Q from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import ( Branch, Evidence, @@ -13,6 +13,7 @@ Product, Product_Authorization_Group_Member, Product_Member, + Service, ) @@ -32,34 +33,24 @@ def get_observations() -> QuerySet[Observation]: observations = Observation.objects.all() if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("product_id"), user=user - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product__product_group"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, ) observations = observations.annotate( product__member=Exists(product_members), product__product_group__member=Exists(product_group_members), authorization_group_member=Exists(product_authorization_group_members), - product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), ) observations = observations.filter( @@ -75,14 +66,36 @@ def get_observations() -> QuerySet[Observation]: def get_observations_for_vulnerability_check( product: Product, branch: Optional[Branch], + service: Optional[Service], filename: str, api_configuration_name: str, ) -> QuerySet[Observation]: + if filename or api_configuration_name: + return Observation.objects.filter( + product=product, + branch=branch, + origin_service=service, + upload_filename=filename, + api_configuration_name=api_configuration_name, + ) + + if service: + return Observation.objects.filter( + product=product, + branch=branch, + origin_service=service, + upload_filename="", + api_configuration_name="", + origin_service__name=service, + ) + return Observation.objects.filter( product=product, branch=branch, - upload_filename=filename, - api_configuration_name=api_configuration_name, + origin_service=service, + upload_filename="", + 
api_configuration_name="", + origin_service__isnull=True, ) @@ -95,33 +108,25 @@ def get_evidences() -> QuerySet[Evidence]: evidences = Evidence.objects.all() if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("observation__product_id"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("observation__product_id"), user=user) product_group_members = Product_Member.objects.filter( product=OuterRef("observation__product__product_group"), user=user ) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("observation__product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("observation__product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("observation__product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("observation__product__product_group"), + authorization_group__users=user, ) evidences = evidences.annotate( observation__product__member=Exists(product_members), observation__product__product_group__member=Exists(product_group_members), - observation__product__authorization_group_member=Exists( - product_authorization_group_members - ), + observation__product__authorization_group_member=Exists(product_authorization_group_members), observation__product__product_group_authorization_group_member=Exists( product_group_authorization_group_members ), @@ -146,33 +151,25 @@ def get_potential_duplicates() -> QuerySet[Potential_Duplicate]: potential_duplicates = Potential_Duplicate.objects.all() if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("observation__product_id"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("observation__product_id"), user=user) product_group_members = Product_Member.objects.filter( product=OuterRef("observation__product__product_group"), user=user ) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("observation__product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("observation__product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("observation__product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("observation__product__product_group"), + authorization_group__users=user, ) potential_duplicates = potential_duplicates.annotate( observation__product__member=Exists(product_members), observation__product__product_group__member=Exists(product_group_members), - observation__product__authorization_group_member=Exists( - product_authorization_group_members - ), + observation__product__authorization_group_member=Exists(product_authorization_group_members), observation__product__product_group_authorization_group_member=Exists( product_group_authorization_group_members ), @@ -204,33 +201,25 @@ def 
get_observation_logs() -> QuerySet[Observation_Log]: observation_logs = Observation_Log.objects.all() if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("observation__product_id"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("observation__product_id"), user=user) product_group_members = Product_Member.objects.filter( product=OuterRef("observation__product__product_group"), user=user ) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("observation__product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("observation__product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("observation__product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("observation__product__product_group"), + authorization_group__users=user, ) observation_logs = observation_logs.annotate( observation__product__member=Exists(product_members), observation__product__product_group__member=Exists(product_group_members), - observation__product__authorization_group_member=Exists( - product_authorization_group_members - ), + observation__product__authorization_group_member=Exists(product_authorization_group_members), observation__product__product_group_authorization_group_member=Exists( product_group_authorization_group_members ), @@ -246,18 +235,19 @@ def get_observation_logs() -> QuerySet[Observation_Log]: return observation_logs -def get_current_observation_log(observation: Observation): +def get_current_observation_log(observation: Observation) -> Optional[Observation_Log]: try: return Observation_Log.objects.filter(observation=observation).latest("created") except Observation_Log.DoesNotExist: return None -def get_current_modifying_observation_log(observation: Observation): +def get_current_modifying_observation_log( + observation: Observation, +) -> Optional[Observation_Log]: try: return Observation_Log.objects.filter( - Q(observation_id=observation.id) - & (~Q(status="") | ~Q(severity="") | ~Q(vex_justification="")) + Q(observation_id=observation.id) & (~Q(status="") | ~Q(severity="") | ~Q(vex_justification="")) ).latest("created") except Observation_Log.DoesNotExist: return None diff --git a/backend/application/core/queries/product.py b/backend/application/core/queries/product.py index 19360ccdb..3e5db7807 100644 --- a/backend/application/core/queries/product.py +++ b/backend/application/core/queries/product.py @@ -1,10 +1,11 @@ from typing import Optional -from django.db.models import Count, Exists, F, IntegerField, OuterRef, Q, Subquery +from django.db.models import Count, Exists, IntegerField, OuterRef, Q, Subquery from django.db.models.functions import Coalesce from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user +from application.commons.models import Settings from application.core.models import ( Observation, Product, @@ -17,12 +18,14 @@ def get_product_by_id( - product_id: int, is_product_group: bool = None + product_id: int, is_product_group: bool = None, with_annotations: bool = False 
) -> Optional[Product]: try: if is_product_group is None: - return Product.objects.get(id=product_id) - return Product.objects.get(id=product_id, is_product_group=is_product_group) + return _add_annotations(Product.objects.all(), False, False).get(id=product_id) + return _add_annotations(Product.objects.all(), is_product_group, with_annotations).get( + id=product_id, is_product_group=is_product_group + ) except Product.DoesNotExist: return None @@ -36,9 +39,7 @@ def get_product_by_name(name: str, is_product_group: bool = None) -> Optional[Pr return None -def get_products( - is_product_group: Optional[bool] = None, with_annotations: Optional[bool] = False -) -> QuerySet[Product]: +def get_products(is_product_group: bool = None, with_annotations: bool = False) -> QuerySet[Product]: user = get_current_user() if user is None: @@ -46,75 +47,28 @@ def get_products( products = Product.objects.all() - if not is_product_group and with_annotations: - subquery_open_critical = _get_observation_subquery(Severity.SEVERITY_CRITICAL) - subquery_open_high = _get_observation_subquery(Severity.SEVERITY_HIGH) - subquery_open_medium = _get_observation_subquery(Severity.SEVERITY_MEDIUM) - subquery_open_low = _get_observation_subquery(Severity.SEVERITY_LOW) - subquery_open_none = _get_observation_subquery(Severity.SEVERITY_NONE) - subquery_open_unknown = _get_observation_subquery(Severity.SEVERITY_UNKNOWN) - - subquery_license_forbidden = _get_license_subquery( - License_Policy_Evaluation_Result.RESULT_FORBIDDEN - ) - subquery_license_review_required = _get_license_subquery( - License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED - ) - subquery_license_unknown = _get_license_subquery( - License_Policy_Evaluation_Result.RESULT_UNKNOWN - ) - subquery_license_allowed = _get_license_subquery( - License_Policy_Evaluation_Result.RESULT_ALLOWED - ) - subquery_license_ignored = _get_license_subquery( - License_Policy_Evaluation_Result.RESULT_IGNORED - ) - - products = products.annotate( - open_critical_observation_count=Coalesce(subquery_open_critical, 0), - open_high_observation_count=Coalesce(subquery_open_high, 0), - open_medium_observation_count=Coalesce(subquery_open_medium, 0), - open_low_observation_count=Coalesce(subquery_open_low, 0), - open_none_observation_count=Coalesce(subquery_open_none, 0), - open_unknown_observation_count=Coalesce(subquery_open_unknown, 0), - forbidden_licenses_count=Coalesce(subquery_license_forbidden, 0), - review_required_licenses_count=Coalesce( - subquery_license_review_required, 0 - ), - unknown_licenses_count=Coalesce(subquery_license_unknown, 0), - allowed_licenses_count=Coalesce(subquery_license_allowed, 0), - ignored_licenses_count=Coalesce(subquery_license_ignored, 0), - ) + if is_product_group is not None: + products = _add_annotations(products, is_product_group, with_annotations) if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("pk"), user=user - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product_group"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("pk"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product_group"), user=user) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("pk"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("pk"), + 
authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_group"), + authorization_group__users=user, ) products = products.annotate( member=Exists(product_members), product_group_member=Exists(product_group_members), authorization_group_member=Exists(product_authorization_group_members), - product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), ) products = products.filter( Q(member=True) @@ -129,8 +83,101 @@ def get_products( return products -def _get_observation_subquery(severity: str) -> Subquery: - branch_filter = Q(branch=F("product__repository_default_branch")) | ( +def _add_annotations(queryset: QuerySet, is_product_group: bool, with_annotations: bool) -> QuerySet: + if not with_annotations: + return queryset + + queryset = _add_observation_annotations(queryset, is_product_group) + queryset = _add_license_annotations(queryset, is_product_group) + return queryset + + +def _add_observation_annotations(queryset: QuerySet, is_product_group: bool) -> QuerySet: + subquery_active_critical = ( + _get_product_group_observation_subquery(Severity.SEVERITY_CRITICAL) + if is_product_group + else _get_product_observation_subquery(Severity.SEVERITY_CRITICAL) + ) + subquery_active_high = ( + _get_product_group_observation_subquery(Severity.SEVERITY_HIGH) + if is_product_group + else _get_product_observation_subquery(Severity.SEVERITY_HIGH) + ) + subquery_active_medium = ( + _get_product_group_observation_subquery(Severity.SEVERITY_MEDIUM) + if is_product_group + else _get_product_observation_subquery(Severity.SEVERITY_MEDIUM) + ) + subquery_active_low = ( + _get_product_group_observation_subquery(Severity.SEVERITY_LOW) + if is_product_group + else _get_product_observation_subquery(Severity.SEVERITY_LOW) + ) + subquery_active_none = ( + _get_product_group_observation_subquery(Severity.SEVERITY_NONE) + if is_product_group + else _get_product_observation_subquery(Severity.SEVERITY_NONE) + ) + subquery_active_unknown = ( + _get_product_group_observation_subquery(Severity.SEVERITY_UNKNOWN) + if is_product_group + else _get_product_observation_subquery(Severity.SEVERITY_UNKNOWN) + ) + + queryset = queryset.annotate( + active_critical_observation_count=Coalesce(subquery_active_critical, 0), + active_high_observation_count=Coalesce(subquery_active_high, 0), + active_medium_observation_count=Coalesce(subquery_active_medium, 0), + active_low_observation_count=Coalesce(subquery_active_low, 0), + active_none_observation_count=Coalesce(subquery_active_none, 0), + active_unknown_observation_count=Coalesce(subquery_active_unknown, 0), + ) + + return queryset + + +def _add_license_annotations(queryset: QuerySet, is_product_group: bool) -> QuerySet: + settings = Settings.load() + if settings.feature_license_management: + subquery_license_forbidden = ( + _get_product_group_license_subquery(License_Policy_Evaluation_Result.RESULT_FORBIDDEN) + if is_product_group + else _get_product_license_subquery(License_Policy_Evaluation_Result.RESULT_FORBIDDEN) + ) + subquery_license_review_required = ( + _get_product_group_license_subquery(License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED) + if is_product_group + 
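Illustrative usage of the Coalesce-backed observation annotations added above. The summary helper is a hypothetical consumer; the annotation names (active_*_observation_count) are taken from the diff and default to 0 for products without matching observations.

from application.core.queries.product import get_products


def product_severity_summary() -> dict:
    # with_annotations=True attaches the per-severity counts; the license
    # counts are only present when the license management feature is enabled.
    summary = {}
    for product in get_products(is_product_group=False, with_annotations=True):
        summary[product.name] = {
            "critical": product.active_critical_observation_count,
            "high": product.active_high_observation_count,
            "medium": product.active_medium_observation_count,
            "low": product.active_low_observation_count,
        }
    return summary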
else _get_product_license_subquery(License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED) + ) + subquery_license_unknown = ( + _get_product_group_license_subquery(License_Policy_Evaluation_Result.RESULT_UNKNOWN) + if is_product_group + else _get_product_license_subquery(License_Policy_Evaluation_Result.RESULT_UNKNOWN) + ) + subquery_license_allowed = ( + _get_product_group_license_subquery(License_Policy_Evaluation_Result.RESULT_ALLOWED) + if is_product_group + else _get_product_license_subquery(License_Policy_Evaluation_Result.RESULT_ALLOWED) + ) + subquery_license_ignored = ( + _get_product_group_license_subquery(License_Policy_Evaluation_Result.RESULT_IGNORED) + if is_product_group + else _get_product_license_subquery(License_Policy_Evaluation_Result.RESULT_IGNORED) + ) + + queryset = queryset.annotate( + forbidden_licenses_count=Coalesce(subquery_license_forbidden, 0), + review_required_licenses_count=Coalesce(subquery_license_review_required, 0), + unknown_licenses_count=Coalesce(subquery_license_unknown, 0), + allowed_licenses_count=Coalesce(subquery_license_allowed, 0), + ignored_licenses_count=Coalesce(subquery_license_ignored, 0), + ) + + return queryset + + +def _get_product_observation_subquery(severity: str) -> Subquery: + branch_filter = Q(branch__is_default_branch=True) | ( Q(branch__isnull=True) & Q(product__repository_default_branch__isnull=True) ) @@ -138,7 +185,7 @@ def _get_observation_subquery(severity: str) -> Subquery: Observation.objects.filter( branch_filter, product=OuterRef("pk"), - current_status=Status.STATUS_OPEN, + current_status__in=Status.STATUS_ACTIVE, current_severity=severity, ) .order_by() @@ -149,8 +196,28 @@ def _get_observation_subquery(severity: str) -> Subquery: ) -def _get_license_subquery(evaluation_result: str) -> Subquery: - branch_filter = Q(branch=F("product__repository_default_branch")) | ( +def _get_product_group_observation_subquery(severity: str) -> Subquery: + branch_filter = Q(branch__is_default_branch=True) | ( + Q(branch__isnull=True) & Q(product__repository_default_branch__isnull=True) + ) + + return Subquery( + Observation.objects.filter( + branch_filter, + product__product_group=OuterRef("pk"), + current_status__in=Status.STATUS_ACTIVE, + current_severity=severity, + ) + .order_by() + .values("product__product_group") + .annotate(count=Count("pk")) + .values("count"), + output_field=IntegerField(), + ) + + +def _get_product_license_subquery(evaluation_result: str) -> Subquery: + branch_filter = Q(branch__is_default_branch=True) | ( Q(branch__isnull=True) & Q(product__repository_default_branch__isnull=True) ) @@ -166,3 +233,22 @@ def _get_license_subquery(evaluation_result: str) -> Subquery: .values("count"), output_field=IntegerField(), ) + + +def _get_product_group_license_subquery(evaluation_result: str) -> Subquery: + branch_filter = Q(branch__is_default_branch=True) | ( + Q(branch__isnull=True) & Q(product__repository_default_branch__isnull=True) + ) + + return Subquery( + License_Component.objects.filter( + branch_filter, + product__product_group=OuterRef("pk"), + evaluation_result=evaluation_result, + ) + .order_by() + .values("product__product_group") + .annotate(count=Count("pk")) + .values("count"), + output_field=IntegerField(), + ) diff --git a/backend/application/core/queries/product_member.py b/backend/application/core/queries/product_member.py index 5430c1971..3f384da5c 100644 --- a/backend/application/core/queries/product_member.py +++ b/backend/application/core/queries/product_member.py @@ -4,7 +4,7 @@ from 
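The subqueries above all follow the same counting idiom: group the correlated rows by the outer key, count them, and expose exactly one column so the result can be used inside annotate(); Coalesce(…, 0) then covers products without any matching rows. A stripped-down, hypothetical version of that idiom:

from django.db.models import Count, IntegerField, OuterRef, Subquery
from django.db.models.functions import Coalesce

from application.core.models import Observation, Product
from application.core.types import Severity, Status


def products_with_critical_count():
    critical = (
        Observation.objects.filter(
            product=OuterRef("pk"),
            current_status__in=Status.STATUS_ACTIVE,
            current_severity=Severity.SEVERITY_CRITICAL,
        )
        .order_by()                   # drop any default ordering before grouping
        .values("product")            # group by the correlated product
        .annotate(count=Count("pk"))  # count observations per product
        .values("count")              # single column -> usable as a scalar subquery
    )
    return Product.objects.annotate(
        critical_count=Coalesce(Subquery(critical, output_field=IntegerField()), 0)
    )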
django.db.models.query import QuerySet from application.access_control.models import Authorization_Group, User -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import ( Product, Product_Authorization_Group_Member, @@ -29,9 +29,7 @@ def get_product_members() -> QuerySet[Product_Member]: if user is None: return Product_Member.objects.none() - product_members = Product_Member.objects.exclude( - user__username__startswith="-product-" - ) + product_members = Product_Member.objects.exclude(user__username__startswith="-product-") if user.is_superuser: return product_members @@ -44,24 +42,18 @@ def get_product_authorization_group_member( product: Product, authorization_group: Authorization_Group ) -> Optional[Product_Authorization_Group_Member]: try: - return Product_Authorization_Group_Member.objects.get( - product=product, authorization_group=authorization_group - ) + return Product_Authorization_Group_Member.objects.get(product=product, authorization_group=authorization_group) except Product_Authorization_Group_Member.DoesNotExist: return None -def get_product_authorization_group_members() -> ( - QuerySet[Product_Authorization_Group_Member] -): +def get_product_authorization_group_members() -> QuerySet[Product_Authorization_Group_Member]: user = get_current_user() if user is None: return Product_Authorization_Group_Member.objects.none() - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.all() - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.all() if user.is_superuser: return product_authorization_group_members @@ -70,9 +62,7 @@ def get_product_authorization_group_members() -> ( return product_authorization_group_members.filter(product__in=products) -def get_highest_role_of_product_authorization_group_members_for_user( - product: Product, user: User = None -) -> int: +def get_highest_role_of_product_authorization_group_members_for_user(product: Product, user: User = None) -> int: if not user: user = get_current_user() diff --git a/backend/application/core/queries/service.py b/backend/application/core/queries/service.py index 63fc89b6f..4c0d8607a 100644 --- a/backend/application/core/queries/service.py +++ b/backend/application/core/queries/service.py @@ -1,15 +1,20 @@ from typing import Optional -from django.db.models import Exists, OuterRef, Q +from django.db.models import Count, Exists, IntegerField, OuterRef, Q, Subquery +from django.db.models.functions import Coalesce from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import ( + Observation, Product, Product_Authorization_Group_Member, Product_Member, Service, ) +from application.core.types import Severity, Status +from application.licenses.models import License_Component +from application.licenses.types import License_Policy_Evaluation_Result def get_service_by_id(product: Product, service_id: int) -> Optional[Service]: @@ -26,43 +31,34 @@ def get_service_by_name(product: Product, name: str) -> Optional[Service]: return None -def get_services() -> QuerySet[Service]: +def get_services(with_annotations: bool = False) -> QuerySet[Service]: user = get_current_user() if user is None: return Service.objects.none() services = Service.objects.all() + services = 
_add_annotations(services, with_annotations) if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("product_id"), user=user - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product__product_group"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, ) services = services.annotate( product__member=Exists(product_members), product__product_group__member=Exists(product_group_members), authorization_group_member=Exists(product_authorization_group_members), - product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), ) services = services.filter( @@ -75,5 +71,74 @@ def get_services() -> QuerySet[Service]: return services -def get_services_by_product(product: Product) -> QuerySet[Service]: - return Service.objects.filter(product=product) +def _add_annotations(queryset: QuerySet, with_annotations: bool) -> QuerySet: + if not with_annotations: + return queryset + + subquery_active_critical = _get_observation_subquery(Severity.SEVERITY_CRITICAL) + subquery_active_high = _get_observation_subquery(Severity.SEVERITY_HIGH) + subquery_active_medium = _get_observation_subquery(Severity.SEVERITY_MEDIUM) + subquery_active_low = _get_observation_subquery(Severity.SEVERITY_LOW) + subquery_active_none = _get_observation_subquery(Severity.SEVERITY_NONE) + subquery_active_unknown = _get_observation_subquery(Severity.SEVERITY_UNKNOWN) + + subquery_license_forbidden = _get_license_subquery(License_Policy_Evaluation_Result.RESULT_FORBIDDEN) + subquery_license_review_required = _get_license_subquery(License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED) + subquery_license_unknown = _get_license_subquery(License_Policy_Evaluation_Result.RESULT_UNKNOWN) + subquery_license_allowed = _get_license_subquery(License_Policy_Evaluation_Result.RESULT_ALLOWED) + subquery_license_ignored = _get_license_subquery(License_Policy_Evaluation_Result.RESULT_IGNORED) + + queryset = queryset.annotate( + active_critical_observation_count=Coalesce(subquery_active_critical, 0), + active_high_observation_count=Coalesce(subquery_active_high, 0), + active_medium_observation_count=Coalesce(subquery_active_medium, 0), + active_low_observation_count=Coalesce(subquery_active_low, 0), + active_none_observation_count=Coalesce(subquery_active_none, 0), + active_unknown_observation_count=Coalesce(subquery_active_unknown, 0), + forbidden_licenses_count=Coalesce(subquery_license_forbidden, 0), + review_required_licenses_count=Coalesce(subquery_license_review_required, 0), + 
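get_services() now accepts with_annotations and decorates each service with the same active_* observation counts and license counts. A small, assumed consumer might look like this:

from application.core.queries.service import get_services


def services_needing_attention() -> list:
    # Keep only services that still have active critical or high observations;
    # the annotations are Coalesce'd to 0 for services without findings.
    return [
        service
        for service in get_services(with_annotations=True)
        if service.active_critical_observation_count or service.active_high_observation_count
    ]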
unknown_licenses_count=Coalesce(subquery_license_unknown, 0), + allowed_licenses_count=Coalesce(subquery_license_allowed, 0), + ignored_licenses_count=Coalesce(subquery_license_ignored, 0), + ) + + return queryset + + +def _get_observation_subquery(severity: str) -> Subquery: + branch_filter = Q(branch__is_default_branch=True) | ( + Q(branch__isnull=True) & Q(product__repository_default_branch__isnull=True) + ) + + return Subquery( + Observation.objects.filter( + branch_filter, + origin_service=OuterRef("pk"), + current_status__in=Status.STATUS_ACTIVE, + current_severity=severity, + ) + .order_by() + .values("origin_service") + .annotate(count=Count("pk")) + .values("count"), + output_field=IntegerField(), + ) + + +def _get_license_subquery(evaluation_result: str) -> Subquery: + branch_filter = Q(branch__is_default_branch=True) | ( + Q(branch__isnull=True) & Q(product__repository_default_branch__isnull=True) + ) + + return Subquery( + License_Component.objects.filter( + branch_filter, + origin_service=OuterRef("pk"), + evaluation_result=evaluation_result, + ) + .order_by() + .values("origin_service") + .annotate(count=Count("pk")) + .values("count"), + output_field=IntegerField(), + ) diff --git a/backend/application/core/services/assessment.py b/backend/application/core/services/assessment.py index a46e47723..e732db6f5 100644 --- a/backend/application/core/services/assessment.py +++ b/backend/application/core/services/assessment.py @@ -4,7 +4,7 @@ from django.utils import timezone from rest_framework.exceptions import ValidationError -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import Observation, Observation_Log, Product from application.core.services.observation import ( get_current_severity, @@ -32,18 +32,11 @@ def save_assessment( new_risk_acceptance_expiry_date: Optional[date], ) -> None: - log_severity = ( - new_severity - if new_severity and new_severity != observation.current_severity - else "" - ) - log_status = ( - new_status if new_status and new_status != observation.current_status else "" - ) + log_severity = new_severity if new_severity and new_severity != observation.current_severity else "" + log_status = new_status if new_status and new_status != observation.current_status else "" log_vex_justification = ( new_vex_justification - if new_vex_justification - and new_vex_justification != observation.current_vex_justification + if new_vex_justification and new_vex_justification != observation.current_vex_justification else "" ) log_risk_acceptance_expiry_date = ( @@ -59,10 +52,7 @@ def save_assessment( and ( (log_severity and log_severity != observation.current_severity) or (log_status and log_status != observation.current_status) - or ( - log_vex_justification - and log_vex_justification != observation.current_vex_justification - ) + or (log_vex_justification and log_vex_justification != observation.current_vex_justification) ) and new_status != Status.STATUS_IN_REVIEW else Assessment_Status.ASSESSMENT_STATUS_AUTO_APPROVED @@ -125,33 +115,23 @@ def _update_observation( previous_current_vex_justification = observation.current_vex_justification previous_assessment_vex_justification = observation.assessment_vex_justification - if ( - new_vex_justification - and new_vex_justification != observation.current_vex_justification - ): + if new_vex_justification and new_vex_justification != observation.current_vex_justification: 
observation.assessment_vex_justification = new_vex_justification - observation.current_vex_justification = get_current_vex_justification( - observation - ) + observation.current_vex_justification = get_current_vex_justification(observation) previous_risk_acceptance_expiry_date = observation.risk_acceptance_expiry_date observation.risk_acceptance_expiry_date = ( - new_risk_acceptance_expiry_date - if observation.current_status == Status.STATUS_RISK_ACCEPTED - else None + new_risk_acceptance_expiry_date if observation.current_status == Status.STATUS_RISK_ACCEPTED else None ) if ( - previous_current_severity # pylint: disable=too-many-boolean-expressions - != observation.current_severity + previous_current_severity != observation.current_severity # pylint: disable=too-many-boolean-expressions or previous_assessment_severity != observation.assessment_severity or previous_current_status != observation.current_status or previous_assessment_status != observation.assessment_status or previous_current_vex_justification != observation.current_vex_justification - or previous_assessment_vex_justification - != observation.assessment_vex_justification - or previous_risk_acceptance_expiry_date - != observation.risk_acceptance_expiry_date + or previous_assessment_vex_justification != observation.assessment_vex_justification + or previous_risk_acceptance_expiry_date != observation.risk_acceptance_expiry_date ): observation.save() @@ -170,15 +150,11 @@ def remove_assessment(observation: Observation, comment: str) -> bool: observation.current_severity = get_current_severity(observation) previous_status = observation.current_status observation.current_status = get_current_status(observation) - observation.current_vex_justification = get_current_vex_justification( - observation - ) + observation.current_vex_justification = get_current_vex_justification(observation) if observation.current_status == Status.STATUS_RISK_ACCEPTED: if previous_status != Status.STATUS_RISK_ACCEPTED: - observation.risk_acceptance_expiry_date = ( - calculate_risk_acceptance_expiry_date(observation.product) - ) + observation.risk_acceptance_expiry_date = calculate_risk_acceptance_expiry_date(observation.product) else: observation.risk_acceptance_expiry_date = None @@ -200,13 +176,8 @@ def remove_assessment(observation: Observation, comment: str) -> bool: return False -def assessment_approval( - observation_log: Observation_Log, assessment_status: str, approval_remark: str -) -> None: - if ( - observation_log.assessment_status - != Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL - ): +def assessment_approval(observation_log: Observation_Log, assessment_status: str, approval_remark: str) -> None: + if observation_log.assessment_status != Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL: raise ValidationError("Observation log does not need approval") approval_user = get_current_user() @@ -226,9 +197,7 @@ def assessment_approval( ) check_security_gate(observation_log.observation.product) - push_observation_to_issue_tracker( - observation_log.observation, get_current_user() - ) + push_observation_to_issue_tracker(observation_log.observation, get_current_user()) observation_log.approval_user = approval_user observation_log.approval_remark = approval_remark diff --git a/backend/application/core/services/branch.py b/backend/application/core/services/branch.py new file mode 100644 index 000000000..534bce302 --- /dev/null +++ b/backend/application/core/services/branch.py @@ -0,0 +1,19 @@ +from application.core.models import Branch + + +def 
set_default_branch(branch: Branch, created: bool) -> None: + + if created or (branch.get_dirty_fields().get("is_default_branch") is not None): + if branch.is_default_branch: + for other_branch in Branch.objects.filter(product=branch.product, is_default_branch=True).exclude( + pk=branch.pk + ): + other_branch.is_default_branch = False + other_branch.save() + + branch.product.repository_default_branch = branch + branch.product.save() + else: + if branch.product.repository_default_branch == branch: + branch.product.repository_default_branch = None + branch.product.save() diff --git a/backend/application/core/services/export_observations.py b/backend/application/core/services/export_observations.py index 5b60d4d67..14028d9d5 100644 --- a/backend/application/core/services/export_observations.py +++ b/backend/application/core/services/export_observations.py @@ -8,16 +8,12 @@ from application.core.models import Observation, Product -def export_observations_excel(product: Product, status: Optional[str]) -> Workbook: +def export_observations_excel(product: Product, status: Optional[list[str]]) -> Workbook: observations = _get_observations(product, status) - return export_excel( - observations, "Observations", _get_excludes(), _get_foreign_keys() - ) + return export_excel(observations, "Observations", _get_excludes(), _get_foreign_keys()) -def export_observations_csv( - response: HttpResponse, product: Product, status: Optional[str] -) -> None: +def export_observations_csv(response: HttpResponse, product: Product, status: Optional[list[str]]) -> None: observations = _get_observations(product, status) export_csv( response, @@ -27,21 +23,21 @@ def export_observations_csv( ) -def _get_observations(product: Product, status: Optional[str]) -> QuerySet: +def _get_observations(product: Product, status: Optional[list[str]]) -> QuerySet: if product.is_product_group: observations = Observation.objects.filter(product__product_group=product) else: observations = Observation.objects.filter(product=product) if status: - observations = observations.filter(current_status=status) + observations = observations.filter(current_status__in=status) observations = observations.order_by("current_status", "current_severity", "title") return observations -def _get_excludes(): +def _get_excludes() -> list[str]: return [ "identity_hash", "pk", @@ -69,5 +65,5 @@ def _get_excludes(): ] -def _get_foreign_keys(): +def _get_foreign_keys() -> list[str]: return ["branch", "parser", "product"] diff --git a/backend/application/core/services/housekeeping.py b/backend/application/core/services/housekeeping.py index 8779bdf91..52b01a491 100644 --- a/backend/application/core/services/housekeeping.py +++ b/backend/application/core/services/housekeeping.py @@ -10,14 +10,21 @@ logger = logging.getLogger("secobserve.core") -def delete_inactive_branches_and_set_flags() -> None: +def delete_inactive_branches_and_set_flags() -> str: + num_products = 0 + num_deleted_branches = 0 products = Product.objects.filter(is_product_group=False) for product in products: - delete_inactive_branches_for_product(product) + deleted_branches = delete_inactive_branches_for_product(product) + num_deleted_branches += deleted_branches + if deleted_branches > 0: + num_products += 1 set_product_flags(product) + return f"Deleted {num_deleted_branches} inactive branches in {num_products} products." 
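One plausible way the new set_default_branch() helper could be wired up — a post_save receiver that forwards Django's created flag so that at most one branch per product keeps is_default_branch=True and product.repository_default_branch stays in sync. The receiver below is an assumption for illustration; the helper itself relies on django-dirtyfields' get_dirty_fields() to detect a changed flag.

from django.db.models.signals import post_save
from django.dispatch import receiver

from application.core.models import Branch
from application.core.services.branch import set_default_branch


@receiver(post_save, sender=Branch)
def branch_saved(sender, instance: Branch, created: bool, **kwargs) -> None:
    # Demotes any previous default branch and updates the owning product.
    set_default_branch(instance, created)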
-def delete_inactive_branches_for_product(product: Product) -> None: + +def delete_inactive_branches_for_product(product: Product) -> int: product_group_specific = False keep_inactive_days = None exempt_branches = None @@ -25,27 +32,21 @@ def delete_inactive_branches_for_product(product: Product) -> None: product_group: Product = product.product_group if product_group.repository_branch_housekeeping_active is False: # Branch housekeeping is disabled for this product group - return + return 0 if product_group.repository_branch_housekeeping_active is True: # Branch housekeeping is product group specific product_group_specific = True - keep_inactive_days = ( - product_group.repository_branch_housekeeping_keep_inactive_days - ) - exempt_branches = ( - product_group.repository_branch_housekeeping_exempt_branches - ) + keep_inactive_days = product_group.repository_branch_housekeeping_keep_inactive_days + exempt_branches = product_group.repository_branch_housekeeping_exempt_branches if not product_group_specific: if product.repository_branch_housekeeping_active is False: # Branch housekeeping is disabled for this product - return + return 0 if product.repository_branch_housekeeping_active is True: # Branch housekeeping is product specific - keep_inactive_days = ( - product.repository_branch_housekeeping_keep_inactive_days - ) + keep_inactive_days = product.repository_branch_housekeeping_keep_inactive_days exempt_branches = product.repository_branch_housekeeping_exempt_branches else: settings = Settings.load() @@ -53,14 +54,14 @@ def delete_inactive_branches_for_product(product: Product) -> None: # Branch housekeeping is standard if not settings.branch_housekeeping_active: # Branch housekeeping is disabled - return + return 0 keep_inactive_days = settings.branch_housekeeping_keep_inactive_days exempt_branches = settings.branch_housekeeping_exempt_branches if not keep_inactive_days: # Branch housekeeping has no inactive days configured - return + return 0 inactive_date = timezone.now() - timedelta(days=keep_inactive_days) @@ -68,64 +69,50 @@ def delete_inactive_branches_for_product(product: Product) -> None: if exempt_branches: compiled_exempt_branches = re.compile(exempt_branches, re.IGNORECASE) + num_deleted_branches = 0 branches = Branch.objects.filter( - product=product, housekeeping_protect=False, last_import__lte=inactive_date + product=product, is_default_branch=False, housekeeping_protect=False, last_import__lte=inactive_date ) for branch in branches: - if product.repository_default_branch == branch or ( - compiled_exempt_branches and compiled_exempt_branches.match(branch.name) - ): + if compiled_exempt_branches and compiled_exempt_branches.match(branch.name): continue logger.info( # pylint: disable=logging-fstring-interpolation f"Deleting branch {branch.name} for product {product.name}" ) branch.delete() + num_deleted_branches += 1 + + return num_deleted_branches def set_product_flags(product: Product) -> None: has_cloud_resource_before = product.has_cloud_resource product.has_cloud_resource = ( - Observation.objects.filter(product=product) - .exclude(origin_cloud_qualified_resource="") - .exists() + Observation.objects.filter(product=product).exclude(origin_cloud_qualified_resource="").exists() ) has_component_before = product.has_component product.has_component = ( - Observation.objects.filter(product=product) - .exclude(origin_component_name_version="") - .exists() + Observation.objects.filter(product=product).exclude(origin_component_name_version="").exists() ) has_docker_image_before = 
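Worked example of the exempt-branch check used by the housekeeping code above: the configured pattern is compiled case-insensitively and tested with match(), i.e. anchored at the start of the branch name. The pattern and branch names are illustrative values, not SecObserve defaults.

import re

exempt_branches = r"main|master|release/.*"
compiled_exempt_branches = re.compile(exempt_branches, re.IGNORECASE)

for name in ["main", "Release/1.2", "feature/login"]:
    print(name, bool(compiled_exempt_branches.match(name)))
# main True
# Release/1.2 True
# feature/login False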
product.has_docker_image product.has_docker_image = ( - Observation.objects.filter(product=product) - .exclude(origin_docker_image_name_tag_short="") - .exists() + Observation.objects.filter(product=product).exclude(origin_docker_image_name_tag_short="").exists() ) has_endpoint_before = product.has_endpoint - product.has_endpoint = ( - Observation.objects.filter(product=product) - .exclude(origin_endpoint_hostname="") - .exists() - ) + product.has_endpoint = Observation.objects.filter(product=product).exclude(origin_endpoint_hostname="").exists() has_kubernetes_resource_before = product.has_kubernetes_resource product.has_kubernetes_resource = ( - Observation.objects.filter(product=product) - .exclude(origin_kubernetes_qualified_resource="") - .exists() + Observation.objects.filter(product=product).exclude(origin_kubernetes_qualified_resource="").exists() ) has_source_before = product.has_source - product.has_source = ( - Observation.objects.filter(product=product) - .exclude(origin_source_file="") - .exists() - ) + product.has_source = Observation.objects.filter(product=product).exclude(origin_source_file="").exists() has_potential_duplicates_before = product.has_potential_duplicates product.has_potential_duplicates = Observation.objects.filter( @@ -133,8 +120,7 @@ def set_product_flags(product: Product) -> None: ).exists() if ( - has_cloud_resource_before # pylint: disable=too-many-boolean-expressions - != product.has_cloud_resource + has_cloud_resource_before != product.has_cloud_resource # pylint: disable=too-many-boolean-expressions or has_component_before != product.has_component or has_docker_image_before != product.has_docker_image or has_endpoint_before != product.has_endpoint diff --git a/backend/application/core/services/observation.py b/backend/application/core/services/observation.py index ec18f930f..bba38feba 100644 --- a/backend/application/core/services/observation.py +++ b/backend/application/core/services/observation.py @@ -1,19 +1,26 @@ import hashlib +import re +from decimal import Decimal +from typing import Optional from urllib.parse import urlparse +from cvss import CVSS3, CVSS4 from packageurl import PackageURL +from application.core.models import Observation from application.core.types import Severity, Status -# Parameter observation cannot be typed, because some methods are used in the model class +VERSION_REGEX = r"(?:\s|^)+(?:\d+:)?v?(\d+[\.\d]*)" # NOSONAR +VERSION_REGEX_COMPILED = re.compile(VERSION_REGEX) +# The regex will never be used on an empty string -def get_identity_hash(observation) -> str: +def get_identity_hash(observation: Observation) -> str: hash_string = _get_string_to_hash(observation) return hashlib.sha256(hash_string.casefold().encode("utf-8").strip()).hexdigest() -def _get_string_to_hash(observation): # pylint: disable=too-many-branches +def _get_string_to_hash(observation: Observation) -> str: # pylint: disable=too-many-branches hash_string = observation.title if observation.origin_component_name_version: @@ -41,6 +48,8 @@ def _get_string_to_hash(observation): # pylint: disable=too-many-branches hash_string += str(observation.origin_source_line_start) if observation.origin_source_line_end: hash_string += str(observation.origin_source_line_end) + if observation.origin_source_file_link: + hash_string += observation.origin_source_file_link if observation.origin_cloud_provider: hash_string += observation.origin_cloud_provider @@ -61,14 +70,23 @@ def _get_string_to_hash(observation): # pylint: disable=too-many-branches return hash_string -def 
get_current_severity(observation) -> str: +def get_current_severity(observation: Observation) -> str: + if observation.cvss3_vector: + observation.cvss3_score = CVSS3(observation.cvss3_vector).base_score + + if observation.cvss4_vector: + observation.cvss4_score = CVSS4(observation.cvss4_vector).base_score + if observation.assessment_severity: return observation.assessment_severity + if observation.rule_rego_severity: + return observation.rule_rego_severity + if observation.rule_severity: return observation.rule_severity - if observation.parser_severity: + if observation.parser_severity and observation.parser_severity != Severity.SEVERITY_UNKNOWN: return observation.parser_severity if observation.cvss4_score is not None: @@ -80,7 +98,7 @@ def get_current_severity(observation) -> str: return Severity.SEVERITY_UNKNOWN -def get_cvss_severity(cvss_score: int) -> str: +def get_cvss_severity(cvss_score: Decimal) -> str: if cvss_score is None: return Severity.SEVERITY_UNKNOWN @@ -99,13 +117,16 @@ def get_cvss_severity(cvss_score: int) -> str: return Severity.SEVERITY_NONE -def get_current_status(observation) -> str: +def get_current_status(observation: Observation) -> str: if observation.parser_status == Status.STATUS_RESOLVED: return observation.parser_status if observation.assessment_status: return observation.assessment_status + if observation.rule_rego_status: + return observation.rule_rego_status + if observation.rule_status: return observation.rule_status @@ -118,10 +139,26 @@ def get_current_status(observation) -> str: return Status.STATUS_OPEN -def get_current_vex_justification(observation) -> str: +def get_current_priority(observation: Observation) -> Optional[int]: + if observation.assessment_priority: + return observation.assessment_priority + + if observation.rule_rego_priority: + return observation.rule_rego_priority + + if observation.rule_priority: + return observation.rule_priority + + return None + + +def get_current_vex_justification(observation: Observation) -> str: if observation.assessment_vex_justification: return observation.assessment_vex_justification + if observation.rule_rego_vex_justification: + return observation.rule_rego_vex_justification + if observation.rule_vex_justification: return observation.rule_vex_justification @@ -134,18 +171,23 @@ def get_current_vex_justification(observation) -> str: return "" -def normalize_observation_fields(observation) -> None: - normalize_origin_component(observation) - normalize_origin_docker(observation) - normalize_origin_endpoint(observation) - normalize_origin_cloud(observation) - normalize_origin_kubernetes(observation) +def normalize_observation_fields(observation: Observation) -> None: + _normalize_origin_component(observation) + _normalize_origin_docker(observation) + _normalize_origin_endpoint(observation) + _normalize_origin_cloud(observation) + _normalize_origin_kubernetes(observation) - normalize_severity(observation) - normalize_status(observation) - normalize_vex_justification(observation) + _normalize_severity(observation) + _normalize_status(observation) + observation.current_priority = get_current_priority(observation) + _normalize_vex_justification(observation) - normalize_description(observation) + _normalize_description(observation) + _normalize_vulnerability_ids(observation) + _normalize_cvss_vectors(observation) + + _normalize_update_impact_score_and_fix_available(observation) if observation.recommendation is None: observation.recommendation = "" @@ -155,25 +197,37 @@ def 
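Small worked example of the new CVSS path in get_current_severity(): when neither an assessment nor a rule severity applies, the base score derived from the stored vector decides the bucket. The vector below is illustrative; the cvss package returns the base score as a Decimal.

from cvss import CVSS3

vector = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H"
score = CVSS3(vector).base_score
print(score)  # 9.8
# get_cvss_severity() then maps this into the critical bucket
# (CVSS rates scores of 9.0 and above as critical).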
normalize_observation_fields(observation) -> None: observation.origin_service_name = "" if observation.origin_source_file is None: observation.origin_source_file = "" - if observation.cvss3_vector is None: - observation.cvss3_vector = "" - if observation.cvss4_vector is None: - observation.cvss4_vector = "" + if observation.origin_source_file_link is None: + observation.origin_source_file_link = "" if observation.scanner is None: observation.scanner = "" if observation.api_configuration_name is None: observation.api_configuration_name = "" if observation.upload_filename is None: observation.upload_filename = "" - if observation.vulnerability_id is None: - observation.vulnerability_id = "" if observation.issue_tracker_issue_id is None: observation.issue_tracker_issue_id = "" if observation.issue_tracker_jira_initial_status is None: observation.issue_tracker_jira_initial_status = "" -def normalize_description(observation): +def _normalize_vulnerability_ids(observation: Observation) -> None: + if observation.vulnerability_id is None: + observation.vulnerability_id = "" + if observation.vulnerability_id_aliases is None: + observation.vulnerability_id_aliases = "" + + +def _normalize_cvss_vectors(observation: Observation) -> None: + if observation.cvss3_vector is None: + observation.cvss3_vector = "" + if observation.cvss4_vector is None: + observation.cvss4_vector = "" + if observation.cve_found_in is None: + observation.cve_found_in = "" + + +def _normalize_description(observation: Observation) -> None: if observation.description is None: observation.description = "" else: @@ -182,38 +236,31 @@ def normalize_description(observation): observation.description = observation.description[:-1] # \u0000 can lead to SQL exceptions - observation.description = observation.description.replace( - "\u0000", "REDACTED_NULL" - ) + observation.description = observation.description.replace("\u0000", "REDACTED_NULL") -def normalize_origin_component(observation): # pylint: disable=too-many-branches +def _normalize_origin_component(observation: Observation) -> None: # pylint: disable=too-many-branches if not observation.origin_component_name_version: if observation.origin_component_name and observation.origin_component_version: observation.origin_component_name_version = ( - observation.origin_component_name - + ":" - + observation.origin_component_version + observation.origin_component_name + ":" + observation.origin_component_version ) elif observation.origin_component_name: - observation.origin_component_name_version = ( - observation.origin_component_name - ) + observation.origin_component_name_version = observation.origin_component_name else: component_parts = observation.origin_component_name_version.split(":") if len(component_parts) == 3: - observation.origin_component_name = ( - f"{component_parts[0]}:{component_parts[1]}" - ) - observation.origin_component_version = component_parts[2] + if component_parts[0] == observation.origin_component_name: + observation.origin_component_version = f"{component_parts[1]}:{component_parts[2]}" + else: + observation.origin_component_name = f"{component_parts[0]}:{component_parts[1]}" + observation.origin_component_version = component_parts[2] elif len(component_parts) == 2: observation.origin_component_name = component_parts[0] observation.origin_component_version = component_parts[1] elif len(component_parts) == 1: - observation.origin_component_name = ( - observation.origin_component_name_version - ) - observation.origin_component_version = None + 
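A standalone mirror of the three-/two-/one-part component split handled above, useful for seeing what the new branch for an already-known component name does. The function and sample values are illustrative, not part of the diff.

def split_name_version(name_version: str, known_name: str = "") -> tuple[str, str]:
    parts = name_version.split(":")
    if len(parts) == 3:
        if parts[0] == known_name:
            # the name was already set, so everything after it is the version
            return known_name, f"{parts[1]}:{parts[2]}"
        return f"{parts[0]}:{parts[1]}", parts[2]
    if len(parts) == 2:
        return parts[0], parts[1]
    return name_version, ""


print(split_name_version("org.apache.logging.log4j:log4j-core:2.17.1"))
# ('org.apache.logging.log4j:log4j-core', '2.17.1')
print(split_name_version("nginx:1.25:alpine", known_name="nginx"))
# ('nginx', '1.25:alpine')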
observation.origin_component_name = observation.origin_component_name_version + observation.origin_component_version = "" if observation.origin_component_name_version is None: observation.origin_component_name_version = "" @@ -225,6 +272,8 @@ def normalize_origin_component(observation): # pylint: disable=too-many-branche observation.origin_component_purl = "" if observation.origin_component_cpe is None: observation.origin_component_cpe = "" + if observation.origin_component_cyclonedx_bom_link is None: + observation.origin_component_cyclonedx_bom_link = "" if observation.origin_component_dependencies is None: observation.origin_component_dependencies = "" @@ -240,21 +289,17 @@ def normalize_origin_component(observation): # pylint: disable=too-many-branche observation.origin_component_purl_type = "" -def normalize_origin_docker(observation): +def _normalize_origin_docker(observation: Observation) -> None: if not observation.origin_docker_image_name_tag: _normalize_origin_docker_image_name(observation) else: _normalize_origin_docker_image_name_tag(observation) if observation.origin_docker_image_name_tag: - origin_docker_image_name_tag_parts = ( - observation.origin_docker_image_name_tag.split("/") - ) - observation.origin_docker_image_name_tag_short = ( - origin_docker_image_name_tag_parts[ - len(origin_docker_image_name_tag_parts) - 1 - ].strip() - ) + origin_docker_image_name_tag_parts = observation.origin_docker_image_name_tag.split("/") + observation.origin_docker_image_name_tag_short = origin_docker_image_name_tag_parts[ + len(origin_docker_image_name_tag_parts) - 1 + ].strip() else: observation.origin_docker_image_name_tag_short = "" @@ -268,7 +313,7 @@ def normalize_origin_docker(observation): observation.origin_docker_image_digest = "" -def _normalize_origin_docker_image_name(observation): +def _normalize_origin_docker_image_name(observation: Observation) -> None: if observation.origin_docker_image_name and not observation.origin_docker_image_tag: docker_image_parts = observation.origin_docker_image_name.split(":") if len(docker_image_parts) == 2: @@ -277,15 +322,13 @@ def _normalize_origin_docker_image_name(observation): if observation.origin_docker_image_name and observation.origin_docker_image_tag: observation.origin_docker_image_name_tag = ( - observation.origin_docker_image_name - + ":" - + observation.origin_docker_image_tag + observation.origin_docker_image_name + ":" + observation.origin_docker_image_tag ) else: observation.origin_docker_image_name_tag = observation.origin_docker_image_name -def _normalize_origin_docker_image_name_tag(observation): +def _normalize_origin_docker_image_name_tag(observation: Observation) -> None: docker_image_parts = observation.origin_docker_image_name_tag.split(":") if len(docker_image_parts) == 2: observation.origin_docker_image_name = docker_image_parts[0] @@ -294,11 +337,11 @@ def _normalize_origin_docker_image_name_tag(observation): observation.origin_docker_image_name = observation.origin_docker_image_name_tag -def normalize_origin_endpoint(observation): +def _normalize_origin_endpoint(observation: Observation) -> None: if observation.origin_endpoint_url: parse_result = urlparse(observation.origin_endpoint_url) observation.origin_endpoint_scheme = parse_result.scheme - observation.origin_endpoint_hostname = parse_result.hostname + observation.origin_endpoint_hostname = str(parse_result.hostname) observation.origin_endpoint_port = parse_result.port observation.origin_endpoint_path = parse_result.path observation.origin_endpoint_params = 
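Illustrative walk-through of the docker image normalization above, using a made-up image reference: the name/tag split happens on the colon, while the short form keeps only the last path segment of the full name:tag string.

name_tag = "ghcr.io/secobserve/secobserve-backend:dev"

parts = name_tag.split(":")
name, tag = parts[0], parts[1]           # 'ghcr.io/secobserve/secobserve-backend', 'dev'
short = name_tag.split("/")[-1].strip()  # 'secobserve-backend:dev'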
parse_result.params @@ -329,7 +372,7 @@ def normalize_origin_endpoint(observation): observation.origin_endpoint_fragment = "" -def normalize_origin_cloud(observation): +def _normalize_origin_cloud(observation: Observation) -> None: if observation.origin_cloud_provider is None: observation.origin_cloud_provider = "" if observation.origin_cloud_account_subscription_project is None: @@ -346,10 +389,7 @@ def normalize_origin_cloud(observation): if len(observation.origin_cloud_account_subscription_project) > 122 else observation.origin_cloud_account_subscription_project ) - if ( - observation.origin_cloud_account_subscription_project - and observation.origin_cloud_resource - ): + if observation.origin_cloud_account_subscription_project and observation.origin_cloud_resource: observation.origin_cloud_qualified_resource += " / " if observation.origin_cloud_resource: observation.origin_cloud_qualified_resource += ( @@ -359,7 +399,7 @@ def normalize_origin_cloud(observation): ) -def normalize_origin_kubernetes(observation): +def _normalize_origin_kubernetes(observation: Observation) -> None: if observation.origin_kubernetes_cluster is None: observation.origin_kubernetes_cluster = "" if observation.origin_kubernetes_namespace is None: @@ -394,36 +434,43 @@ def normalize_origin_kubernetes(observation): ) -def normalize_severity(observation): +def _normalize_severity(observation: Observation) -> None: if observation.current_severity is None: observation.current_severity = "" if observation.assessment_severity is None: observation.assessment_severity = "" if observation.rule_severity is None: observation.rule_severity = "" + if observation.rule_rego_severity is None: + observation.rule_rego_severity = "" if observation.parser_severity is None: observation.parser_severity = "" - if observation.parser_severity: - if ( + if ( + observation.parser_severity + and ( observation.parser_severity, observation.parser_severity, - ) not in Severity.SEVERITY_CHOICES: - observation.parser_severity = Severity.SEVERITY_UNKNOWN + ) + not in Severity.SEVERITY_CHOICES + ): + observation.parser_severity = Severity.SEVERITY_UNKNOWN observation.current_severity = get_current_severity(observation) observation.numerical_severity = Severity.NUMERICAL_SEVERITIES.get( - observation.current_severity + observation.current_severity, Severity.SEVERITY_UNKNOWN ) -def normalize_status(observation): +def _normalize_status(observation: Observation) -> None: if observation.current_status is None: observation.current_status = "" if observation.assessment_status is None: observation.assessment_status = "" if observation.rule_status is None: observation.rule_status = "" + if observation.rule_rego_status is None: + observation.rule_rego_status = "" if observation.parser_status is None: observation.parser_status = "" if observation.vex_status is None: @@ -432,13 +479,15 @@ def normalize_status(observation): observation.current_status = get_current_status(observation) -def normalize_vex_justification(observation): +def _normalize_vex_justification(observation: Observation) -> None: if observation.current_vex_justification is None: observation.current_vex_justification = "" if observation.assessment_vex_justification is None: observation.assessment_vex_justification = "" if observation.rule_vex_justification is None: observation.rule_vex_justification = "" + if observation.rule_rego_vex_justification is None: + observation.rule_rego_vex_justification = "" if observation.parser_vex_justification is None: observation.parser_vex_justification = "" 
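Example of the urlparse()-based endpoint decomposition that _normalize_origin_endpoint() performs; the URL is illustrative. The hostname is explicitly cast to str in the diff because urlparse() may return None for it.

from urllib.parse import urlparse

result = urlparse("https://api.example.com:8443/v1/items?active=true#top")
print(result.scheme)         # https
print(str(result.hostname))  # api.example.com
print(result.port)           # 8443
print(result.path)           # /v1/items
print(result.query)          # active=true
print(result.fragment)       # top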
if observation.vex_vex_justification is None: @@ -447,27 +496,77 @@ def normalize_vex_justification(observation): observation.current_vex_justification = get_current_vex_justification(observation) -def set_product_flags(observation) -> None: +def _normalize_update_impact_score_and_fix_available(observation: Observation) -> None: + observation.fix_available = None + observation.update_impact_score = None + + if not observation.origin_component_name: + return + + observation.fix_available = False + + recommendation_matches = ( + re.findall(VERSION_REGEX_COMPILED, observation.recommendation) if observation.recommendation else None + ) + component_matches = ( + re.findall(VERSION_REGEX_COMPILED, observation.origin_component_version) + if observation.origin_component_version + else None + ) + + observation.fix_available = bool(recommendation_matches) + + if not recommendation_matches or not component_matches: + return + + observation.fix_available = True + + recommendation_version = None + if len(recommendation_matches) == 1: + recommendation_version = _parse_version(recommendation_matches[0]) + else: + component_name = re.sub(r":\d+", "", observation.origin_component_name) + search_prefix = rf"(?:Upgrade (?:\S+:)?{component_name} to version)" + match = re.findall(rf"{search_prefix}{VERSION_REGEX}", observation.recommendation) + if match: + recommendation_version = _parse_version(match[0]) + + if recommendation_version: + component_version = _parse_version(component_matches[0]) + major_diff = max(recommendation_version[0] - component_version[0], 0) + minor_diff = max(recommendation_version[1] - component_version[1], 0) + patch_diff = max(recommendation_version[2] - component_version[2], 0) + + if major_diff > 0: + observation.update_impact_score = major_diff * 100 + elif minor_diff > 0: + observation.update_impact_score = minor_diff * 10 + else: + observation.update_impact_score = patch_diff + + +def _parse_version(version: str) -> tuple[int, ...]: + parts = version.split(".")[:3] + # Filter out empty strings + parts = [part for part in parts if part] + rettuple = tuple(map(int, parts)) + for _ in range(3 - len(rettuple)): + rettuple += (0,) + return rettuple + + +def set_product_flags(observation: Observation) -> None: product_changed = False - if ( - observation.origin_cloud_qualified_resource - and not observation.product.has_cloud_resource - ): + if observation.origin_cloud_qualified_resource and not observation.product.has_cloud_resource: observation.product.has_cloud_resource = True product_changed = True - if ( - observation.origin_component_name_version - and not observation.product.has_component - ): + if observation.origin_component_name_version and not observation.product.has_component: observation.product.has_component = True product_changed = True - if ( - observation.origin_docker_image_name_tag - and not observation.product.has_docker_image - ): + if observation.origin_docker_image_name_tag and not observation.product.has_docker_image: observation.product.has_docker_image = True product_changed = True @@ -475,10 +574,7 @@ def set_product_flags(observation) -> None: observation.product.has_endpoint = True product_changed = True - if ( - observation.origin_kubernetes_qualified_resource - and not observation.product.has_kubernetes_resource - ): + if observation.origin_kubernetes_qualified_resource and not observation.product.has_kubernetes_resource: observation.product.has_kubernetes_resource = True product_changed = True @@ -486,10 +582,7 @@ def set_product_flags(observation) -> 
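Worked example of the new fix-available / update-impact logic above: the version regex pulls version strings out of the recommendation and the component version, and the impact score weights major, minor and patch jumps by 100, 10 and 1 respectively. The helper re-implements _parse_version() only for illustration; all values are made up.

import re

VERSION_REGEX = r"(?:\s|^)+(?:\d+:)?v?(\d+[\.\d]*)"

print(re.findall(VERSION_REGEX, "Upgrade log4j-core to version 3.1.4"))  # ['3.1.4']


def parse_version(version: str) -> tuple[int, int, int]:
    parts = [part for part in version.split(".")[:3] if part]
    numbers = [int(part) for part in parts] + [0, 0, 0]
    return numbers[0], numbers[1], numbers[2]


recommended = parse_version("3.1.4")  # (3, 1, 4)
installed = parse_version("2.9")      # (2, 9, 0)

major_diff = max(recommended[0] - installed[0], 0)  # 1
minor_diff = max(recommended[1] - installed[1], 0)  # 0
patch_diff = max(recommended[2] - installed[2], 0)  # 4

if major_diff:
    update_impact_score = major_diff * 100
elif minor_diff:
    update_impact_score = minor_diff * 10
else:
    update_impact_score = patch_diff

print(update_impact_score)  # 100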
None: observation.product.has_source = True product_changed = True - if ( - observation.has_potential_duplicates - and not observation.product.has_potential_duplicates - ): + if observation.has_potential_duplicates and not observation.product.has_potential_duplicates: observation.product.has_potential_duplicates = True product_changed = True diff --git a/backend/application/core/services/observation_log.py b/backend/application/core/services/observation_log.py index 9a758e2a1..82d19dd37 100644 --- a/backend/application/core/services/observation_log.py +++ b/backend/application/core/services/observation_log.py @@ -2,15 +2,16 @@ from typing import Optional from application.access_control.models import User -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import Observation, Observation_Log -def create_observation_log( +def create_observation_log( # pylint: disable=too-many-arguments *, observation: Observation, severity: str, status: str, + priority: Optional[int] = None, comment: str, vex_justification: str, assessment_status: str, @@ -21,6 +22,7 @@ def create_observation_log( user=_get_user(), severity=severity, status=status, + priority=priority, comment=comment, vex_justification=vex_justification, assessment_status=assessment_status, diff --git a/backend/application/core/services/observations_bulk_actions.py b/backend/application/core/services/observations_bulk_actions.py index 49731b9b8..262c69c77 100644 --- a/backend/application/core/services/observations_bulk_actions.py +++ b/backend/application/core/services/observations_bulk_actions.py @@ -5,9 +5,9 @@ from django.utils import timezone from rest_framework.exceptions import ValidationError -from application.access_control.services.authorization import user_has_permission -from application.access_control.services.roles_permissions import Permissions -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user +from application.authorization.services.authorization import user_has_permission +from application.authorization.services.roles_permissions import Permissions from application.core.models import ( Observation, Observation_Log, @@ -22,9 +22,6 @@ ) from application.core.services.security_gate import check_security_gate from application.core.types import Assessment_Status, Status -from application.issue_tracker.services.issue_tracker import ( - push_deleted_observation_to_issue_tracker, -) def observations_bulk_assessment( @@ -57,9 +54,6 @@ def observations_bulk_delete(product: Product, observation_ids: list[int]) -> No observations.delete() - for issue_id in issue_ids: - push_deleted_observation_to_issue_tracker(product, issue_id, get_current_user()) - check_security_gate(product) product.last_observation_change = timezone.now() product.save() @@ -73,13 +67,9 @@ def observations_bulk_mark_duplicates( try: observation = Observation.objects.get(pk=observation_id) if observation.product != product: - raise ValidationError( - f"Observation {observation.pk} does not belong to product {product.pk}" - ) + raise ValidationError(f"Observation {observation.pk} does not belong to product {product.pk}") except Observation.DoesNotExist: - raise ValidationError( # pylint: disable=raise-missing-from - "Observation does not exist" - ) + raise ValidationError("Observation does not exist") # pylint: disable=raise-missing-from # 
The DoesNotExist exception itself is not relevant and must not be re-raised observation_ids = [] @@ -88,14 +78,9 @@ def observations_bulk_mark_duplicates( observation_ids.append(potential_duplicate.potential_duplicate_observation.id) duplicates = _check_observations(product, observation_ids) - if ( - potential_duplicate.type - == Potential_Duplicate.POTENTIAL_DUPLICATE_TYPE_COMPONENT - ): + if potential_duplicate.type == Potential_Duplicate.POTENTIAL_DUPLICATE_TYPE_COMPONENT: comment = f"Duplicate of {observation.origin_component_name_version}" - elif ( - potential_duplicate.type == Potential_Duplicate.POTENTIAL_DUPLICATE_TYPE_SOURCE - ): + elif potential_duplicate.type == Potential_Duplicate.POTENTIAL_DUPLICATE_TYPE_SOURCE: comment = f"Duplicate of {observation.title} from scanner {observation.scanner}" else: raise ValidationError("Invalid potential duplicate type") @@ -114,9 +99,7 @@ def observations_bulk_mark_duplicates( set_potential_duplicate(observation) -def _check_observations( - product: Optional[Product], observation_ids: list[int] -) -> QuerySet[Observation]: +def _check_observations(product: Optional[Product], observation_ids: list[int]) -> QuerySet[Observation]: observations = Observation.objects.filter(id__in=observation_ids) if len(observations) != len(observation_ids): raise ValidationError("Some observations do not exist") @@ -124,24 +107,17 @@ def _check_observations( for observation in observations: if product: if observation.product != product: - raise ValidationError( - f"Observation {observation.pk} does not belong to product {product.pk}" - ) + raise ValidationError(f"Observation {observation.pk} does not belong to product {product.pk}") else: if not user_has_permission(observation, Permissions.Observation_Assessment): - raise ValidationError( - f"First observation without assessment permission: {observation}" - ) + raise ValidationError(f"First observation without assessment permission: {observation}") current_observation_log = get_current_observation_log(observation) if ( current_observation_log - and current_observation_log.assessment_status - == Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL + and current_observation_log.assessment_status == Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL ): - raise ValidationError( - "Cannot create new assessment while last assessment still needs approval" - ) + raise ValidationError("Cannot create new assessment while last assessment still needs approval") return observations @@ -157,9 +133,7 @@ def observation_logs_bulk_approval( set_potential_duplicate_both_ways(observation_log.observation) -def _check_observation_logs( - product: Optional[Product], observation_log_ids: list[int] -) -> QuerySet[Observation_Log]: +def _check_observation_logs(product: Optional[Product], observation_log_ids: list[int]) -> QuerySet[Observation_Log]: observation_logs = Observation_Log.objects.filter(id__in=observation_log_ids) if len(observation_logs) != len(observation_log_ids): raise ValidationError("Some observation logs do not exist") @@ -167,23 +141,12 @@ def _check_observation_logs( for observation_log in observation_logs: if product: if observation_log.observation.product != product: - raise ValidationError( - f"Observation log {observation_log.pk} does not belong to product {product.pk}" - ) + raise ValidationError(f"Observation log {observation_log.pk} does not belong to product {product.pk}") else: - if not user_has_permission( - observation_log, Permissions.Observation_Log_Approval - ): - raise ValidationError( - f"First 
observation log without approval permission: {observation_log.pk}" - ) - if ( - not observation_log.assessment_status - == Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL - ): - raise ValidationError( - f"First observation log that does not need approval: {observation_log.pk}" - ) + if not user_has_permission(observation_log, Permissions.Observation_Log_Approval): + raise ValidationError(f"First observation log without approval permission: {observation_log.pk}") + if observation_log.assessment_status != Assessment_Status.ASSESSMENT_STATUS_NEEDS_APPROVAL: + raise ValidationError(f"First observation log that does not need approval: {observation_log.pk}") if get_current_user() == observation_log.user: raise ValidationError( f"First observation log where user cannot approve their own assessment: {observation_log.pk}" diff --git a/backend/application/core/services/potential_duplicates.py b/backend/application/core/services/potential_duplicates.py index 0c84e1ab4..dc59a082e 100644 --- a/backend/application/core/services/potential_duplicates.py +++ b/backend/application/core/services/potential_duplicates.py @@ -3,15 +3,13 @@ from django.db.models.query import QuerySet from huey.contrib.djhuey import db_task -from application.commons.services.tasks import handle_task_exception from application.core.models import Branch, Observation, Potential_Duplicate, Product from application.core.types import Status +from application.notifications.services.tasks import handle_task_exception @db_task() -def find_potential_duplicates( - product: Product, branch: Optional[Branch], service: Optional[str] -) -> None: +def find_potential_duplicates(product: Product, branch: Optional[Branch], service: Optional[str]) -> None: try: if not service: service = "" @@ -28,15 +26,15 @@ def find_potential_duplicates( handle_task_exception(e) -def _handle_observation(observation: Observation, observations: QuerySet[Observation]): +def _handle_observation(observation: Observation, observations: QuerySet[Observation]) -> None: Potential_Duplicate.objects.filter(observation=observation).delete() initial_has_potential_duplicates = observation.has_potential_duplicates observation.has_potential_duplicates = False - if observation.current_status == Status.STATUS_OPEN: + if observation.current_status in Status.STATUS_ACTIVE: for potential_duplicate_observation in observations: if ( observation != potential_duplicate_observation - and potential_duplicate_observation.current_status == Status.STATUS_OPEN + and potential_duplicate_observation.current_status in Status.STATUS_ACTIVE ): potential_duplicate_type = None if ( @@ -44,21 +42,15 @@ def _handle_observation(observation: Observation, observations: QuerySet[Observa and potential_duplicate_observation.origin_component_name and observation.title == potential_duplicate_observation.title ): - potential_duplicate_type = ( - Potential_Duplicate.POTENTIAL_DUPLICATE_TYPE_COMPONENT - ) + potential_duplicate_type = Potential_Duplicate.POTENTIAL_DUPLICATE_TYPE_COMPONENT if ( observation.origin_source_file and observation.origin_source_line_start - and observation.origin_source_file - == potential_duplicate_observation.origin_source_file - and observation.origin_source_line_start - == potential_duplicate_observation.origin_source_line_start + and observation.origin_source_file == potential_duplicate_observation.origin_source_file + and observation.origin_source_line_start == potential_duplicate_observation.origin_source_line_start and observation.scanner != 
potential_duplicate_observation.scanner ): - potential_duplicate_type = ( - Potential_Duplicate.POTENTIAL_DUPLICATE_TYPE_SOURCE - ) + potential_duplicate_type = Potential_Duplicate.POTENTIAL_DUPLICATE_TYPE_SOURCE if potential_duplicate_type: Potential_Duplicate.objects.update_or_create( observation=observation, @@ -73,9 +65,7 @@ def _handle_observation(observation: Observation, observations: QuerySet[Observa def set_potential_duplicate_both_ways(observation: Observation) -> None: set_potential_duplicate(observation) - potential_duplicate_observations = Potential_Duplicate.objects.filter( - potential_duplicate_observation=observation - ) + potential_duplicate_observations = Potential_Duplicate.objects.filter(potential_duplicate_observation=observation) for potential_duplicate_observation in potential_duplicate_observations: set_potential_duplicate(potential_duplicate_observation.observation) @@ -83,10 +73,10 @@ def set_potential_duplicate_both_ways(observation: Observation) -> None: def set_potential_duplicate(observation: Observation) -> None: initial_has_potential_duplicates = observation.has_potential_duplicates - if observation.current_status == Status.STATUS_OPEN: + if observation.current_status in Status.STATUS_ACTIVE: open_potential_duplicates = Potential_Duplicate.objects.filter( observation=observation, - potential_duplicate_observation__current_status=Status.STATUS_OPEN, + potential_duplicate_observation__current_status__in=Status.STATUS_ACTIVE, ).count() if open_potential_duplicates == 0: observation.has_potential_duplicates = False diff --git a/backend/application/core/services/product.py b/backend/application/core/services/product.py deleted file mode 100644 index 6c2effc49..000000000 --- a/backend/application/core/services/product.py +++ /dev/null @@ -1,61 +0,0 @@ -from application.core.models import Observation, Product -from application.core.queries.branch import get_branches_by_product -from application.core.types import Status -from application.licenses.models import License_Component - - -def set_repository_default_branch(product: Product) -> None: - if not product.repository_default_branch: - current_repository_default_branch = product.repository_default_branch - new_repository_default_branch = product.repository_default_branch - branches = get_branches_by_product(product) - if not branches: - new_repository_default_branch = None - else: - if len(branches) == 1: - new_repository_default_branch = branches[0] - else: - for branch in branches: - if branch.name == "main": - new_repository_default_branch = branch - break - - if new_repository_default_branch != current_repository_default_branch: - product.repository_default_branch = new_repository_default_branch - product.save() - - -def get_product_group_observation_count(product_group: Product, severity: str) -> int: - if not product_group.is_product_group: - raise ValueError(f"{product_group.name} is not a product group") - - count = 0 - for product in Product.objects.filter(product_group=product_group): - count += get_product_observation_count(product, severity) - return count - - -def get_product_observation_count(product: Product, severity: str) -> int: - return Observation.objects.filter( - product=product, - branch=product.repository_default_branch, - current_status=Status.STATUS_OPEN, - current_severity=severity, - ).count() - - -def get_product_group_license_count( - product_group: Product, evaluation_result: str -) -> int: - count = 0 - for product in Product.objects.filter(product_group=product_group): - count += 
get_product_license_count(product, evaluation_result) - return count - - -def get_product_license_count(product: Product, evaluation_result: str) -> int: - return License_Component.objects.filter( - product=product, - branch=product.repository_default_branch, - evaluation_result=evaluation_result, - ).count() diff --git a/backend/application/core/services/product_api_token.py b/backend/application/core/services/product_api_token.py new file mode 100644 index 000000000..be641ccd3 --- /dev/null +++ b/backend/application/core/services/product_api_token.py @@ -0,0 +1,90 @@ +from dataclasses import dataclass +from datetime import date +from typing import Optional + +from rest_framework.exceptions import ValidationError + +from application.access_control.models import API_Token_Multiple, User +from application.access_control.queries.user import get_user_by_username +from application.access_control.services.user_api_token import generate_api_token_hash +from application.authorization.services.roles_permissions import Roles +from application.core.models import Product, Product_Member +from application.core.queries.product_member import get_product_member + + +def create_product_api_token(product: Product, role: Roles, name: str, expiration_date: Optional[date]) -> str: + product_user_name = _get_product_user_name(product, name) + user = get_user_by_username(product_user_name) + if user: + try: + API_Token_Multiple.objects.get(user=user) + raise ValidationError("API token with this name already exists.") + except API_Token_Multiple.DoesNotExist: + pass + + api_token, api_token_hash = generate_api_token_hash() + + if user: + user.is_active = True + else: + user = User(username=product_user_name, is_active=True) + user.set_unusable_password() + user.save() + + Product_Member(product=product, user=user, role=role).save() + API_Token_Multiple(user=user, api_token_hash=api_token_hash, name=name, expiration_date=expiration_date).save() + + return api_token + + +def revoke_product_api_token(product: Product, api_token: API_Token_Multiple) -> None: + user = api_token.user + api_token.delete() + + product_member = get_product_member(product, user) + if product_member: + product_member.delete() + + user.is_active = False + user.save() + + +@dataclass +class ProductAPIToken: + id: int + product: int + role: int + name: str + expiration_date: Optional[date] + + +def get_product_api_tokens(product: Product) -> list[ProductAPIToken]: + users = User.objects.filter(username__startswith=f"-product-{product.pk}-") + if not users: + return [] + + product_api_tokens = [] + for user in users: + product_member = get_product_member(product, user) + if product_member: + try: + api_token = API_Token_Multiple.objects.get(user=user) + product_api_tokens.append( + ProductAPIToken( + id=api_token.pk, + product=product.pk, + role=product_member.role, + name=api_token.name, + expiration_date=api_token.expiration_date, + ) + ) + except API_Token_Multiple.DoesNotExist: + continue + except API_Token_Multiple.MultipleObjectsReturned: + continue + + return product_api_tokens + + +def _get_product_user_name(product: Product, name: str) -> str: + return f"-product-{product.pk}-{name}-api_token-" diff --git a/backend/application/core/services/purl_type.py b/backend/application/core/services/purl_type.py new file mode 100644 index 000000000..9ab7d008f --- /dev/null +++ b/backend/application/core/services/purl_type.py @@ -0,0 +1,70 @@ +from dataclasses import dataclass +from typing import Optional + +from application.core.models import 
Observation, Product +from application.core.types import PURL_Type +from application.licenses.models import License_Component + + +@dataclass +class PURLTypeElement: + id: str + name: str + + +@dataclass +class PURLTypeList: + count: int + results: list[PURLTypeElement] + + +def get_purl_type(purl_type_id: str) -> Optional[PURLTypeElement]: + name = PURL_Type.PURL_TYPE_CHOICES[purl_type_id] + if name: + return PURLTypeElement(id=purl_type_id, name=name) + + return None + + +def get_purl_types(product: Product, for_observations: bool, for_license_components: bool) -> PURLTypeList: + purl_types = PURLTypeList( + count=0, + results=[], + ) + + if for_observations: + observation_purl_types = ( + Observation.objects.filter(product=product) + .exclude(origin_component_purl_type="") + .values("origin_component_purl_type") + .distinct() + ) + purl_types = PURLTypeList( + count=observation_purl_types.count(), + results=[ + PURLTypeElement( + id=purl_type.get("origin_component_purl_type", ""), + name=PURL_Type.PURL_TYPE_CHOICES.get(purl_type.get("origin_component_purl_type", ""), ""), + ) + for purl_type in observation_purl_types + ], + ) + elif for_license_components: + license_component_purl_types = ( + License_Component.objects.filter(product=product) + .exclude(component_purl_type="") + .values("component_purl_type") + .distinct() + ) + purl_types = PURLTypeList( + count=license_component_purl_types.count(), + results=[ + PURLTypeElement( + id=purl_type.get("component_purl_type", ""), + name=PURL_Type.PURL_TYPE_CHOICES.get(purl_type.get("component_purl_type", ""), ""), + ) + for purl_type in license_component_purl_types + ], + ) + + return purl_types diff --git a/backend/application/core/services/risk_acceptance_expiry.py b/backend/application/core/services/risk_acceptance_expiry.py index eb5f6c3d1..3d59146e8 100644 --- a/backend/application/core/services/risk_acceptance_expiry.py +++ b/backend/application/core/services/risk_acceptance_expiry.py @@ -23,9 +23,7 @@ def calculate_risk_acceptance_expiry_date(product: Product) -> Optional[date]: and product.product_group.risk_acceptance_expiry_days and product.product_group.risk_acceptance_expiry_days > 0 ): - return date.today() + timedelta( - days=product.product_group.risk_acceptance_expiry_days - ) + return date.today() + timedelta(days=product.product_group.risk_acceptance_expiry_days) settings = Settings.load() if settings.risk_acceptance_expiry_days > 0: diff --git a/backend/application/core/services/risk_acceptance_expiry_task.py b/backend/application/core/services/risk_acceptance_expiry_task.py index b4a8daed1..db6c40ce1 100644 --- a/backend/application/core/services/risk_acceptance_expiry_task.py +++ b/backend/application/core/services/risk_acceptance_expiry_task.py @@ -5,15 +5,13 @@ from application.core.types import Status -def expire_risk_acceptances() -> None: +def expire_risk_acceptances() -> str: observations = Observation.objects.filter( current_status=Status.STATUS_RISK_ACCEPTED, risk_acceptance_expiry_date__lte=date.today(), ) for observation in observations: - assessment_removed = remove_assessment( - observation, "Risk acceptance has expired." - ) + assessment_removed = remove_assessment(observation, "Risk acceptance has expired.") if not assessment_removed: observation.parser_status = Status.STATUS_OPEN observation.save() @@ -25,3 +23,5 @@ def expire_risk_acceptances() -> None: new_vex_justification="", new_risk_acceptance_expiry_date=None, ) + + return f"Expired risk acceptances for {observations.count()} observations." 
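The new product_api_token service added earlier in this diff creates one technical user per named API token and removes it again on revocation. A minimal usage sketch, assuming a configured Django environment with an existing product; the product lookup and the Roles.Upload role value are illustrative assumptions, not taken from this diff:

    from datetime import date, timedelta

    from application.authorization.services.roles_permissions import Roles
    from application.core.models import Product
    from application.core.services.product_api_token import (
        create_product_api_token,
        get_product_api_tokens,
    )

    # Illustrative lookup; any existing product works.
    product = Product.objects.get(name="example-product")

    # The plain token is returned exactly once; only its hash is persisted in
    # API_Token_Multiple, tied to a generated "-product-<pk>-<name>-api_token-" user.
    token = create_product_api_token(
        product,
        role=Roles.Upload,  # assumed role value, for illustration only
        name="ci-pipeline",
        expiration_date=date.today() + timedelta(days=90),
    )

    # Listing tokens relies on the username prefix convention from _get_product_user_name.
    for entry in get_product_api_tokens(product):
        print(entry.name, entry.role, entry.expiration_date)
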
diff --git a/backend/application/core/services/security_gate.py b/backend/application/core/services/security_gate.py index 1f9ba1998..c3d9cd57d 100644 --- a/backend/application/core/services/security_gate.py +++ b/backend/application/core/services/security_gate.py @@ -1,12 +1,11 @@ from typing import Optional from application.commons.models import Settings -from application.commons.services.send_notifications import ( +from application.core.models import Observation, Product +from application.core.queries.product import get_product_by_id +from application.notifications.services.send_notifications import ( send_product_security_gate_notification, ) -from application.core.models import Product -from application.core.services.product import get_product_observation_count -from application.core.types import Severity def check_security_gate(product: Product) -> None: @@ -37,6 +36,11 @@ def check_security_gate(product: Product) -> None: send_product_security_gate_notification(product) +def check_security_gate_observation(observation: Observation) -> None: + if observation.branch == observation.product.repository_default_branch: + check_security_gate(observation.product) + + def _calculate_active_product_security_gate(product: Product) -> bool: new_security_gate_passed = True @@ -73,51 +77,43 @@ def _calculate_active_product_security_gate(product: Product) -> bool: ) else: security_gate_threshold_critical = ( - product.security_gate_threshold_critical - if product.security_gate_threshold_critical - else 0 + product.security_gate_threshold_critical if product.security_gate_threshold_critical else 0 ) security_gate_threshold_high = ( - product.security_gate_threshold_high - if product.security_gate_threshold_high - else 0 + product.security_gate_threshold_high if product.security_gate_threshold_high else 0 ) security_gate_threshold_medium = ( - product.security_gate_threshold_medium - if product.security_gate_threshold_medium - else 0 - ) - security_gate_threshold_low = ( - product.security_gate_threshold_low - if product.security_gate_threshold_low - else 0 + product.security_gate_threshold_medium if product.security_gate_threshold_medium else 0 ) + security_gate_threshold_low = product.security_gate_threshold_low if product.security_gate_threshold_low else 0 security_gate_threshold_none = ( - product.security_gate_threshold_none - if product.security_gate_threshold_none - else 0 + product.security_gate_threshold_none if product.security_gate_threshold_none else 0 ) security_gate_threshold_unknown = ( - product.security_gate_threshold_unknown - if product.security_gate_threshold_unknown - else 0 + product.security_gate_threshold_unknown if product.security_gate_threshold_unknown else 0 ) - if ( - get_product_observation_count( # pylint: disable=too-many-boolean-expressions - product, Severity.SEVERITY_CRITICAL - ) - > security_gate_threshold_critical - or get_product_observation_count(product, Severity.SEVERITY_HIGH) - > security_gate_threshold_high - or get_product_observation_count(product, Severity.SEVERITY_MEDIUM) - > security_gate_threshold_medium - or get_product_observation_count(product, Severity.SEVERITY_LOW) - > security_gate_threshold_low - or get_product_observation_count(product, Severity.SEVERITY_NONE) - > security_gate_threshold_none - or get_product_observation_count(product, Severity.SEVERITY_UNKNOWN) - > security_gate_threshold_unknown + annotated_product = get_product_by_id(product_id=product.pk, is_product_group=False, with_annotations=True) + if not annotated_product: + raise 
ValueError(f"Product {product.pk} not found while calculating security gate.") + + if ( # pylint: disable=too-many-boolean-expressions + annotated_product.active_critical_observation_count is None + or annotated_product.active_high_observation_count is None + or annotated_product.active_medium_observation_count is None + or annotated_product.active_low_observation_count is None + or annotated_product.active_none_observation_count is None + or annotated_product.active_unknown_observation_count is None + ): + raise ValueError("Observation counts are None.") + + if ( # pylint: disable=too-many-boolean-expressions + annotated_product.active_critical_observation_count > security_gate_threshold_critical + or annotated_product.active_high_observation_count > security_gate_threshold_high + or annotated_product.active_medium_observation_count > security_gate_threshold_medium + or annotated_product.active_low_observation_count > security_gate_threshold_low + or annotated_product.active_none_observation_count > security_gate_threshold_none + or annotated_product.active_unknown_observation_count > security_gate_threshold_unknown ): new_security_gate_passed = False @@ -127,22 +123,28 @@ def _calculate_active_product_security_gate(product: Product) -> bool: def _calculate_active_config_security_gate(product: Product) -> bool: settings = Settings.load() + annotated_product = get_product_by_id(product_id=product.pk, is_product_group=False, with_annotations=True) + if not annotated_product: + raise ValueError(f"Product {product.pk} not found while calculating security gate.") + + if ( # pylint: disable=too-many-boolean-expressions + annotated_product.active_critical_observation_count is None + or annotated_product.active_high_observation_count is None + or annotated_product.active_medium_observation_count is None + or annotated_product.active_low_observation_count is None + or annotated_product.active_none_observation_count is None + or annotated_product.active_unknown_observation_count is None + ): + raise ValueError("Observation counts are None.") + new_security_gate_passed = True - if ( - get_product_observation_count( # pylint: disable=too-many-boolean-expressions - product, Severity.SEVERITY_CRITICAL - ) - > settings.security_gate_threshold_critical - or get_product_observation_count(product, Severity.SEVERITY_HIGH) - > settings.security_gate_threshold_high - or get_product_observation_count(product, Severity.SEVERITY_MEDIUM) - > settings.security_gate_threshold_medium - or get_product_observation_count(product, Severity.SEVERITY_LOW) - > settings.security_gate_threshold_low - or get_product_observation_count(product, Severity.SEVERITY_NONE) - > settings.security_gate_threshold_none - or get_product_observation_count(product, Severity.SEVERITY_UNKNOWN) - > settings.security_gate_threshold_unknown + if ( # pylint: disable=too-many-boolean-expressions + annotated_product.active_critical_observation_count > settings.security_gate_threshold_critical + or annotated_product.active_high_observation_count > settings.security_gate_threshold_high + or annotated_product.active_medium_observation_count > settings.security_gate_threshold_medium + or annotated_product.active_low_observation_count > settings.security_gate_threshold_low + or annotated_product.active_none_observation_count > settings.security_gate_threshold_none + or annotated_product.active_unknown_observation_count > settings.security_gate_threshold_unknown ): new_security_gate_passed = False diff --git a/backend/application/core/signals.py 
b/backend/application/core/signals.py index 997c752c4..7f63ec8ee 100644 --- a/backend/application/core/signals.py +++ b/backend/application/core/signals.py @@ -1,24 +1,71 @@ -from django.db.models.signals import post_delete, post_save +import logging +from typing import Any + +from django.db.models.signals import post_delete, post_save, pre_save from django.dispatch import receiver +from huey.contrib.djhuey import db_task, lock_task -from application.access_control.services.roles_permissions import Roles -from application.commons.services.global_request import get_current_user -from application.core.models import Branch, Product, Product_Member -from application.core.services.product import set_repository_default_branch +from application.access_control.models import User +from application.access_control.services.current_user import get_current_user +from application.authorization.services.roles_permissions import Roles +from application.commons.models import Settings +from application.core.models import Branch, Observation, Product, Product_Member +from application.core.services.branch import set_default_branch +from application.core.services.observation import ( + get_identity_hash, + normalize_observation_fields, + set_product_flags, +) from application.core.services.security_gate import check_security_gate +from application.issue_tracker.services.issue_tracker import ( + push_deleted_observation_to_issue_tracker, +) + +logger = logging.getLogger("secobserve.core") + + +@receiver(pre_save, sender=Observation) +def observation_pre_save(sender: Any, instance: Observation, **kwargs: Any) -> None: # pylint: disable=unused-argument + # sender is needed according to Django documentation + normalize_observation_fields(instance) + instance.identity_hash = get_identity_hash(instance) + set_product_flags(instance) + + +@receiver(post_delete, sender=Observation) +def observation_post_delete( + sender: Any, instance: Observation, **kwargs: Any # pylint: disable=unused-argument +) -> None: + # sender is needed according to Django documentation + push_deleted_observation_to_issue_tracker(instance.product, instance.issue_tracker_issue_id, get_current_user()) + + +@receiver(post_delete, sender=Product) +def product_post_delete(sender: Any, instance: Product, **kwargs: Any) -> None: # pylint: disable=unused-argument + # sender is needed according to Django documentation + User.objects.filter(username__startswith=f"-product-{instance.pk}-").delete() @receiver(post_save, sender=Product) -def product_post_save( # pylint: disable=unused-argument - sender, instance: Product, created: bool, **kwargs +def product_post_save( + sender: Any, instance: Product, created: bool, **kwargs: Any # pylint: disable=unused-argument ) -> None: # sender is needed according to Django documentation if not created: - if instance.is_product_group: - for product in instance.products.all(): - check_security_gate(product) - else: - check_security_gate(instance) + if ( + "security_gate_active" in instance.get_dirty_fields().keys() # pylint: disable=too-many-boolean-expressions + or "security_gate_threshold_critical" in instance.get_dirty_fields().keys() + or "security_gate_threshold_high" in instance.get_dirty_fields().keys() + or "security_gate_threshold_medium" in instance.get_dirty_fields().keys() + or "security_gate_threshold_low" in instance.get_dirty_fields().keys() + or "security_gate_threshold_none" in instance.get_dirty_fields().keys() + or "security_gate_threshold_unknown" in instance.get_dirty_fields().keys() + ): + if 
instance.is_product_group: + for product in instance.products.all(): + check_security_gate(product) + else: + check_security_gate(instance) else: user = get_current_user() if user: @@ -26,16 +73,37 @@ def product_post_save( # pylint: disable=unused-argument @receiver(post_save, sender=Branch) -def branch_post_save( # pylint: disable=unused-argument - sender, instance: Branch, created: bool, **kwargs +def branch_post_save( + sender: Any, instance: Branch, created: bool, **kwargs: Any # pylint: disable=unused-argument ) -> None: # sender is needed according to Django documentation - set_repository_default_branch(instance.product) + set_default_branch(instance, created) -@receiver(post_delete, sender=Branch) -def branch_post_delete( # pylint: disable=unused-argument - sender, instance: Branch, **kwargs +@receiver(post_save, sender=Settings) +def settings_post_save( # pylint: disable=unused-argument + sender: Any, instance: Settings, created: bool, **kwargs: Any ) -> None: - # sender is needed according to Django documentation - set_repository_default_branch(instance.product) + # parameters are needed according to Django documentation + if ( # pylint: disable=too-many-boolean-expressions + "security_gate_active" in instance.get_dirty_fields().keys() + or "security_gate_threshold_critical" in instance.get_dirty_fields().keys() + or "security_gate_threshold_high" in instance.get_dirty_fields().keys() + or "security_gate_threshold_medium" in instance.get_dirty_fields().keys() + or "security_gate_threshold_low" in instance.get_dirty_fields().keys() + or "security_gate_threshold_none" in instance.get_dirty_fields().keys() + or "security_gate_threshold_unknown" in instance.get_dirty_fields().keys() + ): + settings_post_save_task() + + +@db_task() +@lock_task("product_settings_post_save_task_lock") +def settings_post_save_task() -> None: + + logger.info("--- Settings post_save_task - start ---") + + for product in Product.objects.filter(is_product_group=False): + check_security_gate(product) + + logger.info("--- Settings post_save_task - finished ---") diff --git a/backend/application/core/tasks.py b/backend/application/core/tasks.py deleted file mode 100644 index b359f8edb..000000000 --- a/backend/application/core/tasks.py +++ /dev/null @@ -1,51 +0,0 @@ -import logging - -from huey import crontab -from huey.contrib.djhuey import db_periodic_task, lock_task - -from application.commons import settings_static -from application.commons.services.tasks import handle_task_exception -from application.core.services.housekeeping import ( - delete_inactive_branches_and_set_flags, -) -from application.core.services.risk_acceptance_expiry_task import ( - expire_risk_acceptances, -) - -logger = logging.getLogger("secobserve.core") - - -@db_periodic_task( - crontab( - minute=settings_static.branch_housekeeping_crontab_minute, - hour=settings_static.branch_housekeeping_crontab_hour, - ) -) -@lock_task("branch_housekeeping") -def task_branch_housekeeping() -> None: - logger.info("--- Branch_housekeeping - start ---") - - try: - delete_inactive_branches_and_set_flags() - except Exception as e: - handle_task_exception(e) - - logger.info("--- Branch_housekeeping - finished ---") - - -@db_periodic_task( - crontab( - minute=settings_static.risk_acceptance_expiry_crontab_minute, - hour=settings_static.risk_acceptance_expiry_crontab_hour, - ) -) -@lock_task("risk_acceptance_expiry") -def task_risk_acceptance_expiry() -> None: - logger.info("--- Expire risk acceptances - start ---") - - try: - expire_risk_acceptances() - 
except Exception as e: - handle_task_exception(e) - - logger.info("--- Expire risk acceptances - finished ---") diff --git a/backend/application/core/types.py b/backend/application/core/types.py index 619bb27e1..c4b8d9ea4 100644 --- a/backend/application/core/types.py +++ b/backend/application/core/types.py @@ -27,6 +27,7 @@ class Severity: class Status: STATUS_OPEN = "Open" + STATUS_AFFECTED = "Affected" STATUS_RESOLVED = "Resolved" STATUS_DUPLICATE = "Duplicate" STATUS_FALSE_POSITIVE = "False positive" @@ -37,6 +38,7 @@ class Status: STATUS_CHOICES = [ (STATUS_OPEN, STATUS_OPEN), + (STATUS_AFFECTED, STATUS_AFFECTED), (STATUS_RESOLVED, STATUS_RESOLVED), (STATUS_DUPLICATE, STATUS_DUPLICATE), (STATUS_FALSE_POSITIVE, STATUS_FALSE_POSITIVE), @@ -46,6 +48,8 @@ class Status: (STATUS_RISK_ACCEPTED, STATUS_RISK_ACCEPTED), ] + STATUS_ACTIVE = [STATUS_OPEN, STATUS_AFFECTED, STATUS_IN_REVIEW] + class Assessment_Status: ASSESSMENT_STATUS_APPROVED = "Approved" @@ -68,27 +72,75 @@ class Assessment_Status: ] -class VexJustification: - STATUS_COMPONENT_NOT_PRESENT = "component_not_present" - STATUS_VULNERABLE_CODE_NOT_PRESENT = "vulnerable_code_not_present" - STATUS_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY = ( +class VEX_Justification: + JUSTIFICATION_COMPONENT_NOT_PRESENT = "component_not_present" + JUSTIFICATION_VULNERABLE_CODE_NOT_PRESENT = "vulnerable_code_not_present" + JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY = ( "vulnerable_code_cannot_be_controlled_by_adversary" ) - STATUS_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH = "vulnerable_code_not_in_execute_path" - STATUS_INLINE_MITIGATIONS_ALREADY_EXIST = "inline_mitigations_already_exist" + JUSTIFICATION_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH = "vulnerable_code_not_in_execute_path" + JUSTIFICATION_INLINE_MITIGATIONS_ALREADY_EXIST = "inline_mitigations_already_exist" + JUSTIFICATION_CYCLONEDX_CODE_NOT_PRESENT = "code_not_present" + JUSTIFICATION_CYCLONEDX_CODE_NOT_REACHABLE = "code_not_reachable" + JUSTIFICATION_CYCLONEDX_REQUIRES_CONFIGURATION = "requires_configuration" + JUSTIFICATION_CYCLONEDX_REQUIRES_DEPENDENCY = "requires_dependency" + JUSTIFICATION_CYCLONEDX_REQUIRES_ENVIRONMENT = "requires_environment" + JUSTIFICATION_CYCLONEDX_PROTECTED_BY_COMPILER = "protected_by_compiler" + JUSTIFICATION_CYCLONEDX_PROTECTED_AT_RUNTIME = "protected_at_runtime" + JUSTIFICATION_CYCLONEDX_PROTECTED_AT_PERIMETER = "protected_at_perimeter" + JUSTIFICATION_CYCLONEDX_PROTECTED_BY_MITIGATING_CONTROL = "protected_by_mitigating_control" VEX_JUSTIFICATION_CHOICES = [ - (STATUS_COMPONENT_NOT_PRESENT, "Component not present"), - (STATUS_VULNERABLE_CODE_NOT_PRESENT, "Vulnerable code not present"), + (JUSTIFICATION_COMPONENT_NOT_PRESENT, JUSTIFICATION_COMPONENT_NOT_PRESENT), + (JUSTIFICATION_VULNERABLE_CODE_NOT_PRESENT, JUSTIFICATION_VULNERABLE_CODE_NOT_PRESENT), + ( + JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, + JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, + ), + ( + JUSTIFICATION_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH, + JUSTIFICATION_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH, + ), + ( + JUSTIFICATION_INLINE_MITIGATIONS_ALREADY_EXIST, + JUSTIFICATION_INLINE_MITIGATIONS_ALREADY_EXIST, + ), + ( + JUSTIFICATION_CYCLONEDX_CODE_NOT_PRESENT, + JUSTIFICATION_CYCLONEDX_CODE_NOT_PRESENT, + ), + ( + JUSTIFICATION_CYCLONEDX_CODE_NOT_REACHABLE, + JUSTIFICATION_CYCLONEDX_CODE_NOT_REACHABLE, + ), + ( + JUSTIFICATION_CYCLONEDX_REQUIRES_CONFIGURATION, + JUSTIFICATION_CYCLONEDX_REQUIRES_CONFIGURATION, + ), ( - 
STATUS_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, - "Vulnerable code cannot be controlled by adversary", + JUSTIFICATION_CYCLONEDX_REQUIRES_DEPENDENCY, + JUSTIFICATION_CYCLONEDX_REQUIRES_DEPENDENCY, ), ( - STATUS_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH, - "Vulnerable code not in execute path", + JUSTIFICATION_CYCLONEDX_REQUIRES_ENVIRONMENT, + JUSTIFICATION_CYCLONEDX_REQUIRES_ENVIRONMENT, + ), + ( + JUSTIFICATION_CYCLONEDX_PROTECTED_BY_COMPILER, + JUSTIFICATION_CYCLONEDX_PROTECTED_BY_COMPILER, + ), + ( + JUSTIFICATION_CYCLONEDX_PROTECTED_AT_RUNTIME, + JUSTIFICATION_CYCLONEDX_PROTECTED_AT_RUNTIME, + ), + ( + JUSTIFICATION_CYCLONEDX_PROTECTED_AT_PERIMETER, + JUSTIFICATION_CYCLONEDX_PROTECTED_AT_PERIMETER, + ), + ( + JUSTIFICATION_CYCLONEDX_PROTECTED_BY_MITIGATING_CONTROL, + JUSTIFICATION_CYCLONEDX_PROTECTED_BY_MITIGATING_CONTROL, ), - (STATUS_INLINE_MITIGATIONS_ALREADY_EXIST, "Inline mitigations already exist"), ] @@ -127,3 +179,33 @@ class PURL_Type: "swid": "SWID", "swift": "Swift", } + + +class OSVLinuxDistribution: + DISTRIBUTION_ALMALINUX = "AlmaLinux" + DISTRIBUTION_ALPINE = "Alpine" + DISTRIBUTION_CHAINGUARD = "Chainguard" + DISTRIBUTION_DEBIAN = "Debian" + DISTRIBUTION_MAGEIA = "Mageia" + DISTRIBUTION_OPENSUSE = "openSUSE" + DISTRIBUTION_PHOTON_OS = "Photon OS" + DISTRIBUTION_REDHAT = "Red Hat" + DISTRIBUTION_ROCKY_LINUX = "Rocky Linux" + DISTRIBUTION_SUSE = "SUSE" + DISTRIBUTION_UBUNTU = "Ubuntu" + DISTRIBUTION_WOLFI = "Wolfi" + + OSV_LINUX_DISTRIBUTION_CHOICES = [ + (DISTRIBUTION_ALMALINUX, DISTRIBUTION_ALMALINUX), + (DISTRIBUTION_ALPINE, DISTRIBUTION_ALPINE), + (DISTRIBUTION_CHAINGUARD, DISTRIBUTION_CHAINGUARD), + (DISTRIBUTION_DEBIAN, DISTRIBUTION_DEBIAN), + (DISTRIBUTION_MAGEIA, DISTRIBUTION_MAGEIA), + (DISTRIBUTION_OPENSUSE, DISTRIBUTION_OPENSUSE), + (DISTRIBUTION_PHOTON_OS, DISTRIBUTION_PHOTON_OS), + (DISTRIBUTION_REDHAT, DISTRIBUTION_REDHAT), + (DISTRIBUTION_ROCKY_LINUX, DISTRIBUTION_ROCKY_LINUX), + (DISTRIBUTION_SUSE, DISTRIBUTION_SUSE), + (DISTRIBUTION_UBUNTU, DISTRIBUTION_UBUNTU), + (DISTRIBUTION_WOLFI, DISTRIBUTION_WOLFI), + ] diff --git a/backend/application/epss/apps.py b/backend/application/epss/apps.py new file mode 100644 index 000000000..5ac0234ce --- /dev/null +++ b/backend/application/epss/apps.py @@ -0,0 +1,14 @@ +from django.apps import AppConfig + + +class EPSSConfig(AppConfig): + name = "application.epss" + verbose_name = "EPSS" + + def ready(self) -> None: + try: + import application.epss.signals # noqa F401 pylint: disable=import-outside-toplevel, unused-import + except ImportError: + pass + + import config.schema # noqa: F401 pylint: disable=import-outside-toplevel, unused-import diff --git a/backend/application/epss/migrations/0003_exploit_information_alter_epss_score_cve.py b/backend/application/epss/migrations/0003_exploit_information_alter_epss_score_cve.py new file mode 100644 index 000000000..b92358123 --- /dev/null +++ b/backend/application/epss/migrations/0003_exploit_information_alter_epss_score_cve.py @@ -0,0 +1,40 @@ +# Generated by Django 5.1.6 on 2025-03-01 08:17 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("epss", "0002_rename_epss_scores_epss_score"), + ] + + operations = [ + migrations.CreateModel( + name="Exploit_Information", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("cve", models.CharField(max_length=255, unique=True)), + ("base_cvss_vector", models.CharField(blank=True, 
max_length=255)), + ("cisa_kev", models.BooleanField(default=False)), + ("vulncheck_kev", models.BooleanField(default=False)), + ("exploitdb", models.BooleanField(default=False)), + ("metasploit", models.BooleanField(default=False)), + ("nuclei", models.BooleanField(default=False)), + ("poc_github", models.BooleanField(default=False)), + ], + ), + migrations.AlterField( + model_name="epss_score", + name="cve", + field=models.CharField(max_length=255, unique=True), + ), + ] diff --git a/backend/application/epss/models.py b/backend/application/epss/models.py index 946a25525..57abf77f2 100644 --- a/backend/application/epss/models.py +++ b/backend/application/epss/models.py @@ -1,12 +1,13 @@ from datetime import date from decimal import Decimal +from typing import Any from django.core.validators import MaxValueValidator, MinValueValidator -from django.db.models import CharField, DateField, DecimalField, Model +from django.db.models import BooleanField, CharField, DateField, DecimalField, Model class EPSS_Score(Model): - cve = CharField(max_length=20, unique=True) + cve = CharField(max_length=255, unique=True) epss_score = DecimalField( max_digits=6, decimal_places=5, @@ -24,14 +25,25 @@ class EPSS_Score(Model): class EPSS_Status(Model): score_date = DateField(default=date.today) - def save(self, *args, **kwargs): + def save(self, *args: Any, **kwargs: Any) -> None: self.pk = 1 super().save(*args, **kwargs) - def delete(self, *args, **kwargs): - pass + def delete(self, *args: Any, **kwargs: Any) -> tuple[int, dict[str, int]]: + return 0, {} @classmethod def load(cls) -> "EPSS_Status": obj, _ = cls.objects.get_or_create(pk=1) return obj + + +class Exploit_Information(Model): + cve = CharField(max_length=255, unique=True) + base_cvss_vector = CharField(max_length=255, blank=True) + cisa_kev = BooleanField(default=False) + vulncheck_kev = BooleanField(default=False) + exploitdb = BooleanField(default=False) + metasploit = BooleanField(default=False) + nuclei = BooleanField(default=False) + poc_github = BooleanField(default=False) diff --git a/backend/application/epss/queries/__init__.py b/backend/application/epss/queries/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/epss/queries/exploit_information.py b/backend/application/epss/queries/exploit_information.py new file mode 100644 index 000000000..f73f1c5ea --- /dev/null +++ b/backend/application/epss/queries/exploit_information.py @@ -0,0 +1,10 @@ +from typing import Optional + +from application.epss.models import Exploit_Information + + +def get_exploit_information_by_cve(cve: str) -> Optional[Exploit_Information]: + try: + return Exploit_Information.objects.get(cve=cve) + except Exploit_Information.DoesNotExist: + return None diff --git a/backend/application/epss/services/cvss_bt.py b/backend/application/epss/services/cvss_bt.py new file mode 100644 index 000000000..49f739a32 --- /dev/null +++ b/backend/application/epss/services/cvss_bt.py @@ -0,0 +1,234 @@ +import csv +import logging +from typing import Optional + +import requests +from cvss import CVSS3, CVSS4 +from cvss.exceptions import CVSSError +from django.core.paginator import Paginator +from django.utils import timezone + +from application.commons.models import Settings +from application.core.models import Observation +from application.core.services.observation import get_current_severity +from application.core.types import Severity, Status +from application.epss.models import Exploit_Information +from 
application.epss.queries.exploit_information import get_exploit_information_by_cve + +logger = logging.getLogger("secobserve.epss") + + +def import_cvss_bt() -> str: + response = requests.get( # nosec B113 + # This is a false positive, there is a timeout of 5 minutes + "https://raw.githubusercontent.com/t0sche/cvss-bt/refs/heads/main/cvss-bt.csv", + timeout=5 * 60, + stream=True, + ) + response.raise_for_status() + + line = (line.decode("utf-8") for line in response.iter_lines()) + reader = csv.reader(line, delimiter=",", quotechar='"') + + first_row = True + counter = 0 + exploit_information_list = [] + num_exploit_informations = 0 + + settings = Settings.load() + + for row in reader: + if first_row: + if not _check_first_row(row): + return "Error: First row of cvss-bt CSV is not valid." + + Exploit_Information.objects.all().delete() + + first_row = False + continue + + if len(row) != 17: + logger.warning("Row doesn't have 17 elements: %s", row) + continue + + cve = row[0] + cve_year = _get_year_from_cve(cve) + if cve_year is None: + continue + current_year = timezone.now().year + if cve_year <= current_year - settings.exploit_information_max_age_years: + continue + + exploit_information = Exploit_Information( + cve=cve, + base_cvss_vector=row[7], + cisa_kev=row[11].lower() == "true", + vulncheck_kev=row[12].lower() == "true", + exploitdb=row[13].lower() == "true", + metasploit=row[14].lower() == "true", + nuclei=row[15].lower() == "true", + poc_github=row[16].lower() == "true", + ) + num_exploit_informations += 1 + exploit_information_list.append(exploit_information) + counter += 1 + if counter == 1000: + Exploit_Information.objects.bulk_create(exploit_information_list) + counter = 0 + exploit_information_list = [] + + if exploit_information_list: + Exploit_Information.objects.bulk_create(exploit_information_list) + + num_observations = apply_exploit_information_observations(settings) + + return ( + f"Imported {num_exploit_informations} exploit information entries.\n" + + f"Applied exploit information to {num_observations} observations." 
+ ) + + +def _check_first_row(row: list[str]) -> bool: + if len(row) != 17: + logger.error("First row doesn't have 17 elements: %s", row) + return False + + errors = [] + if row[0] != "cve": + errors.append("First element of first row is not 'cve'") + if row[7] != "base_vector": + errors.append("Eigth element of first row is not 'base_vector'") + if row[11] != "cisa_kev": + errors.append("Twelfth element of first row is not 'cisa_kev'") + if row[12] != "vulncheck_kev": + errors.append("Thirteenth element of first row is not 'vulncheck_kev'") + if row[13] != "exploitdb": + errors.append("Fourteenth element of first row is not 'exploitdb'") + if row[14] != "metasploit": + errors.append("Fifteenth element of first row is not 'metasploit'") + if row[15] != "nuclei": + errors.append("Sixteenth element of first row is not 'nuclei'") + if row[16] != "poc_github": + errors.append("Seventeenth element of first row is not 'poc_github'") + + if errors: + logger.error("%s: %s", ", ".join(errors), row) + return False + + return True + + +def _get_year_from_cve(cve: str) -> Optional[int]: + if not cve.startswith("CVE-"): + return None + cve_parts = cve.split("-") + if len(cve_parts) != 3: + return None + cve_year = cve.split("-")[1] + if not cve_year.isdigit(): + return None + return int(cve_year) + + +def apply_exploit_information_observations(settings: Settings) -> int: + num_observations = 0 + + observations = ( + Observation.objects.filter(vulnerability_id__startswith="CVE-") + .exclude(current_status=Status.STATUS_RESOLVED) + .order_by("id") + ) + + paginator = Paginator(observations, 1000) + for page_number in paginator.page_range: + page = paginator.page(page_number) + updates = [] + + for observation in page.object_list: + if apply_exploit_information(observation, settings): + updates.append(observation) + num_observations += 1 + + Observation.objects.bulk_update( + updates, + [ + "cvss3_score", + "cvss3_vector", + "cvss4_score", + "cvss4_vector", + "cve_found_in", + "current_severity", + ], + ) + + return num_observations + + +def apply_exploit_information(observation: Observation, settings: Settings) -> bool: + if not observation.vulnerability_id.startswith("CVE-"): + return False + + if settings.feature_exploit_information: # pylint: disable=no-else-return + # else shall stay for clarity and just in case a return would be forgotten + exploit_information = get_exploit_information_by_cve(observation.vulnerability_id) + if not exploit_information: + return False + + cvss3_vector_before = observation.cvss3_vector + cvss4_vector_before = observation.cvss4_vector + cve_found_in_before = observation.cve_found_in + + if not observation.cvss3_vector and exploit_information.base_cvss_vector.startswith("CVSS:3"): + try: + cvss = CVSS3(exploit_information.base_cvss_vector) + observation.cvss3_vector = exploit_information.base_cvss_vector + observation.cvss3_score = cvss.base_score + except CVSSError: + pass + + if not observation.cvss4_vector and exploit_information.base_cvss_vector.startswith("CVSS:4"): + try: + cvss = CVSS4(exploit_information.base_cvss_vector) + observation.cvss4_vector = exploit_information.base_cvss_vector + observation.cvss4_score = cvss.base_score + except CVSSError: + pass + + _add_cve_found_in(observation, exploit_information) + + if ( + observation.cvss3_vector != cvss3_vector_before # pylint: disable=too-many-boolean-expressions + or observation.cvss4_vector != cvss4_vector_before + or observation.cve_found_in != cve_found_in_before + or ( + (observation.cvss3_vector or 
observation.cvss4_vector) + and observation.current_severity == Severity.SEVERITY_UNKNOWN + ) + ): + observation.current_severity = get_current_severity(observation) + return True + + return False + else: + if observation.cve_found_in: + observation.cve_found_in = "" + return True + + return False + + +def _add_cve_found_in(observation: Observation, exploit_information: Exploit_Information) -> None: + cve_found_in = [] + if exploit_information.cisa_kev: + cve_found_in.append("CISA KEV") + if exploit_information.exploitdb: + cve_found_in.append("Exploit-DB") + if exploit_information.metasploit: + cve_found_in.append("Metasploit") + if exploit_information.nuclei: + cve_found_in.append("Nuclei") + if exploit_information.poc_github: + cve_found_in.append("PoC GitHub") + if exploit_information.vulncheck_kev: + cve_found_in.append("VulnCheck KEV") + observation.cve_found_in = ", ".join(cve_found_in) diff --git a/backend/application/epss/services/epss.py b/backend/application/epss/services/epss.py index 452ff2a1b..84e49e0b7 100644 --- a/backend/application/epss/services/epss.py +++ b/backend/application/epss/services/epss.py @@ -10,7 +10,7 @@ from application.epss.models import EPSS_Score, EPSS_Status -def import_epss() -> None: +def import_epss() -> str: response = requests.get( "https://epss.cyentia.com/epss_scores-current.csv.gz", timeout=60, @@ -23,6 +23,7 @@ def import_epss() -> None: counter = 0 scores = [] + num_epss_scores = 0 for line in extracted_data.split(b"\n"): decoded_line = line.decode() @@ -30,9 +31,7 @@ def import_epss() -> None: epss_date = re.search(r"(\d{4}-\d{2}-\d{2})", decoded_line) if epss_date: epss_status = EPSS_Status.load() - epss_status.score_date = datetime.strptime( - epss_date.group(0), "%Y-%m-%d" - ) + epss_status.score_date = datetime.strptime(epss_date.group(0), "%Y-%m-%d") epss_status.save() if decoded_line.startswith("CVE"): @@ -45,6 +44,7 @@ def import_epss() -> None: epss_percentile=elements[2], ) ) + num_epss_scores += 1 counter += 1 if counter == 1000: EPSS_Score.objects.bulk_create(scores) @@ -53,8 +53,12 @@ def import_epss() -> None: if scores: EPSS_Score.objects.bulk_create(scores) + return f"Imported {num_epss_scores} EPSS scores." + + +def epss_apply_observations() -> str: + num_observations = 0 -def epss_apply_observations() -> None: observations = ( Observation.objects.filter(vulnerability_id__startswith="CVE-") .exclude(current_status=Status.STATUS_RESOLVED) @@ -68,38 +72,27 @@ def epss_apply_observations() -> None: updates = [] for observation in page.object_list: - if _epss_apply_score(observation): + if apply_epss(observation): updates.append(observation) + num_observations += 1 Observation.objects.bulk_update(updates, ["epss_score", "epss_percentile"]) + return f"Applied EPSS scores to {num_observations} observations." 
-def _epss_apply_score(observation: Observation) -> bool: + +def apply_epss(observation: Observation) -> bool: if observation.vulnerability_id.startswith("CVE-"): try: epss_score = EPSS_Score.objects.get(cve=observation.vulnerability_id) except EPSS_Score.DoesNotExist: return False - new_epss_score = ( - round(epss_score.epss_score * 100, 3) if epss_score.epss_score else None - ) - new_epss_percentile = ( - round(epss_score.epss_percentile * 100, 3) - if epss_score.epss_percentile - else None - ) - if ( - observation.epss_score != new_epss_score - or observation.epss_percentile != new_epss_percentile - ): + new_epss_score = round(epss_score.epss_score * 100, 3) if epss_score.epss_score else None + new_epss_percentile = round(epss_score.epss_percentile * 100, 3) if epss_score.epss_percentile else None + if observation.epss_score != new_epss_score or observation.epss_percentile != new_epss_percentile: observation.epss_score = new_epss_score observation.epss_percentile = new_epss_percentile return True return False - - -def epss_apply_observation(observation: Observation) -> None: - if _epss_apply_score(observation): - observation.save() diff --git a/backend/application/epss/signals.py b/backend/application/epss/signals.py new file mode 100644 index 000000000..32eb2689c --- /dev/null +++ b/backend/application/epss/signals.py @@ -0,0 +1,40 @@ +import logging +from typing import Any + +from django.db.models.signals import post_save +from django.dispatch import receiver +from huey.contrib.djhuey import db_task, lock_task + +from application.commons.models import Settings +from application.epss.models import Exploit_Information +from application.epss.services.cvss_bt import ( + apply_exploit_information_observations, + import_cvss_bt, +) + +logger = logging.getLogger("secobserve.epss") + + +@receiver(post_save, sender=Settings) +def settings_post_save( # pylint: disable=unused-argument + sender: Any, instance: Settings, created: bool, **kwargs: Any +) -> None: + # parameters are needed according to Django documentation + settings_post_save_task(instance, created) + + +@db_task() +@lock_task("epss_settings_post_save_task_lock") +def settings_post_save_task(settings: Settings, created: bool) -> None: + + logger.info("--- Settings post_save_task - start ---") + + if not created: + if "feature_exploit_information" in settings.get_dirty_fields().keys(): + if settings.feature_exploit_information and not Exploit_Information.objects.exists(): + import_cvss_bt() + if not settings.feature_exploit_information and Exploit_Information.objects.exists(): + Exploit_Information.objects.all().delete() + apply_exploit_information_observations(settings) + + logger.info("--- Settings post_save_task - finished ---") diff --git a/backend/application/epss/tasks.py b/backend/application/epss/tasks.py deleted file mode 100644 index 77b25faad..000000000 --- a/backend/application/epss/tasks.py +++ /dev/null @@ -1,29 +0,0 @@ -import logging - -from huey import crontab -from huey.contrib.djhuey import db_periodic_task, lock_task - -from application.commons import settings_static -from application.commons.services.tasks import handle_task_exception -from application.epss.services.epss import epss_apply_observations, import_epss - -logger = logging.getLogger("secobserve.epss") - - -@db_periodic_task( - crontab( - minute=settings_static.background_epss_import_crontab_minute, - hour=settings_static.background_epss_import_crontab_hour, - ) -) -@lock_task("import_epss") -def task_import_epss() -> None: - logger.info("--- 
Import_EPSS - start ---") - - try: - import_epss() - epss_apply_observations() - except Exception as e: - handle_task_exception(e) - - logger.info("--- Import_EPSS - finished ---") diff --git a/backend/application/import_observations/api/filters.py b/backend/application/import_observations/api/filters.py index 173faddbd..6120b499e 100644 --- a/backend/application/import_observations/api/filters.py +++ b/backend/application/import_observations/api/filters.py @@ -26,15 +26,14 @@ class Meta: class VulnerabilityCheckFilter(FilterSet): scanner = CharFilter(field_name="scanner", lookup_expr="icontains") filename = CharFilter(field_name="filename", lookup_expr="icontains") - api_configuration_name = CharFilter( - field_name="api_configuration_name", lookup_expr="icontains" - ) + api_configuration_name = CharFilter(field_name="api_configuration_name", lookup_expr="icontains") ordering = OrderingFilter( # tuple-mapping retains order fields=( ("product", "product"), ("branch", "branch_name"), + ("service", "service_name"), ("scanner", "scanner_name"), ("filename", "filename"), ("api_configuration_name", "api_configuration_name"), @@ -47,7 +46,7 @@ class VulnerabilityCheckFilter(FilterSet): class Meta: model = Vulnerability_Check - fields = ["product", "branch", "scanner", "filename", "api_configuration_name"] + fields = ["product", "branch", "service", "scanner", "filename", "api_configuration_name"] class ParserFilter(FilterSet): diff --git a/backend/application/import_observations/api/permissions.py b/backend/application/import_observations/api/permissions.py index 4f357857f..bc7a86094 100644 --- a/backend/application/import_observations/api/permissions.py +++ b/backend/application/import_observations/api/permissions.py @@ -1,20 +1,22 @@ +from typing import Any + from rest_framework.permissions import BasePermission +from rest_framework.request import Request +from rest_framework.views import APIView -from application.access_control.api.permissions_base import ( +from application.authorization.api.permissions_base import ( check_object_permission, check_post_permission, ) -from application.access_control.services.roles_permissions import Permissions +from application.authorization.services.roles_permissions import Permissions from application.core.models import Product class UserHasApiConfigurationPermission(BasePermission): - def has_permission(self, request, view): - return check_post_permission( - request, Product, "product", Permissions.Api_Configuration_Create - ) + def has_permission(self, request: Request, view: APIView) -> bool: + return check_post_permission(request, Product, "product", Permissions.Api_Configuration_Create) - def has_object_permission(self, request, view, obj): + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: return check_object_permission( request=request, object_to_check=obj, @@ -25,7 +27,7 @@ def has_object_permission(self, request, view, obj): class UserHasVulnerabilityCheckPermission(BasePermission): - def has_object_permission(self, request, view, obj): + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: return check_object_permission( request=request, object_to_check=obj, diff --git a/backend/application/import_observations/api/serializers.py b/backend/application/import_observations/api/serializers.py index f33ad0f19..585bd526c 100644 --- a/backend/application/import_observations/api/serializers.py +++ b/backend/application/import_observations/api/serializers.py @@ -10,7 +10,7 @@ 
ValidationError, ) -from application.access_control.services.roles_permissions import Permissions +from application.authorization.services.roles_permissions import Permissions from application.core.api.serializers_product import NestedProductSerializer from application.core.models import Branch from application.import_observations.models import ( @@ -28,6 +28,7 @@ class FileUploadObservationsByIdRequestSerializer(Serializer): product = IntegerField(validators=[MinValueValidator(0)]) branch = IntegerField(validators=[MinValueValidator(0)], required=False) service = CharField(max_length=255, required=False) + service_id = IntegerField(validators=[MinValueValidator(0)], required=False) docker_image_name_tag = CharField(max_length=513, required=False) endpoint_url = CharField(max_length=2048, required=False) kubernetes_cluster = CharField(max_length=255, required=False) @@ -45,10 +46,26 @@ class FileUploadObservationsByNameRequestSerializer(Serializer): suppress_licenses = BooleanField(required=False) +class FileUploadSBOMByIdRequestSerializer(Serializer): + file = FileField(max_length=255) + product = IntegerField(validators=[MinValueValidator(0)]) + branch = IntegerField(validators=[MinValueValidator(0)], required=False) + service = CharField(max_length=255, required=False) + service_id = IntegerField(validators=[MinValueValidator(0)], required=False) + + +class FileUploadSBOMByNameRequestSerializer(Serializer): + file = FileField(max_length=255) + product_name = CharField(max_length=255) + branch_name = CharField(max_length=255, required=False) + service = CharField(max_length=255, required=False) + + class ApiImportObservationsByIdRequestSerializer(Serializer): api_configuration = IntegerField(validators=[MinValueValidator(0)]) branch = IntegerField(validators=[MinValueValidator(0)], required=False) service = CharField(max_length=255, required=False, allow_blank=True) + service_id = IntegerField(validators=[MinValueValidator(0)], required=False) docker_image_name_tag = CharField(max_length=513, required=False, allow_blank=True) endpoint_url = CharField(max_length=2048, required=False, allow_blank=True) kubernetes_cluster = CharField(max_length=255, required=False) @@ -63,7 +80,7 @@ class ApiImportObservationsByNameRequestSerializer(Serializer): kubernetes_cluster = CharField(max_length=255, required=False) -class ImportObservationsResponseSerializer(Serializer): +class FileImportObservationsResponseSerializer(Serializer): observations_new = IntegerField() observations_updated = IntegerField() observations_resolved = IntegerField() @@ -72,15 +89,27 @@ class ImportObservationsResponseSerializer(Serializer): license_components_deleted = IntegerField() +class FileImportSBOMResponseSerializer(Serializer): + license_components_new = IntegerField() + license_components_updated = IntegerField() + license_components_deleted = IntegerField() + + +class APIImportObservationsResponseSerializer(Serializer): + observations_new = IntegerField() + observations_updated = IntegerField() + observations_resolved = IntegerField() + + class ApiConfigurationSerializer(ModelSerializer): product_data = NestedProductSerializer(source="product", read_only=True) test_connection = BooleanField(write_only=True, required=False, default=False) class Meta: model = Api_Configuration - fields = "__all__" + exclude = ["automatic_import_service_legacy"] - def to_representation(self, instance): + def to_representation(self, instance: Api_Configuration) -> dict: # Only users who can edit an API Configuration are allowed to see 
the API key data = super().to_representation(instance) @@ -90,7 +119,7 @@ def to_representation(self, instance): return data - def validate(self, attrs: dict): + def validate(self, attrs: dict) -> dict: self.instance: Api_Configuration if attrs.pop("test_connection", False): if self.instance is not None: @@ -101,15 +130,9 @@ def validate(self, attrs: dict): project_key = attrs.get("project_key", self.instance.project_key) api_key = attrs.get("api_key", self.instance.api_key) query = attrs.get("query", self.instance.query) - basic_auth_enabled = attrs.get( - "basic_auth_enabled", self.instance.basic_auth_enabled - ) - basic_auth_username = attrs.get( - "basic_auth_username", self.instance.basic_auth_username - ) - basic_auth_password = attrs.get( - "basic_auth_password", self.instance.basic_auth_password - ) + basic_auth_enabled = attrs.get("basic_auth_enabled", self.instance.basic_auth_enabled) + basic_auth_username = attrs.get("basic_auth_username", self.instance.basic_auth_username) + basic_auth_password = attrs.get("basic_auth_password", self.instance.basic_auth_password) verify_ssl = attrs.get("verify_ssl", self.instance.verify_ssl) else: product = attrs.get("product") @@ -142,29 +165,22 @@ def validate(self, attrs: dict): raise ValidationError("\n".join(errors)) data_product = attrs.get("product") - if ( - self.instance is not None - and data_product - and data_product != self.instance.product - ): + if self.instance is not None and data_product and data_product != self.instance.product: raise ValidationError("Product cannot be changed") return attrs def validate_automatic_import_branch(self, branch: Branch) -> Branch: - product_id = ( - self.instance.product.pk if self.instance else self.initial_data["product"] - ) + product_id = self.instance.product.pk if self.instance else self.initial_data["product"] if branch and branch.product.pk != product_id: - raise ValidationError( - "Branch does not belong to the same product as the API Configuration" - ) + raise ValidationError("Branch does not belong to the same product as the API Configuration") return branch class VulnerabilityCheckSerializer(ModelSerializer): branch_name = SerializerMethodField() + service_name = SerializerMethodField() scanner_name = SerializerMethodField() def get_branch_name(self, vulnerability_check: Vulnerability_Check) -> str: @@ -173,6 +189,12 @@ def get_branch_name(self, vulnerability_check: Vulnerability_Check) -> str: return vulnerability_check.branch.name + def get_service_name(self, vulnerability_check: Vulnerability_Check) -> str: + if not vulnerability_check.service: + return "" + + return vulnerability_check.service.name + def get_scanner_name(self, vulnerability_check: Vulnerability_Check) -> str: if not vulnerability_check.scanner: return "" diff --git a/backend/application/import_observations/api/views.py b/backend/application/import_observations/api/views.py index 3ad1afaa8..423b05b20 100644 --- a/backend/application/import_observations/api/views.py +++ b/backend/application/import_observations/api/views.py @@ -1,20 +1,28 @@ +from typing import Optional + +from django.db.models import QuerySet from django_filters.rest_framework import DjangoFilterBackend from drf_spectacular.utils import extend_schema from rest_framework import status +from rest_framework.decorators import action from rest_framework.exceptions import ValidationError from rest_framework.filters import SearchFilter from rest_framework.mixins import ListModelMixin, RetrieveModelMixin from rest_framework.parsers import MultiPartParser 
from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request from rest_framework.response import Response +from rest_framework.serializers import Serializer +from rest_framework.status import HTTP_404_NOT_FOUND from rest_framework.views import APIView from rest_framework.viewsets import GenericViewSet, ModelViewSet -from application.access_control.services.authorization import user_has_permission_or_403 -from application.access_control.services.roles_permissions import Permissions -from application.core.models import Branch +from application.authorization.services.authorization import user_has_permission_or_403 +from application.authorization.services.roles_permissions import Permissions +from application.core.models import Branch, Product from application.core.queries.branch import get_branch_by_id, get_branch_by_name from application.core.queries.product import get_product_by_id, get_product_by_name +from application.core.queries.service import get_service_by_id from application.import_observations.api.filters import ( ApiConfigurationFilter, ParserFilter, @@ -28,9 +36,13 @@ ApiConfigurationSerializer, ApiImportObservationsByIdRequestSerializer, ApiImportObservationsByNameRequestSerializer, + APIImportObservationsResponseSerializer, + FileImportObservationsResponseSerializer, + FileImportSBOMResponseSerializer, FileUploadObservationsByIdRequestSerializer, FileUploadObservationsByNameRequestSerializer, - ImportObservationsResponseSerializer, + FileUploadSBOMByIdRequestSerializer, + FileUploadSBOMByNameRequestSerializer, ParserSerializer, VulnerabilityCheckSerializer, ) @@ -47,6 +59,10 @@ from application.import_observations.queries.vulnerability_check import ( get_vulnerability_checks, ) +from application.import_observations.scanners.osv_scanner import ( + scan_branch, + scan_product, +) from application.import_observations.services.import_observations import ( ApiImportParameters, FileUploadParameters, @@ -58,76 +74,38 @@ class ApiImportObservationsById(APIView): @extend_schema( request=ApiImportObservationsByIdRequestSerializer, - responses={status.HTTP_200_OK: ImportObservationsResponseSerializer}, + responses={status.HTTP_200_OK: APIImportObservationsResponseSerializer}, ) - def post(self, request): - request_serializer = ApiImportObservationsByIdRequestSerializer( - data=request.data - ) + def post(self, request: Request) -> Response: + request_serializer = ApiImportObservationsByIdRequestSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) - api_configuration_id = request_serializer.validated_data.get( - "api_configuration" - ) + api_configuration_id = request_serializer.validated_data.get("api_configuration") api_configuration = get_api_configuration_by_id(api_configuration_id) if not api_configuration: - raise ValidationError( - f"API Configuration {api_configuration} does not exist" - ) + raise ValidationError(f"API Configuration {api_configuration} does not exist") - user_has_permission_or_403( - api_configuration.product, Permissions.Product_Import_Observations - ) + user_has_permission_or_403(api_configuration.product, Permissions.Product_Import_Observations) branch = None branch_id = request_serializer.validated_data.get("branch") if branch_id: branch = get_branch_by_id(api_configuration.product, branch_id) if not branch: - raise ValidationError( - f"Branch {branch_id} does not exist for product {api_configuration.product}" - ) - - service = 
request_serializer.validated_data.get("service") - docker_image_name_tag = request_serializer.validated_data.get( - "docker_image_name_tag" - ) - endpoint_url = request_serializer.validated_data.get("endpoint_url") - kubernetes_cluster = request_serializer.validated_data.get("kubernetes_cluster") - - api_import_parameters = ApiImportParameters( - api_configuration=api_configuration, - branch=branch, - service=service, - docker_image_name_tag=docker_image_name_tag, - endpoint_url=endpoint_url, - kubernetes_cluster=kubernetes_cluster, - ) - - ( - observations_new, - observations_updated, - observations_resolved, - ) = api_import_observations(api_import_parameters) + raise ValidationError(f"Branch {branch_id} does not exist for product {api_configuration.product}") - response_data = { - "observations_new": observations_new, - "observations_updated": observations_updated, - "observations_resolved": observations_resolved, - } + response_data = _api_import_observations(request_serializer, api_configuration, branch) return Response(response_data) class ApiImportObservationsByName(APIView): @extend_schema( request=ApiImportObservationsByNameRequestSerializer, - responses={status.HTTP_200_OK: ImportObservationsResponseSerializer}, + responses={status.HTTP_200_OK: APIImportObservationsResponseSerializer}, ) - def post(self, request): - request_serializer = ApiImportObservationsByNameRequestSerializer( - data=request.data - ) + def post(self, request: Request) -> Response: + request_serializer = ApiImportObservationsByNameRequestSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) @@ -143,212 +121,251 @@ def post(self, request): if branch_name: branch = get_branch_by_name(product, branch_name) if not branch: - branch = Branch.objects.create(product=product, name=branch_name) - - api_configuration_name = request_serializer.validated_data.get( - "api_configuration_name" - ) - api_configuration = get_api_configuration_by_name( - product, api_configuration_name - ) + branch = Branch.objects.create( + product=product, name=branch_name, is_default_branch=product.repository_default_branch is None + ) + + api_configuration_name = request_serializer.validated_data.get("api_configuration_name") + api_configuration = get_api_configuration_by_name(product, api_configuration_name) if not api_configuration: raise ValidationError( f"API Configuration {api_configuration_name} does not exist for product {product.name}" ) - service = request_serializer.validated_data.get("service") - docker_image_name_tag = request_serializer.validated_data.get( - "docker_image_name_tag" - ) - endpoint_url = request_serializer.validated_data.get("endpoint_url") - kubernetes_cluster = request_serializer.validated_data.get("kubernetes_cluster") - - api_import_parameters = ApiImportParameters( - api_configuration=api_configuration, - branch=branch, - service=service, - docker_image_name_tag=docker_image_name_tag, - endpoint_url=endpoint_url, - kubernetes_cluster=kubernetes_cluster, - ) - ( - observations_new, - observations_updated, - observations_resolved, - ) = api_import_observations(api_import_parameters) - - response_data = { - "observations_new": observations_new, - "observations_updated": observations_updated, - "observations_resolved": observations_resolved, - } + response_data = _api_import_observations(request_serializer, api_configuration, branch) return Response(response_data) +def _api_import_observations( + request_serializer: Serializer, api_configuration: 
Api_Configuration, branch: Optional[Branch] +) -> dict[str, int]: + service_name = _get_service_name(api_configuration.product, request_serializer) + docker_image_name_tag = request_serializer.validated_data.get("docker_image_name_tag") + endpoint_url = request_serializer.validated_data.get("endpoint_url") + kubernetes_cluster = request_serializer.validated_data.get("kubernetes_cluster") + + api_import_parameters = ApiImportParameters( + api_configuration=api_configuration, + branch=branch, + service_name=service_name, + docker_image_name_tag=docker_image_name_tag, + endpoint_url=endpoint_url, + kubernetes_cluster=kubernetes_cluster, + ) + + ( + observations_new, + observations_updated, + observations_resolved, + ) = api_import_observations(api_import_parameters) + + response_data = { + "observations_new": observations_new, + "observations_updated": observations_updated, + "observations_resolved": observations_resolved, + } + + return response_data + + class FileUploadObservationsById(APIView): parser_classes = [MultiPartParser] @extend_schema( request=FileUploadObservationsByIdRequestSerializer, - responses={status.HTTP_200_OK: ImportObservationsResponseSerializer}, + responses={status.HTTP_200_OK: FileImportObservationsResponseSerializer}, ) - def post(self, request): # pylint: disable=too-many-locals - # not too much we can do about this - request_serializer = FileUploadObservationsByIdRequestSerializer( - data=request.data - ) - if not request_serializer.is_valid(): - raise ValidationError(request_serializer.errors) + def post(self, request: Request) -> Response: + request_serializer = FileUploadObservationsByIdRequestSerializer(data=request.data) + product, branch = _get_product_branch_by_id(request_serializer) + service_name = _get_service_name(product, request_serializer) + response_data = _file_upload_observations(request_serializer, product, branch, service_name) + return Response(response_data) - product_id = request_serializer.validated_data.get("product") - product = get_product_by_id(product_id) - if not product: - raise ValidationError(f"Product {product_id} does not exist") - user_has_permission_or_403(product, Permissions.Product_Import_Observations) +class FileUploadObservationsByName(APIView): + parser_classes = [MultiPartParser] - branch = None - branch_id = request_serializer.validated_data.get("branch") - if branch_id: - branch = get_branch_by_id(product, branch_id) - if not branch: - raise ValidationError( - f"Branch {branch_id} does not exist for product {product}" - ) + @extend_schema( + request=FileUploadObservationsByNameRequestSerializer, + responses={status.HTTP_200_OK: FileImportObservationsResponseSerializer}, + ) + def post(self, request: Request) -> Response: + request_serializer = FileUploadObservationsByNameRequestSerializer(data=request.data) + product, branch = _get_product_branch_by_name(request_serializer) + service_name = request_serializer.validated_data.get("service", "") + response_data = _file_upload_observations(request_serializer, product, branch, service_name) - file = request_serializer.validated_data.get("file") - service = request_serializer.validated_data.get("service") - docker_image_name_tag = request_serializer.validated_data.get( - "docker_image_name_tag" - ) - endpoint_url = request_serializer.validated_data.get("endpoint_url") - kubernetes_cluster = request_serializer.validated_data.get("kubernetes_cluster") - suppress_licenses = request_serializer.validated_data.get( - "suppress_licenses", False - ) - - file_upload_parameters = 
FileUploadParameters( - product=product, - branch=branch, - file=file, - service=service, - docker_image_name_tag=docker_image_name_tag, - endpoint_url=endpoint_url, - kubernetes_cluster=kubernetes_cluster, - suppress_licenses=suppress_licenses, - ) - - ( - observations_new, - observations_updated, - observations_resolved, - license_components_new, - license_components_updated, - license_components_deleted, - ) = file_upload_observations(file_upload_parameters) - - num_observations = ( - observations_new + observations_updated + observations_resolved - ) - num_license_components = ( - license_components_new - + license_components_updated - + license_components_deleted - ) - - response_data = {} - if num_observations > 0 or num_license_components == 0: - response_data["observations_new"] = observations_new - response_data["observations_updated"] = observations_updated - response_data["observations_resolved"] = observations_resolved - if num_license_components > 0: - response_data["license_components_new"] = license_components_new - response_data["license_components_updated"] = license_components_updated - response_data["license_components_deleted"] = license_components_deleted + return Response(response_data) + + +class FileUploadSBOMById(APIView): + parser_classes = [MultiPartParser] + @extend_schema( + request=FileUploadSBOMByIdRequestSerializer, + responses={status.HTTP_200_OK: FileImportSBOMResponseSerializer}, + ) + def post(self, request: Request) -> Response: + request_serializer = FileUploadSBOMByIdRequestSerializer(data=request.data) + product, branch = _get_product_branch_by_id(request_serializer) + service_name = _get_service_name(product, request_serializer) + response_data = _file_upload_sbom(request_serializer, product, branch, service_name) return Response(response_data) -class FileUploadObservationsByName(APIView): +class FileUploadSBOMByName(APIView): parser_classes = [MultiPartParser] @extend_schema( - request=FileUploadObservationsByNameRequestSerializer, - responses={status.HTTP_200_OK: ImportObservationsResponseSerializer}, + request=FileUploadSBOMByNameRequestSerializer, + responses={status.HTTP_200_OK: FileImportSBOMResponseSerializer}, ) - def post(self, request): # pylint: disable=too-many-locals - # not too much we can do about this - request_serializer = FileUploadObservationsByNameRequestSerializer( - data=request.data - ) - if not request_serializer.is_valid(): - raise ValidationError(request_serializer.errors) + def post(self, request: Request) -> Response: + request_serializer = FileUploadSBOMByNameRequestSerializer(data=request.data) + product, branch = _get_product_branch_by_name(request_serializer) + service_name = request_serializer.validated_data.get("service", "") + response_data = _file_upload_sbom(request_serializer, product, branch, service_name) + return Response(response_data) - product_name = request_serializer.validated_data.get("product_name") - product = get_product_by_name(product_name) - if not product: - raise ValidationError(f"Product {product_name} does not exist") - user_has_permission_or_403(product, Permissions.Product_Import_Observations) +def _get_product_branch_by_id(request_serializer: Serializer) -> tuple[Product, Optional[Branch]]: + if not request_serializer.is_valid(): + raise ValidationError(request_serializer.errors) - branch = None - branch_name = request_serializer.validated_data.get("branch_name") - if branch_name: - branch = get_branch_by_name(product, branch_name) - if not branch: - branch = 
Branch.objects.create(product=product, name=branch_name) - - file = request_serializer.validated_data.get("file") - service = request_serializer.validated_data.get("service") - docker_image_name_tag = request_serializer.validated_data.get( - "docker_image_name_tag" - ) - endpoint_url = request_serializer.validated_data.get("endpoint_url") - kubernetes_cluster = request_serializer.validated_data.get("kubernetes_cluster") - suppress_licenses = request_serializer.validated_data.get( - "suppress_licenses", False - ) - - file_upload_parameters = FileUploadParameters( - product=product, - branch=branch, - file=file, - service=service, - docker_image_name_tag=docker_image_name_tag, - endpoint_url=endpoint_url, - kubernetes_cluster=kubernetes_cluster, - suppress_licenses=suppress_licenses, - ) - - ( - observations_new, - observations_updated, - observations_resolved, - license_components_new, - license_components_updated, - license_components_deleted, - ) = file_upload_observations(file_upload_parameters) - - num_observations = ( - observations_new + observations_updated + observations_resolved - ) - num_license_components = ( - license_components_new - + license_components_updated - + license_components_deleted - ) - - response_data = {} - if num_observations > 0 or num_license_components == 0: - response_data["observations_new"] = observations_new - response_data["observations_updated"] = observations_updated - response_data["observations_resolved"] = observations_resolved - if num_license_components > 0: - response_data["license_components_new"] = license_components_new - response_data["license_components_updated"] = license_components_updated - response_data["license_components_deleted"] = license_components_deleted + product_id = request_serializer.validated_data.get("product") + product = get_product_by_id(product_id) + if not product: + raise ValidationError(f"Product {product_id} does not exist") + + user_has_permission_or_403(product, Permissions.Product_Import_Observations) + + branch = None + branch_id = request_serializer.validated_data.get("branch") + if branch_id: + branch = get_branch_by_id(product, branch_id) + if not branch: + raise ValidationError(f"Branch {branch_id} does not exist for product {product}") + return product, branch - return Response(response_data) + +def _get_product_branch_by_name(request_serializer: Serializer) -> tuple[Product, Optional[Branch]]: + if not request_serializer.is_valid(): + raise ValidationError(request_serializer.errors) + + product_name = request_serializer.validated_data.get("product_name") + product = get_product_by_name(product_name) + if not product: + raise ValidationError(f"Product {product_name} does not exist") + + user_has_permission_or_403(product, Permissions.Product_Import_Observations) + + branch = None + branch_name = request_serializer.validated_data.get("branch_name") + if branch_name: + branch = get_branch_by_name(product, branch_name) + if not branch: + branch = Branch.objects.create( + product=product, name=branch_name, is_default_branch=product.repository_default_branch is None + ) + return product, branch + + +def _get_service_name(product: Product, request_serializer: Serializer) -> str: + service_name = request_serializer.validated_data.get("service", "") + service_id = request_serializer.validated_data.get("service_id") + + if service_name and service_id: + raise ValidationError("Only one of the fields service and service_id may be set") + + if service_id: + service = get_service_by_id(product, service_id) + if not 
service: + raise ValidationError(f"Service {service_id} does not exist for product {product.name}") + service_name = service.name + + return service_name + + +def _file_upload_observations( + request_serializer: Serializer, product: Product, branch: Optional[Branch], service_name: str +) -> dict[str, int]: + file = request_serializer.validated_data.get("file") + docker_image_name_tag = request_serializer.validated_data.get("docker_image_name_tag") + endpoint_url = request_serializer.validated_data.get("endpoint_url") + kubernetes_cluster = request_serializer.validated_data.get("kubernetes_cluster") + suppress_licenses = request_serializer.validated_data.get("suppress_licenses", False) + + file_upload_parameters = FileUploadParameters( + product=product, + branch=branch, + file=file, + service_name=service_name, + docker_image_name_tag=docker_image_name_tag, + endpoint_url=endpoint_url, + kubernetes_cluster=kubernetes_cluster, + suppress_licenses=suppress_licenses, + sbom=False, + ) + + ( + observations_new, + observations_updated, + observations_resolved, + license_components_new, + license_components_updated, + license_components_deleted, + ) = file_upload_observations(file_upload_parameters) + + num_observations = observations_new + observations_updated + observations_resolved + num_license_components = license_components_new + license_components_updated + license_components_deleted + + response_data = {} + if num_observations > 0 or num_license_components == 0: + response_data["observations_new"] = observations_new + response_data["observations_updated"] = observations_updated + response_data["observations_resolved"] = observations_resolved + if num_license_components > 0: + response_data["license_components_new"] = license_components_new + response_data["license_components_updated"] = license_components_updated + response_data["license_components_deleted"] = license_components_deleted + return response_data + + +def _file_upload_sbom( + request_serializer: Serializer, product: Product, branch: Optional[Branch], service_name: str +) -> dict[str, int]: + file = request_serializer.validated_data.get("file") + + file_upload_parameters = FileUploadParameters( + product=product, + branch=branch, + file=file, + service_name=service_name, + docker_image_name_tag="", + endpoint_url="", + kubernetes_cluster="", + suppress_licenses=False, + sbom=True, + ) + + ( + _, + _, + _, + license_components_new, + license_components_updated, + license_components_deleted, + ) = file_upload_observations(file_upload_parameters) + + num_license_components = license_components_new + license_components_updated + license_components_deleted + + response_data = {} + if num_license_components > 0: + response_data["license_components_new"] = license_components_new + response_data["license_components_updated"] = license_components_updated + response_data["license_components_deleted"] = license_components_deleted + return response_data class ApiConfigurationViewSet(ModelViewSet): @@ -359,8 +376,8 @@ class ApiConfigurationViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] - def get_queryset(self): - return get_api_configurations() + def get_queryset(self) -> QuerySet[Api_Configuration]: + return get_api_configurations().select_related("product").select_related("product__product_group") class VulnerabilityCheckViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): @@ -370,8 +387,8 @@ class VulnerabilityCheckViewSet(GenericViewSet, ListModelMixin, RetrieveModelMix 
queryset = Vulnerability_Check.objects.none() filter_backends = [DjangoFilterBackend] - def get_queryset(self): - return get_vulnerability_checks() + def get_queryset(self) -> QuerySet[Vulnerability_Check]: + return get_vulnerability_checks().select_related("branch").select_related("service") class ParserViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): @@ -380,3 +397,57 @@ class ParserViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): queryset = Parser.objects.all() filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] + + +class ScanOSVProductView(APIView): + @extend_schema( + request=None, + responses={status.HTTP_200_OK: APIImportObservationsResponseSerializer}, + ) + @action(detail=True, methods=["post"]) + def post(self, request: Request, product_id: int) -> Response: + product = get_product_by_id(product_id) + if not product: + return Response(status=HTTP_404_NOT_FOUND) + + user_has_permission_or_403(product, Permissions.Product_Scan_OSV) + + if not product.osv_enabled: + raise ValidationError(f"OSV scan is not enabled for product {product.name}") + + observations_new, observations_updated, observations_resolved = scan_product(product) + response_data = { + "observations_new": observations_new, + "observations_updated": observations_updated, + "observations_resolved": observations_resolved, + } + return Response(response_data) + + +class ScanOSVBranchView(APIView): + @extend_schema( + request=None, + responses={status.HTTP_200_OK: APIImportObservationsResponseSerializer}, + ) + @action(detail=True, methods=["post"]) + def post(self, request: Request, product_id: int, branch_id: int) -> Response: + product = get_product_by_id(product_id) + if not product: + return Response(status=HTTP_404_NOT_FOUND) + + user_has_permission_or_403(product, Permissions.Product_Scan_OSV) + + if not product.osv_enabled: + raise ValidationError(f"OSV scan is not enabled for product {product.name}") + + branch = get_branch_by_id(product, branch_id) + if not branch: + return Response(status=HTTP_404_NOT_FOUND) + + observations_new, observations_updated, observations_resolved = scan_branch(branch) + response_data = { + "observations_new": observations_new, + "observations_updated": observations_updated, + "observations_resolved": observations_resolved, + } + return Response(response_data) diff --git a/backend/application/import_observations/apps.py b/backend/application/import_observations/apps.py index e589f1f83..a9d30da4b 100644 --- a/backend/application/import_observations/apps.py +++ b/backend/application/import_observations/apps.py @@ -5,6 +5,6 @@ class UploadObservationsConfig(AppConfig): name = "application.import_observations" verbose_name = "Upload observations" - def ready(self): + def ready(self) -> None: # This forces the schema extension for DRF to be loaded import config.schema # noqa F401 pylint: disable=import-outside-toplevel,unused-import diff --git a/backend/application/import_observations/exceptions.py b/backend/application/import_observations/exceptions.py index d64acee2c..d4f4bdfed 100644 --- a/backend/application/import_observations/exceptions.py +++ b/backend/application/import_observations/exceptions.py @@ -1,3 +1,3 @@ class ParserError(Exception): - def __init__(self, message): + def __init__(self, message: str) -> None: self.message = message diff --git a/backend/application/import_observations/management/commands/register_parsers.py b/backend/application/import_observations/management/commands/register_parsers.py index 
882d8c0ee..7bc92b294 100644 --- a/backend/application/import_observations/management/commands/register_parsers.py +++ b/backend/application/import_observations/management/commands/register_parsers.py @@ -4,6 +4,7 @@ from importlib.util import find_spec from inspect import isclass from pathlib import Path +from typing import Any from django.core.management.base import BaseCommand, CommandError from django.db import connection @@ -23,7 +24,7 @@ class Command(BaseCommand): help = "Register parsers to import vulnerability scans." - def handle(self, *args, **options): + def handle(self, *args: Any, **options: Any) -> None: all_tables = connection.introspection.table_names() if "core_parser" in all_tables: # Create parser entry for manual observations @@ -38,29 +39,19 @@ def handle(self, *args, **options): def register_module(self, module_name: str) -> None: try: # Check if it is a Python module - if find_spec( - f"application.import_observations.parsers.{module_name}.parser" - ): + if find_spec(f"application.import_observations.parsers.{module_name}.parser"): _register_parser(module_name) except Exception as exc: print(exc) - raise CommandError( - format_log_message(message=f"Failed to load {module_name}") - ) from exc + raise CommandError(format_log_message(message=f"Failed to load {module_name}")) from exc def _register_parser(module_name: str) -> None: # Import the module and register the classname - module = import_module( # nosemgrep - f"application.import_observations.parsers.{module_name}.parser" - ) + module = import_module(f"application.import_observations.parsers.{module_name}.parser") # nosemgrep # nosemgrep because of rule python.lang.security.audit.non-literal-import.non-literal-import # This is the price you pay for a dynamic parser registry. We accept the risk. 
for attribute_name in dir(module): attribute = getattr(module, attribute_name) - if ( - isclass(attribute) - and issubclass(attribute, BaseParser) - and attribute is not BaseParser - ): + if isclass(attribute) and issubclass(attribute, BaseParser) and attribute is not BaseParser: register_parser(module_name, attribute_name) diff --git a/backend/application/import_observations/migrations/0001_initial.py b/backend/application/import_observations/migrations/0001_initial.py index ff3175d79..59fbc3f58 100644 --- a/backend/application/import_observations/migrations/0001_initial.py +++ b/backend/application/import_observations/migrations/0001_initial.py @@ -31,15 +31,11 @@ class Migration(migrations.Migration): ("api_key", encrypted_model_fields.fields.EncryptedCharField()), ( "parser", - models.ForeignKey( - on_delete=django.db.models.deletion.PROTECT, to="core.parser" - ), + models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to="core.parser"), ), ( "product", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="core.product" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.product"), ), ], options={ diff --git a/backend/application/import_observations/migrations/0002_vulnerability_check.py b/backend/application/import_observations/migrations/0002_vulnerability_check.py index 005aac55c..7e1fd9c66 100644 --- a/backend/application/import_observations/migrations/0002_vulnerability_check.py +++ b/backend/application/import_observations/migrations/0002_vulnerability_check.py @@ -64,21 +64,15 @@ class Migration(migrations.Migration): ), ( "branch", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="core.branch" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.branch"), ), ( "product", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="core.product" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.product"), ), ], options={ - "unique_together": { - ("product", "branch", "filename", "api_configuration_name") - }, + "unique_together": {("product", "branch", "filename", "api_configuration_name")}, }, ), ] diff --git a/backend/application/import_observations/migrations/0003_alter_vulnerability_check_branch.py b/backend/application/import_observations/migrations/0003_alter_vulnerability_check_branch.py index 093875f16..71280aa2b 100644 --- a/backend/application/import_observations/migrations/0003_alter_vulnerability_check_branch.py +++ b/backend/application/import_observations/migrations/0003_alter_vulnerability_check_branch.py @@ -14,8 +14,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="vulnerability_check", name="branch", - field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.CASCADE, to="core.branch" - ), + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="core.branch"), ), ] diff --git a/backend/application/import_observations/migrations/0006_parser_alter_api_configuration_parser.py b/backend/application/import_observations/migrations/0006_parser_alter_api_configuration_parser.py index 56d475ddb..e976cf344 100644 --- a/backend/application/import_observations/migrations/0006_parser_alter_api_configuration_parser.py +++ b/backend/application/import_observations/migrations/0006_parser_alter_api_configuration_parser.py @@ -68,9 +68,7 @@ class Migration(migrations.Migration): state_operations=[ migrations.AddIndex( model_name="parser", - index=models.Index( - 
fields=["name"], name="core_parser_name_d48b0a_idx" - ), + index=models.Index(fields=["name"], name="core_parser_name_d48b0a_idx"), ), ], database_operations=[], diff --git a/backend/application/import_observations/migrations/0007_api_configuration_automatic_import_branch_and_more.py b/backend/application/import_observations/migrations/0007_api_configuration_automatic_import_branch_and_more.py index f9e41d7c8..907f39b0c 100644 --- a/backend/application/import_observations/migrations/0007_api_configuration_automatic_import_branch_and_more.py +++ b/backend/application/import_observations/migrations/0007_api_configuration_automatic_import_branch_and_more.py @@ -15,9 +15,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name="api_configuration", name="automatic_import_branch", - field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.PROTECT, to="core.branch" - ), + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to="core.branch"), ), migrations.AddField( model_name="api_configuration", diff --git a/backend/application/import_observations/migrations/0011_osv_cache_alter_parser_source.py b/backend/application/import_observations/migrations/0011_osv_cache_alter_parser_source.py new file mode 100644 index 000000000..f0af6d624 --- /dev/null +++ b/backend/application/import_observations/migrations/0011_osv_cache_alter_parser_source.py @@ -0,0 +1,47 @@ +# Generated by Django 5.1.5 on 2025-01-21 07:13 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ( + "import_observations", + "0010_vulnerability_check_last_import_licenses_deleted_and_more", + ), + ] + + operations = [ + migrations.CreateModel( + name="OSV_Cache", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("osv_id", models.CharField(max_length=255, unique=True)), + ("data", models.TextField()), + ("modified", models.DateTimeField()), + ], + ), + migrations.AlterField( + model_name="parser", + name="source", + field=models.CharField( + choices=[ + ("API", "API"), + ("File", "File"), + ("Manual", "Manual"), + ("Other", "Other"), + ("Unknown", "Unknown"), + ], + max_length=16, + ), + ), + ] diff --git a/backend/application/import_observations/migrations/0012_parser_sbom.py b/backend/application/import_observations/migrations/0012_parser_sbom.py new file mode 100644 index 000000000..98da61f72 --- /dev/null +++ b/backend/application/import_observations/migrations/0012_parser_sbom.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.7 on 2025-03-30 17:11 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("import_observations", "0011_osv_cache_alter_parser_source"), + ] + + operations = [ + migrations.AddField( + model_name="parser", + name="sbom", + field=models.BooleanField(default=False), + ), + ] diff --git a/backend/application/import_observations/migrations/0013_alter_vulnerability_check_unique_together_and_more.py b/backend/application/import_observations/migrations/0013_alter_vulnerability_check_unique_together_and_more.py new file mode 100644 index 000000000..0364d806c --- /dev/null +++ b/backend/application/import_observations/migrations/0013_alter_vulnerability_check_unique_together_and_more.py @@ -0,0 +1,28 @@ +# Generated by Django 5.2.4 on 2025-08-03 13:43 + +import django.db.models.deletion +from django.db import migrations, models + + +class 
Migration(migrations.Migration): + + dependencies = [ + ("core", "0064_product_description_markdown"), + ("import_observations", "0012_parser_sbom"), + ] + + operations = [ + migrations.AlterUniqueTogether( + name="vulnerability_check", + unique_together=set(), + ), + migrations.AddField( + model_name="vulnerability_check", + name="service", + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="core.service"), + ), + migrations.AlterUniqueTogether( + name="vulnerability_check", + unique_together={("product", "branch", "service", "filename", "api_configuration_name")}, + ), + ] diff --git a/backend/application/import_observations/migrations/0014_rename_automatic_import_service_api_configuration_automatic_import_service_name.py b/backend/application/import_observations/migrations/0014_rename_automatic_import_service_api_configuration_automatic_import_service_name.py new file mode 100644 index 000000000..0e10cf853 --- /dev/null +++ b/backend/application/import_observations/migrations/0014_rename_automatic_import_service_api_configuration_automatic_import_service_name.py @@ -0,0 +1,18 @@ +# Generated by Django 5.2.7 on 2025-10-08 09:31 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("import_observations", "0013_alter_vulnerability_check_unique_together_and_more"), + ] + + operations = [ + migrations.RenameField( + model_name="api_configuration", + old_name="automatic_import_service", + new_name="automatic_import_service_name", + ), + ] diff --git a/backend/application/import_observations/migrations/0015_api_configuration_automatic_import_service.py b/backend/application/import_observations/migrations/0015_api_configuration_automatic_import_service.py new file mode 100644 index 000000000..cd5cf28cf --- /dev/null +++ b/backend/application/import_observations/migrations/0015_api_configuration_automatic_import_service.py @@ -0,0 +1,25 @@ +# Generated by Django 5.2.7 on 2025-10-08 09:32 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0070_components_view_open_observations"), + ("import_observations", "0014_rename_automatic_import_service_api_configuration_automatic_import_service_name"), + ] + + operations = [ + migrations.RenameField( + model_name="api_configuration", + old_name="automatic_import_service_name", + new_name="automatic_import_service_legacy", + ), + migrations.AddField( + model_name="api_configuration", + name="automatic_import_service", + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to="core.service"), + ), + ] diff --git a/backend/application/import_observations/migrations/0016_api_configuration_migrate_names.py b/backend/application/import_observations/migrations/0016_api_configuration_migrate_names.py new file mode 100644 index 000000000..8e29384e1 --- /dev/null +++ b/backend/application/import_observations/migrations/0016_api_configuration_migrate_names.py @@ -0,0 +1,52 @@ +import logging + +from django.core.paginator import Paginator +from django.db import migrations +from django.db.models import Q + +logger = logging.getLogger("secobserve.migration") + + +def migrate_service_names(apps, schema_editor): + API_Configuration = apps.get_model("import_observations", "API_Configuration") + Service = apps.get_model("core", "Service") + + api_configurations = API_Configuration.objects.exclude(automatic_import_service_legacy__exact="").order_by("id") + + 
paginator = Paginator(api_configurations, 1000) + for page_number in paginator.page_range: + page = paginator.page(page_number) + updates = [] + + for api_configuration in page.object_list: + service = Service.objects.filter( + product=api_configuration.product, name=api_configuration.automatic_import_service_legacy + ).first() + if service: + api_configuration.automatic_import_service = service + api_configuration.automatic_import_service_legacy = "" + updates.append(api_configuration) + + API_Configuration.objects.bulk_update( + updates, + [ + "automatic_import_service", + "automatic_import_service_legacy", + ], + ) + + +class Migration(migrations.Migration): + dependencies = [ + ( + "import_observations", + "0015_api_configuration_automatic_import_service", + ), + ] + + operations = [ + migrations.RunPython( + migrate_service_names, + reverse_code=migrations.RunPython.noop, + ), + ] diff --git a/backend/application/import_observations/models.py b/backend/application/import_observations/models.py index b3dde7ab0..e34faf1e8 100644 --- a/backend/application/import_observations/models.py +++ b/backend/application/import_observations/models.py @@ -9,10 +9,11 @@ Index, IntegerField, Model, + TextField, ) from encrypted_model_fields.fields import EncryptedCharField -from application.core.models import Branch, Product +from application.core.models import Branch, Product, Service from application.import_observations.types import Parser_Source, Parser_Type @@ -20,6 +21,7 @@ class Parser(Model): name = CharField(max_length=255, unique=True) type = CharField(max_length=16, choices=Parser_Type.TYPE_CHOICES) source = CharField(max_length=16, choices=Parser_Source.SOURCE_CHOICES) + sbom = BooleanField(default=False) module_name = CharField(max_length=255, blank=True) class_name = CharField(max_length=255, blank=True) @@ -29,7 +31,7 @@ class Meta: Index(fields=["name"]), ] - def __str__(self): + def __str__(self) -> str: return self.name @@ -49,7 +51,8 @@ class Api_Configuration(Model): verify_ssl = BooleanField(default=False) automatic_import_enabled = BooleanField(default=False) automatic_import_branch = ForeignKey(Branch, on_delete=PROTECT, null=True) - automatic_import_service = CharField(max_length=255, blank=True) + automatic_import_service = ForeignKey(Service, on_delete=PROTECT, null=True) + automatic_import_service_legacy = CharField(max_length=255, blank=True) automatic_import_docker_image_name_tag = CharField(max_length=513, blank=True) automatic_import_endpoint_url = CharField(max_length=2048, blank=True) automatic_import_kubernetes_cluster = CharField(max_length=255, blank=True) @@ -60,41 +63,44 @@ class Meta: "name", ) - def __str__(self): + def __str__(self) -> str: return f"{self.product.name} / {self.name}" class Vulnerability_Check(Model): product = ForeignKey(Product, on_delete=CASCADE) branch = ForeignKey(Branch, on_delete=CASCADE, null=True) + service = ForeignKey(Service, on_delete=CASCADE, null=True) filename = CharField(max_length=255, blank=True) api_configuration_name = CharField(max_length=255, blank=True) scanner = CharField(max_length=255, blank=True) first_import = DateTimeField(auto_now_add=True) last_import = DateTimeField(auto_now=True) - last_import_observations_new = IntegerField( - null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] - ) + last_import_observations_new = IntegerField(null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)]) last_import_observations_updated = IntegerField( null=True, validators=[MinValueValidator(0), 
MaxValueValidator(999999)] ) last_import_observations_resolved = IntegerField( null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] ) - last_import_licenses_new = IntegerField( - null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] - ) - last_import_licenses_updated = IntegerField( - null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] - ) - last_import_licenses_deleted = IntegerField( - null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)] - ) + last_import_licenses_new = IntegerField(null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)]) + last_import_licenses_updated = IntegerField(null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)]) + last_import_licenses_deleted = IntegerField(null=True, validators=[MinValueValidator(0), MaxValueValidator(999999)]) class Meta: unique_together = ( "product", "branch", + "service", "filename", "api_configuration_name", ) + + +class OSV_Cache(Model): + osv_id = CharField(max_length=255, unique=True) + data = TextField() + modified = DateTimeField() + + def __str__(self) -> str: + return self.osv_id diff --git a/backend/application/import_observations/parsers/azure_defender/parser.py b/backend/application/import_observations/parsers/azure_defender/parser.py index 79763abaa..8ed68952b 100644 --- a/backend/application/import_observations/parsers/azure_defender/parser.py +++ b/backend/application/import_observations/parsers/azure_defender/parser.py @@ -1,7 +1,8 @@ import re from json import dumps +from typing import Optional -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.import_observations.parsers.base_parser import ( BaseFileParser, BaseParser, @@ -27,7 +28,9 @@ def check_format(self, data: list[dict]) -> bool: return True return False - def get_observations(self, data: list[dict]) -> list[Observation]: + def get_observations( + self, data: list[dict], product: Product, branch: Optional[Branch] + ) -> tuple[list[Observation], str]: observations = [] for row in data: @@ -63,7 +66,7 @@ def get_observations(self, data: list[dict]) -> list[Observation]: observations.append(observation) - return observations + return observations, self.get_name() def format_markdown(self, string: str) -> str: string = self.replace_string_with_newlines(string, r"\.[A-Z]") diff --git a/backend/application/import_observations/parsers/base_parser.py b/backend/application/import_observations/parsers/base_parser.py index e87b121e6..06b760e91 100644 --- a/backend/application/import_observations/parsers/base_parser.py +++ b/backend/application/import_observations/parsers/base_parser.py @@ -1,6 +1,6 @@ from typing import Any, Optional -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.import_observations.models import Api_Configuration from application.licenses.models import License_Component @@ -14,14 +14,14 @@ def get_name(cls) -> str: def get_type(cls) -> str: raise NotImplementedError("get_type() must be overridden") - def get_observations(self, data: Any) -> list[Observation]: + def get_observations(self, data: Any, product: Product, branch: Optional[Branch]) -> tuple[list[Observation], str]: raise NotImplementedError("get_observations() must be overridden") def get_license_components( self, data: Any # pylint: disable=unused-argument - ) -> list[License_Component]: + ) -> tuple[list[License_Component], 
str]: # data is used in the child classes - return [] + return [], "" def get_int_or_none(self, value: Optional[str]) -> int | None: if value: @@ -34,9 +34,7 @@ def get_int_or_none(self, value: Optional[str]) -> int | None: class BaseAPIParser: - def check_connection( - self, api_configuration: Api_Configuration - ) -> tuple[bool, list[str], dict | list]: + def check_connection(self, api_configuration: Api_Configuration) -> tuple[bool, list[str], dict | list]: raise NotImplementedError("check_connection() must be overridden") @@ -45,5 +43,9 @@ class BaseFileParser: def get_filetype(cls) -> str: raise NotImplementedError("check_format() must be overridden") + @classmethod + def sbom(cls) -> bool: + return False + def check_format(self, data: Any) -> bool: raise NotImplementedError("check_format() must be overridden") diff --git a/backend/application/import_observations/parsers/cryptolyzer/parser.py b/backend/application/import_observations/parsers/cryptolyzer/parser.py index c3b876ee2..59ef5f208 100644 --- a/backend/application/import_observations/parsers/cryptolyzer/parser.py +++ b/backend/application/import_observations/parsers/cryptolyzer/parser.py @@ -1,7 +1,7 @@ from json import dumps from typing import Any, Optional -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.core.types import Severity from application.import_observations.parsers.base_parser import ( BaseFileParser, @@ -137,21 +137,17 @@ def check_format(self, data: Any) -> bool: return True return False - def get_observations(self, data: dict) -> list[Observation]: + def get_observations(self, data: dict, product: Product, branch: Optional[Branch]) -> tuple[list[Observation], str]: observations = [] observation = self.check_weak_protocols(data) if observation: observations.append(observation) - observation = self.check_ciphers( - "tls1_2", "TLS 1.2", TLS12_RECOMMENDED_CIPHERS, data - ) + observation = self.check_ciphers("tls1_2", "TLS 1.2", TLS12_RECOMMENDED_CIPHERS, data) if observation: observations.append(observation) - observation = self.check_ciphers( - "tls1_3", "TLS 1.3", TLS13_RECOMMENDED_CIPHERS, data - ) + observation = self.check_ciphers("tls1_3", "TLS 1.3", TLS13_RECOMMENDED_CIPHERS, data) if observation: observations.append(observation) @@ -163,7 +159,7 @@ def get_observations(self, data: dict) -> list[Observation]: if observation: observations.append(observation) - return observations + return observations, self.get_name() def check_weak_protocols(self, data: dict) -> Optional[Observation]: endpoint_url = self.get_endpoint_url(data.get("versions", {}).get("target", {})) @@ -172,10 +168,7 @@ def check_weak_protocols(self, data: dict) -> Optional[Observation]: versions.remove("tls1_2") if "tls1_3" in versions: versions.remove("tls1_3") - description = ( - "**Weak protocols according to BSI recommendations:**\n* " - + "\n* ".join(versions) - ) + description = "**Weak protocols according to BSI recommendations:**\n* " + "\n* ".join(versions) if not versions: return None @@ -216,9 +209,8 @@ def check_ciphers( if unrecommended_cipher_suites: endpoint_url = self.get_endpoint_url(cipher.get("target", {})) - description = ( - "**Unrecommended cipher suites according to BSI recommendations:**\n* " - + "\n* ".join(unrecommended_cipher_suites) + description = "**Unrecommended cipher suites according to BSI recommendations:**\n* " + "\n* ".join( + unrecommended_cipher_suites ) observation = Observation( title="Unrecommended " + protocol_name + " 
cipher suites", @@ -254,9 +246,8 @@ def check_curves( return None endpoint_url = self.get_endpoint_url(curves.get("target", {})) - description = ( - "**Unrecommended elliptic curves according to BSI recommendations:**\n* " - + "\n* ".join(unrecommended_curves) + description = "**Unrecommended elliptic curves according to BSI recommendations:**\n* " + "\n* ".join( + unrecommended_curves ) observation = Observation( title="Unrecommended elliptic curves", @@ -283,19 +274,15 @@ def check_signature_algorithms( unrecommended_signature_algorithms = [] inner_signature_algorithms = signature_algorithms.get("sig_algos", {}) for inner_signature_algorithm in inner_signature_algorithms: - if ( - inner_signature_algorithm.lower() - not in RECOMMENDED_SIGNATURE_ALGORITHMS - ): + if inner_signature_algorithm.lower() not in RECOMMENDED_SIGNATURE_ALGORITHMS: unrecommended_signature_algorithms.append(inner_signature_algorithm) if not unrecommended_signature_algorithms: return None endpoint_url = self.get_endpoint_url(signature_algorithms.get("target", {})) - description = ( - "**Unrecommended signature algorithms according to BSI recommendations:**\n* " - + "\n* ".join(unrecommended_signature_algorithms) + description = "**Unrecommended signature algorithms according to BSI recommendations:**\n* " + "\n* ".join( + unrecommended_signature_algorithms ) observation = Observation( title="Unrecommended signature algorithms", diff --git a/backend/application/import_observations/parsers/cyclone_dx/dependencies.py b/backend/application/import_observations/parsers/cyclone_dx/dependencies.py index 768099621..0707fe3d6 100644 --- a/backend/application/import_observations/parsers/cyclone_dx/dependencies.py +++ b/backend/application/import_observations/parsers/cyclone_dx/dependencies.py @@ -1,200 +1,6 @@ -import logging from collections import defaultdict -from application.import_observations.parsers.cyclone_dx.types import Component, Metadata - -logger = logging.getLogger("secobserve.import_observations.cyclone_dx.dependencies") - - -def get_component_dependencies( - data: dict, - components: dict[str, Component], - component: Component, - metadata: Metadata, -) -> tuple[str, list[dict]]: - component_dependencies: list[dict[str, str | list[str]]] = [] - _filter_component_dependencies( - component.bom_ref, - data.get("dependencies", []), - component_dependencies, - ) - observation_component_dependencies = "" - translated_component_dependencies = [] - if component_dependencies: - translated_component_dependencies = _translate_component_dependencies( - component_dependencies, components - ) - - observation_component_dependencies = _generate_dependency_list_as_text( - _get_dependencies( - component.bom_ref, - component_dependencies, - components, - metadata, - ) - ) - - if len(observation_component_dependencies) > 32768: - observation_component_dependencies = ( - observation_component_dependencies[:32764] + " ..." 
- ) - - return observation_component_dependencies, translated_component_dependencies - - -def _filter_component_dependencies( - bom_ref: str, - dependencies: list[dict[str, str | list[str]]], - component_dependencies: list[dict[str, str | list[str]]], -) -> None: - for dependency in dependencies: - if dependency in component_dependencies: - continue - depends_on = dependency.get("dependsOn", []) - if bom_ref in depends_on: - component_dependencies.append(dependency) - _filter_component_dependencies( - str(dependency.get("ref")), dependencies, component_dependencies - ) - - -def _translate_component_dependencies( - component_dependencies: list[dict[str, str | list[str]]], - components: dict[str, Component], -) -> list[dict]: - translated_component_dependencies = [] - - for component_dependency in component_dependencies: - translated_component_dependency: dict[str, str | list[str]] = {} - - translated_component_dependency["ref"] = _translate_component( - str(component_dependency.get("ref")), components - ) - - translated_component_dependencies_inner: list[str] = [] - for dependency in component_dependency.get("dependsOn", []): - translated_component_dependencies_inner.append( - _translate_component(dependency, components) - ) - translated_component_dependencies_inner.sort() - translated_component_dependency["dependsOn"] = ( - translated_component_dependencies_inner - ) - - translated_component_dependencies.append(translated_component_dependency) - - return translated_component_dependencies - - -def _translate_component(bom_ref: str, components: dict[str, Component]) -> str: - component = components.get(bom_ref, None) - if not component: - logger.warning("Component with BOM ref %s not found", bom_ref) - return "" - - if component.version: - component_name_version = f"{component.name}:{component.version}" - else: - component_name_version = component.name - - return component_name_version - - -def _get_dependencies( - component_bom_ref: str, - component_dependencies: list[dict], - components: dict[str, Component], - metadata: Metadata, -) -> dict[str, set[str]]: - roots = _get_roots(component_dependencies) - - dependencies: list[str] = [] - try: - for root in roots: - recursive_dependencies = _get_dependencies_recursive( - root=root, - translated_initial_dependency=_translate_component(root, components), - initial_dependency=root, - component_bom_ref=component_bom_ref, - component_dependencies=component_dependencies, - components=components, - ) - if recursive_dependencies not in dependencies: - dependencies += recursive_dependencies - except RecursionError as e: - logger.warning( - "%s:%s -> %s", metadata.container_name, metadata.container_tag, str(e) - ) - return {} - - return_dependencies = [] - for dependency in dependencies: - if ( - dependency - and dependency.endswith(_translate_component(component_bom_ref, components)) - or dependency.startswith("Circular dependency for") - ): - return_dependencies.append(dependency) - - graph = _parse_mermaid_graph_content(sorted(return_dependencies)) - - return graph - - -def _get_dependencies_recursive( - *, - root: str, - translated_initial_dependency: str, - initial_dependency: str, - component_bom_ref: str, - component_dependencies: list[dict], - components: dict[str, Component], -) -> list[str]: - - dependencies = [] - for dependency in component_dependencies: - ref = dependency.get("ref") - if ref == root: - for dependant in dependency.get("dependsOn", []): - translated_dependant = _translate_component(dependant, components) - if dependant in 
initial_dependency: - return [f"Circular dependency for {translated_dependant}"] - - new_translated_dependency = ( - f"{translated_initial_dependency} --> {translated_dependant}" - ) - new_dependency = f"{initial_dependency} --> {dependant}" - if dependant == component_bom_ref: - dependencies.append(new_translated_dependency) - else: - new_dependencies = _get_dependencies_recursive( - root=dependant, - translated_initial_dependency=new_translated_dependency, - initial_dependency=new_dependency, - component_bom_ref=component_bom_ref, - component_dependencies=component_dependencies, - components=components, - ) - if new_dependencies not in dependencies: - dependencies += new_dependencies - - return dependencies - - -def _get_roots( - translated_component_dependencies: list[dict], -) -> list[str]: - roots = [] - for dependency in translated_component_dependencies: - ref = dependency.get("ref") - if not ref: - continue - if not any( - ref in d.get("dependsOn", []) for d in translated_component_dependencies - ): - roots.append(ref) - - return roots +# These functions are still needed for migration 0051_convert_origin_component_dependencies def _parse_mermaid_graph_content( diff --git a/backend/application/import_observations/parsers/cyclone_dx/parser.py b/backend/application/import_observations/parsers/cyclone_dx/parser.py index 5c464a1f5..7f7dd0041 100644 --- a/backend/application/import_observations/parsers/cyclone_dx/parser.py +++ b/backend/application/import_observations/parsers/cyclone_dx/parser.py @@ -1,24 +1,50 @@ +import logging +from dataclasses import dataclass from json import dumps from typing import Any, Optional -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.core.types import Severity from application.import_observations.parsers.base_parser import ( BaseFileParser, BaseParser, ) -from application.import_observations.parsers.cyclone_dx.dependencies import ( - get_component_dependencies, -) -from application.import_observations.parsers.cyclone_dx.types import Component, Metadata from application.import_observations.types import Parser_Filetype, Parser_Type from application.licenses.models import License_Component +logger = logging.getLogger("secobserve.import_observations.cyclone_dx.dependencies") + + +@dataclass +class Component: + bom_ref: str + name: str + version: str + type: str + purl: str + cpe: str + cyclonedx_bom_link: str + json: dict[str, str] + unsaved_declared_licenses: list[str] + unsaved_concluded_licenses: list[str] + + +@dataclass +class Metadata: + serial_number: str + version: int + scanner: str + container_name: str + container_tag: str + container_digest: str + file: str + class CycloneDXParser(BaseParser, BaseFileParser): - def __init__(self): - self.metadata = Metadata("", "", "", "", "") + def __init__(self) -> None: + self.metadata = Metadata("", 1, "", "", "", "", "") self.components: dict[str, Component] = {} + self.dependencies: dict[str, list[str]] = {} @classmethod def get_name(cls) -> str: @@ -32,49 +58,52 @@ def get_filetype(cls) -> str: def get_type(cls) -> str: return Parser_Type.TYPE_SCA + @classmethod + def sbom(cls) -> bool: + return True + def check_format(self, data: Any) -> bool: if isinstance(data, dict) and data.get("bomFormat") == "CycloneDX": return True return False - def get_observations(self, data: dict) -> list[Observation]: - self.components = self._get_components(data) + def get_observations(self, data: dict, product: Product, branch: 
Optional[Branch]) -> tuple[list[Observation], str]: self.metadata = self._get_metadata(data) + self.components = self._get_components(data) + self.dependencies = self._get_dependencies(data) + observations = self._create_observations(data) - return observations + return observations, self.metadata.scanner - def get_license_components(self, data) -> list[License_Component]: + def get_license_components(self, data: dict) -> tuple[list[License_Component], str]: + self.metadata = self._get_metadata(data) if not self.components: self.components = self._get_components(data) - if not self.metadata: - self.metadata = self._get_metadata(data) + if not self.dependencies: + self.dependencies = self._get_dependencies(data) components = [] - licenses_exist = False for component in self.components.values(): - if component.unsaved_license: - licenses_exist = True - break - - if licenses_exist: - for component in self.components.values(): - observation_component_dependencies, _ = get_component_dependencies( - data, self.components, component, self.metadata - ) - model_component = License_Component( - component_name=component.name, - component_version=component.version, - component_purl=component.purl, - component_cpe=component.cpe, - component_dependencies=observation_component_dependencies, - ) - model_component.unsaved_license = component.unsaved_license - self._add_license_component_evidence(component, model_component) - components.append(model_component) - - return components + observation_component_dependencies = self._get_component_dependencies( + component.bom_ref, self.components, self.dependencies + ) + model_component = License_Component( + component_name=component.name, + component_version=component.version, + component_purl=component.purl, + component_cpe=component.cpe, + component_cyclonedx_bom_link=component.cyclonedx_bom_link, + component_dependencies=observation_component_dependencies, + ) + model_component.unsaved_declared_licenses = component.unsaved_declared_licenses + model_component.unsaved_concluded_licenses = component.unsaved_concluded_licenses + + self._add_license_component_evidence(component, model_component) + components.append(model_component) + + return components, self.metadata.scanner def _add_license_component_evidence( self, @@ -110,9 +139,7 @@ def _get_root_component_with_subs(self, data: dict) -> list[Component]: return self._get_sbom_component_with_subs(metadata_component) - def _get_sbom_component_with_subs( - self, component_data: dict[str, Any] - ) -> list[Component]: + def _get_sbom_component_with_subs(self, component_data: dict[str, Any]) -> list[Component]: components: list[Component] = [] component = self._get_component(component_data) if component: @@ -124,37 +151,54 @@ def _get_sbom_component_with_subs( return components def _get_component(self, component_data: dict[str, Any]) -> Optional[Component]: - if not component_data.get("bom-ref"): + + bom_ref = component_data.get("bom-ref") + if not bom_ref: return None - cyclonedx_licenses = [] + declared_licenses: list[str] = [] + concluded_licenses: list[str] = [] licenses = component_data.get("licenses", []) if licenses and licenses[0].get("expression"): - cyclonedx_licenses.append(licenses[0].get("expression")) + acknowledgement = licenses[0].get("acknowledgement") + if acknowledgement == "concluded": + concluded_licenses.append(licenses[0].get("expression")) + else: + declared_licenses.append(licenses[0].get("expression")) else: for my_license in licenses: component_license = my_license.get("license", 
{}).get("id") - if component_license and component_license not in cyclonedx_licenses: - cyclonedx_licenses.append(component_license) - - component_license = my_license.get("license", {}).get("name") - if component_license and component_license not in cyclonedx_licenses: - cyclonedx_licenses.append(component_license) + if not component_license: + component_license = my_license.get("license", {}).get("name") + if not component_license: + continue + + acknowledgement = my_license.get("license", {}).get("acknowledgement") + if acknowledgement == "concluded": + if component_license and component_license not in concluded_licenses: + concluded_licenses.append(component_license) + else: + if component_license and component_license not in declared_licenses: + declared_licenses.append(component_license) + + bom_link = "" + if self.metadata.serial_number: + bom_link = f"urn:cdx:{self.metadata.serial_number}/{str(self.metadata.version)}#{bom_ref}" return Component( - bom_ref=component_data.get("bom-ref", ""), + bom_ref=bom_ref, name=component_data.get("name", ""), version=component_data.get("version", ""), type=component_data.get("type", ""), purl=component_data.get("purl", ""), cpe=component_data.get("cpe", ""), + cyclonedx_bom_link=bom_link, json=component_data, - unsaved_license=", ".join(cyclonedx_licenses), + unsaved_declared_licenses=declared_licenses, + unsaved_concluded_licenses=concluded_licenses, ) - def _create_observations( # pylint: disable=too-many-locals - self, data: dict - ) -> list[Observation]: + def _create_observations(self, data: dict) -> list[Observation]: # pylint: disable=too-many-locals observations = [] for vulnerability in data.get("vulnerabilities", []): @@ -177,11 +221,8 @@ def _create_observations( # pylint: disable=too-many-locals if component: title = vulnerability_id - ( - observation_component_dependencies, - translated_component_dependencies, - ) = get_component_dependencies( - data, self.components, component, self.metadata + observation_component_dependencies = self._get_component_dependencies( + ref, self.components, self.dependencies ) observation = Observation( @@ -190,10 +231,12 @@ def _create_observations( # pylint: disable=too-many-locals recommendation=recommendation, parser_severity=severity, vulnerability_id=vulnerability_id, + vulnerability_id_aliases=self._get_aliases(vulnerability), origin_component_name=component.name, origin_component_version=component.version, origin_component_purl=component.purl, origin_component_cpe=component.cpe, + origin_component_cyclonedx_bom_link=component.cyclonedx_bom_link, origin_component_dependencies=observation_component_dependencies, cvss3_score=cvss3_score, cvss3_vector=cvss3_vector, @@ -213,20 +256,25 @@ def _create_observations( # pylint: disable=too-many-locals vulnerability, component, observation, - translated_component_dependencies, ) observations.append(observation) return observations - def _get_metadata(self, data: dict) -> Metadata: + def _get_metadata(self, data: dict) -> Metadata: # pylint: disable=too-many-branches scanner = "" container_name = "" container_tag = "" container_digest = "" file = "" + serial_number = data.get("serialNumber", "") + if serial_number.startswith("urn:uuid:"): + serial_number = serial_number.replace("urn:uuid:", "") + else: + serial_number = "" + tools = data.get("metadata", {}).get("tools") if tools: if isinstance(tools, dict): @@ -238,6 +286,7 @@ def _get_metadata(self, data: dict) -> Metadata: version = components_or_services[0].get("version") if version: scanner += " / " + 
version + if isinstance(tools, list): scanner = tools[0].get("name", "") version = tools[0].get("version") @@ -246,9 +295,7 @@ def _get_metadata(self, data: dict) -> Metadata: component_type = data.get("metadata", {}).get("component", {}).get("type") component_name = data.get("metadata", {}).get("component", {}).get("name", "") - component_version = ( - data.get("metadata", {}).get("component", {}).get("version", "") - ) + component_version = data.get("metadata", {}).get("component", {}).get("version", "") if component_type == "container": container_name = component_name if component_version and component_version.startswith("sha256:"): @@ -259,6 +306,8 @@ def _get_metadata(self, data: dict) -> Metadata: file = component_name return Metadata( + serial_number=serial_number, + version=data.get("version", 1), scanner=scanner, container_name=container_name, container_tag=container_tag, @@ -266,43 +315,51 @@ def _get_metadata(self, data: dict) -> Metadata: file=file, ) - def _get_cvss(self, vulnerability: dict, version: int): + def _get_cvss(self, vulnerability: dict, version: int) -> tuple[Optional[float], str]: ratings = vulnerability.get("ratings", []) if ratings: cvss_score = 0 - cvss_vector = None + cvss_vector = "" for rating in ratings: method = rating.get("method") if method and method.lower().startswith(f"cvssv{str(version)}"): current_cvss_score = rating.get("score", 0) if current_cvss_score > cvss_score: cvss_score = current_cvss_score - cvss_vector = rating.get("vector") + cvss_vector = str(rating.get("vector")) if cvss_score > 0: return cvss_score, cvss_vector - return None, None + return None, "" - def _get_highest_severity(self, vulnerability): + def _get_highest_severity(self, vulnerability: dict) -> str: current_severity = Severity.SEVERITY_UNKNOWN current_numerical_severity = 999 ratings = vulnerability.get("ratings", []) if ratings: for rating in ratings: - severity = rating.get( - "severity", Severity.SEVERITY_UNKNOWN - ).capitalize() + severity = rating.get("severity", Severity.SEVERITY_UNKNOWN).capitalize() numerical_severity = Severity.NUMERICAL_SEVERITIES.get(severity, 99) if numerical_severity < current_numerical_severity: current_severity = severity return current_severity - def _get_cwe(self, vulnerability): + def _get_cwe(self, vulnerability: dict) -> Optional[str]: cwes = vulnerability.get("cwes", []) if len(cwes) >= 1: return cwes[0] return None + def _get_aliases(self, vulnerability: dict) -> str: + aliases = [] + references = vulnerability.get("references", []) + for reference in references: + if reference.get("id"): + aliases.append(reference.get("id")) + if aliases: + return ", ".join(aliases) + return "" + def _add_references(self, vulnerability: dict, observation: Observation) -> None: advisories = vulnerability.get("advisories", []) if advisories: @@ -314,19 +371,64 @@ def _add_evidences( vulnerability: dict, component: Component, observation: Observation, - translated_component_dependencies: list[dict], - ): + ) -> None: evidence = [] evidence.append("Vulnerability") evidence.append(dumps(vulnerability)) observation.unsaved_evidences.append(evidence) + evidence = [] evidence.append("Component") evidence.append(dumps(component.json)) observation.unsaved_evidences.append(evidence) - if translated_component_dependencies: - evidence = [] - evidence.append("Dependencies") - evidence.append(dumps(translated_component_dependencies)) - observation.unsaved_evidences.append(evidence) + def _get_dependencies(self, data: dict) -> dict[str, list[str]]: + dependency_dict: 
dict[str, list[str]] = {} + + for dependency in data.get("dependencies", {}): + for dependency_key in dependency.get("dependsOn", []): + if dependency_key not in dependency_dict: + dependency_dict[dependency_key] = [dependency.get("ref")] + else: + dependency_dict[dependency_key].append(dependency.get("ref")) + + return dependency_dict + + def _get_component_dependencies( + self, + component_bom_ref: str, + component_dict: dict[str, Component], + dependency_dict: dict[str, list[str]], + ) -> str: + dependencies: list[str] = [] + self._get_dependencies_recursive(component_bom_ref, component_dict, dependency_dict, dependencies) + + dependencies.sort() + return "\n".join(dependencies) + + def _get_dependencies_recursive( + self, + component_bom_ref: str, + component_dict: dict[str, Component], + dependency_dict: dict[str, list[str]], + dependencies: list[str], + ) -> None: + if component_bom_ref in dependency_dict.keys(): + for dependency_id in dependency_dict[component_bom_ref]: + translated_dependency_id = self._translate_package_id(dependency_id, component_dict) + translated_package_id = self._translate_package_id(component_bom_ref, component_dict) + dependency = f"{translated_dependency_id} --> {translated_package_id}" + if dependency not in dependencies: + dependencies.append(dependency) + self._get_dependencies_recursive(dependency_id, component_dict, dependency_dict, dependencies) + + def _translate_package_id(self, component_bom_ref: str, component_dict: dict[str, Component]) -> str: + component = component_dict.get(component_bom_ref) + if not component: + logger.warning("Component with BOM ref %s not found", component_bom_ref) + return "" + + if component.version: + return f"{component.name}:{component.version}" + + return component.name diff --git a/backend/application/import_observations/parsers/cyclone_dx/types.py b/backend/application/import_observations/parsers/cyclone_dx/types.py deleted file mode 100644 index 79024aa58..000000000 --- a/backend/application/import_observations/parsers/cyclone_dx/types.py +++ /dev/null @@ -1,22 +0,0 @@ -from dataclasses import dataclass - - -@dataclass -class Component: - bom_ref: str - name: str - version: str - type: str - purl: str - cpe: str - json: dict[str, str] - unsaved_license: str - - -@dataclass -class Metadata: - scanner: str - container_name: str - container_tag: str - container_digest: str - file: str diff --git a/backend/application/import_observations/parsers/dependency_track/parser.py b/backend/application/import_observations/parsers/dependency_track/parser.py index 3efd80d89..64909ee89 100644 --- a/backend/application/import_observations/parsers/dependency_track/parser.py +++ b/backend/application/import_observations/parsers/dependency_track/parser.py @@ -3,7 +3,7 @@ import requests -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.core.types import Severity, Status from application.import_observations.models import Api_Configuration from application.import_observations.parsers.base_parser import ( @@ -23,7 +23,7 @@ class DependencyTrack(BaseParser, BaseAPIParser): - def __init__(self): + def __init__(self) -> None: self.api_configuration: Optional[Api_Configuration] = None @classmethod @@ -34,9 +34,7 @@ def get_name(cls) -> str: def get_type(cls) -> str: return Parser_Type.TYPE_SCA - def check_connection( - self, api_configuration: Api_Configuration - ) -> tuple[bool, list[str], dict]: + def check_connection(self, api_configuration: 
Api_Configuration) -> tuple[bool, list[str], dict]: self.api_configuration = api_configuration dependency_track_base_url = api_configuration.base_url @@ -47,9 +45,7 @@ def check_connection( if not dependency_track_base_url.endswith("/"): dependency_track_base_url += "/" - dependency_track_base_url += ( - f"api/v1/finding/project/{dependency_track_project_key}?suppressed=false" - ) + dependency_track_base_url += f"api/v1/finding/project/{dependency_track_project_key}?suppressed=false" headers = { "X-Api-Key": dependency_track_api_key, @@ -67,7 +63,9 @@ def check_connection( return True, [], response.json() - def get_observations(self, data: list[dict]) -> list[Observation]: + def get_observations( # pylint: disable=too-many-locals + self, data: list[dict], product: Product, branch: Optional[Branch] + ) -> tuple[list[Observation], str]: observations = [] scanner, version = self.get_about() @@ -83,9 +81,7 @@ def get_observations(self, data: list[dict]) -> list[Observation]: vulnerability_id = finding.get("vulnerability", {}).get("vulnId", "") cvss_v3_base_score = finding.get("vulnerability", {}).get("cvssV3BaseScore") cvss_v3_vector = finding.get("vulnerability", {}).get("cvssV3Vector") - severity = finding.get("vulnerability", {}).get( - "severity", Severity.SEVERITY_UNKNOWN - ) + severity = finding.get("vulnerability", {}).get("severity", Severity.SEVERITY_UNKNOWN) description = finding.get("vulnerability", {}).get("description") state = finding.get("analysis", {}).get("state") @@ -118,7 +114,7 @@ def get_observations(self, data: list[dict]) -> list[Observation]: observations.append(observation) - return observations + return observations, scanner def get_status(self, state: str) -> str: if not state: diff --git a/backend/application/import_observations/parsers/drheader/parser.py b/backend/application/import_observations/parsers/drheader/parser.py index b3053d124..eadbaea61 100644 --- a/backend/application/import_observations/parsers/drheader/parser.py +++ b/backend/application/import_observations/parsers/drheader/parser.py @@ -1,7 +1,7 @@ from json import dumps -from typing import Any +from typing import Any, Optional -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.core.types import Severity from application.import_observations.parsers.base_parser import ( BaseFileParser, @@ -112,7 +112,7 @@ def check_format(self, data: Any) -> bool: return True return False - def get_observations(self, data: list) -> list[Observation]: + def get_observations(self, data: list, product: Product, branch: Optional[Branch]) -> tuple[list[Observation], str]: observations = [] for drheader_observation in data: @@ -146,9 +146,7 @@ def get_observations(self, data: list) -> list[Observation]: else: description += "**Expected:** " + str(expected) - observation = Observation( - title=title, parser_severity=severity.title(), description=description - ) + observation = Observation(title=title, parser_severity=severity.title(), description=description) evidence = [] evidence.append("Result") @@ -160,4 +158,4 @@ def get_observations(self, data: list) -> list[Observation]: observation.unsaved_references = references observations.append(observation) - return observations + return observations, self.get_name() diff --git a/backend/application/import_observations/parsers/gitleaks/__init__.py b/backend/application/import_observations/parsers/gitleaks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/backend/application/import_observations/parsers/gitleaks/parser.py b/backend/application/import_observations/parsers/gitleaks/parser.py new file mode 100644 index 000000000..db4ba5bab --- /dev/null +++ b/backend/application/import_observations/parsers/gitleaks/parser.py @@ -0,0 +1,102 @@ +from json import dumps +from typing import Any, Optional + +from application.core.models import Branch, Observation, Product +from application.core.types import Severity +from application.import_observations.parsers.base_parser import ( + BaseFileParser, + BaseParser, +) +from application.import_observations.types import Parser_Filetype, Parser_Type + + +class GitleaksParser(BaseParser, BaseFileParser): + @classmethod + def get_name(cls) -> str: + return "Gitleaks" + + @classmethod + def get_filetype(cls) -> str: + return Parser_Filetype.FILETYPE_JSON + + @classmethod + def get_type(cls) -> str: + return Parser_Type.TYPE_SECRETS + + def check_format(self, data: Any) -> bool: + if not data or not isinstance(data, dict): + return False + + findings = data.get("findings") + if findings is None or not isinstance(findings, list): + return False + + if len(findings) == 0 or ( # pylint: disable=too-many-boolean-expressions + len(findings) >= 1 + and isinstance(findings[0], dict) + and findings[0].get("RuleID") + and findings[0].get("Match") + and findings[0].get("Secret") + ): + return True + return False + + def get_observations( # pylint: disable=too-many-locals + self, data: dict, product: Product, branch: Optional[Branch] + ) -> tuple[list[Observation], str]: + + findings = data.get("findings", []) + + observations = [] + + for entry in findings: + rule_id = entry.get("RuleID") + description = entry.get("Description") + start_line = entry.get("StartLine") + end_line = entry.get("EndLine") + match = entry.get("Match") + secret = entry.get("Secret") + file = entry.get("File") + link = entry.get("Link") + commit = entry.get("Commit") + date = entry.get("Date") + message = entry.get("Message") + + if match: + if secret: + match = match.replace(secret, "REDACTED") + description += f"\n\n**Match:** `{match}`" + + if commit: + description += f"\n\n**Commit hash:** {commit}" + if date: + description += f"\n\n**Commit date:** {date}" + if message: + if message.find("\n") >= 0: + message = message.split("\n")[0] + " ..." 
+ description += f"\n\n**Commit message:** {message}" + + observation = Observation( + title=rule_id, + parser_severity=Severity.SEVERITY_MEDIUM, + description=description, + origin_source_file=file, + origin_source_line_start=self.get_int_or_none(start_line), + origin_source_line_end=self.get_int_or_none(end_line), + origin_source_file_link=link, + ) + + evidence = [] + evidence.append("Entry") + + evidence_string = dumps(entry) + if secret: + secret = secret.replace("\r", "\\r").replace("\n", "\\n") + evidence_string = evidence_string.replace(secret, "REDACTED") + evidence.append(evidence_string) + + observation.unsaved_evidences.append(evidence) + + observations.append(observation) + + return observations, self.get_name() diff --git a/backend/application/import_observations/parsers/ocsf/parser.py b/backend/application/import_observations/parsers/ocsf/parser.py index 73a2fc8cd..eb6e662b3 100644 --- a/backend/application/import_observations/parsers/ocsf/parser.py +++ b/backend/application/import_observations/parsers/ocsf/parser.py @@ -1,5 +1,5 @@ import logging -from typing import Any +from typing import Any, Optional from py_ocsf_models.events.findings.detection_finding import ( ClassUID, @@ -10,7 +10,7 @@ from rest_framework.exceptions import ValidationError from semver import Version -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.core.types import Severity from application.import_observations.parsers.base_parser import ( BaseFileParser, @@ -36,26 +36,17 @@ def get_type(cls) -> str: return Parser_Type.TYPE_INFRASTRUCTURE def check_format(self, data: Any) -> bool: - if ( - isinstance(data, list) - and len(data) >= 1 - and isinstance(data[0], dict) - and data[0].get("class_uid") - ): + if isinstance(data, list) and len(data) >= 1 and isinstance(data[0], dict) and data[0].get("class_uid"): tool_name = data[0].get("metadata", {}).get("product", {}).get("name", "") - tool_version = ( - data[0].get("metadata", {}).get("product", {}).get("version", "") - ) - if tool_name == "Prowler" and ( - not tool_version or Version.parse(tool_version) < Version.parse("4.5.0") - ): + tool_version = data[0].get("metadata", {}).get("product", {}).get("version", "") + if tool_name == "Prowler" and (not tool_version or Version.parse(tool_version) < Version.parse("4.5.0")): return False return True return False - def get_observations(self, data: list) -> list[Observation]: + def get_observations(self, data: list, product: Product, branch: Optional[Branch]) -> tuple[list[Observation], str]: observations = [] for element in data: @@ -73,6 +64,10 @@ def get_observations(self, data: list) -> list[Observation]: if finding.status_id not in [StatusID.New, StatusID.InProgress]: continue + if finding.status_code in ["PASS", "MANUAL", "MUTED"]: + # These are status codes set by Prowler + continue + if finding.activity_id not in [ActivityID.Create, ActivityID.Update]: continue @@ -107,7 +102,8 @@ def get_observations(self, data: list) -> list[Observation]: except Exception as e: raise ValidationError(f"Error parsing OCSF finding: {str(e)}") from e - return observations + scanner = observations[0].scanner if observations else self.get_name() + return observations, scanner def get_origins(finding: DetectionFinding) -> list[Origin]: @@ -119,7 +115,9 @@ def get_origins(finding: DetectionFinding) -> list[Origin]: if finding.finding_info.uid.startswith("prowler-kubernetes"): for resource in finding.resources: namespace = "" - if resource.region 
and ":" in resource.region: + if resource.namespace: + namespace = resource.namespace + elif resource.region and ":" in resource.region: namespace = resource.region.split(":")[1].strip() origins.append( Origin( @@ -148,6 +146,8 @@ def get_origins(finding: DetectionFinding) -> list[Origin]: def get_description(finding: DetectionFinding) -> str: description = finding.finding_info.desc + if finding.status_code and finding.status_code != "FAIL": + description += f"\n\n**Status code:** {finding.status_code}" if finding.status_detail: description += f"\n\n**Status detail:** {finding.status_detail}" if finding.risk_details: diff --git a/backend/application/import_observations/parsers/osv/__init__.py b/backend/application/import_observations/parsers/osv/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/import_observations/parsers/osv/parser.py b/backend/application/import_observations/parsers/osv/parser.py new file mode 100644 index 000000000..bd6f73ac5 --- /dev/null +++ b/backend/application/import_observations/parsers/osv/parser.py @@ -0,0 +1,474 @@ +import logging +from concurrent.futures import ThreadPoolExecutor +from dataclasses import dataclass +from datetime import datetime +from json import dumps, loads +from typing import Callable, Optional + +import requests +from packageurl import PackageURL + +from application.core.models import Branch, Observation, Product +from application.core.types import OSVLinuxDistribution +from application.import_observations.models import OSV_Cache +from application.import_observations.parsers.base_parser import BaseParser +from application.import_observations.parsers.osv.rpm import RpmVersion +from application.import_observations.types import ExtendedSemVer, Parser_Type +from application.licenses.models import License_Component + +logger = logging.getLogger("secobserve.import_observations") + + +@dataclass(frozen=True) +class OSV_Vulnerability: + id: str + modified: datetime + + +@dataclass +class OSV_Component: + license_component: License_Component + vulnerabilities: set[OSV_Vulnerability] + + +@dataclass +class Event: + type: str + introduced: str + fixed: str + + +OSV_Non_Linux_Ecosystems = { + "bitnami": "Bitnami", + "conan": "ConanCenter", + "cran": "CRAN", + "cargo": "crates.io", + "golang": "Go", + "hackage": "Hackage", + "hex": "Hex", + "maven": "Maven", + "npm": "npm", + "nuget": "NuGet", + "pub": "Pub", + "pypi": "PyPI", + "gem": "RubyGems", + "swift": "SwiftURL", +} + + +class OSVParser(BaseParser): + @classmethod + def get_name(cls) -> str: + return "OSV (Open Source Vulnerabilities)" + + @classmethod + def get_type(cls) -> str: + return Parser_Type.TYPE_SCA + + def get_observations( # pylint: disable=too-many-locals + self, data: list[OSV_Component], product: Product, branch: Optional[Branch] + ) -> tuple[list[Observation], str]: + observations = [] + + for osv_component in data: + ordered_vulnerabilities = sorted(osv_component.vulnerabilities, key=lambda x: x.id) + osv_cache = self._fill_osv_cache(ordered_vulnerabilities) + + for vulnerability in ordered_vulnerabilities: + osv_cache_item = osv_cache.get(vulnerability.id) + if not osv_cache_item: + logger.warning("OSV vulnerability %s not found", vulnerability.id) + continue + + osv_vulnerability = loads(osv_cache_item.data) + if osv_vulnerability.get("withdrawn"): + continue + + vulnerability_id, vulnerability_id_aliases = self._get_osv_ids(osv_vulnerability) + osv_cvss3_vector, osv_cvss4_vector = self._get_cvss(osv_vulnerability) + + try: + 
parsed_purl = PackageURL.from_string(osv_component.license_component.component_purl) + except ValueError as e: + logger.error("Invalid PURL %s: %s", osv_component.license_component.component_purl, str(e)) + continue + + affected_ecosystems = self._get_affected_ecosystems(parsed_purl, osv_vulnerability, product, branch) + + component_in_versions = None + component_in_ranges = None + recommendation = "" + events = [] + + for affected_ecosystem in affected_ecosystems: + component_in_versions = self._is_version_in_affected( + osv_component.license_component.component_version, affected_ecosystem + ) + component_in_ranges, fixed_version, affected_events = self._is_version_in_ranges( + parsed_purl, + osv_component.license_component.component_version, + affected_ecosystem, + ) + + events.extend(affected_events) + + if component_in_versions or component_in_ranges: + affected_cvss3_vector, affected_cvss4_vector = self._get_affected_cvss(affected_ecosystem) + if affected_cvss3_vector: + osv_cvss3_vector = affected_cvss3_vector + if affected_cvss4_vector: + osv_cvss4_vector = affected_cvss4_vector + + if fixed_version: + recommendation = f"Update to version {fixed_version}" + + break + + if affected_ecosystems and ( + (component_in_versions is None and component_in_ranges is None) + or component_in_versions is True + or component_in_ranges is not False + ): + observation = Observation( + title=vulnerability_id, + description=self._get_description( + osv_vulnerability, + component_in_versions, + component_in_ranges, + events, + ), + recommendation=recommendation, + cvss3_vector=osv_cvss3_vector, + cvss4_vector=osv_cvss4_vector, + vulnerability_id=vulnerability_id, + vulnerability_id_aliases=vulnerability_id_aliases, + origin_component_name=osv_component.license_component.component_name, + origin_component_version=osv_component.license_component.component_version, + origin_component_purl=osv_component.license_component.component_purl, + origin_component_cpe=osv_component.license_component.component_cpe, + origin_component_cyclonedx_bom_link=osv_component.license_component.component_cyclonedx_bom_link, # noqa: E501 pylint: disable=line-too-long + origin_component_dependencies=osv_component.license_component.component_dependencies, + ) + observations.append(observation) + + observation.unsaved_references = self._get_references(osv_vulnerability) + + evidence = [] + evidence.append("OSV Vulnerability") + evidence.append(dumps(osv_vulnerability)) + observation.unsaved_evidences.append(evidence) + + return observations, self.get_name() + + def _fill_osv_cache(self, ordered_vulnerabilities: list[OSV_Vulnerability]) -> dict[str, OSV_Cache]: + vulnerability_ids_tmp = {vulnerability.id: vulnerability.modified for vulnerability in ordered_vulnerabilities} + vulnerabilities_from_cache = list(OSV_Cache.objects.filter(osv_id__in=vulnerability_ids_tmp)) + valid_vulnerability_ids = [ + vulnerability.osv_id + for vulnerability in vulnerabilities_from_cache + if vulnerability.modified >= vulnerability_ids_tmp[vulnerability.osv_id] + ] + invalid_vulnerability_ids = [ + vulnerability.osv_id + for vulnerability in vulnerabilities_from_cache + if vulnerability.modified < vulnerability_ids_tmp[vulnerability.osv_id] + ] + OSV_Cache.objects.filter(osv_id__in=invalid_vulnerability_ids).delete() + missing_osv_vulnerabilities = [] + for osv_vulnerability in ordered_vulnerabilities: + if osv_vulnerability.id not in valid_vulnerability_ids: + missing_osv_vulnerabilities.append(osv_vulnerability) + + def 
_read_osv_vulnerability(osv_vulnerability: OSV_Vulnerability) -> OSV_Cache: + response = requests.get( + url=f"https://api.osv.dev/v1/vulns/{osv_vulnerability.id}", + timeout=60, + ) + response.raise_for_status() + return OSV_Cache(osv_id=osv_vulnerability.id, modified=osv_vulnerability.modified, data=response.text) + + # max number of threads to use + + MAX_THREADS = 100 + + with ThreadPoolExecutor(max_workers=MAX_THREADS) as executor: + osv_cache_items_from_osv = list(executor.map(_read_osv_vulnerability, missing_osv_vulnerabilities)) + + if osv_cache_items_from_osv: + OSV_Cache.objects.bulk_create(osv_cache_items_from_osv) + + valid_osv_cache_items = [ + vulnerability + for vulnerability in vulnerabilities_from_cache + if vulnerability.osv_id not in invalid_vulnerability_ids + ] + + relevant_osv_cache_items = valid_osv_cache_items + osv_cache_items_from_osv + osv_cache = {osv_cache_item.osv_id: osv_cache_item for osv_cache_item in relevant_osv_cache_items} + return osv_cache + + def _get_osv_ids(self, osv_vulnerability: dict) -> tuple[str, str]: + osv_id = str(osv_vulnerability.get("id", "")) + + aliases: list[str] = [] + cve_to_swap = None + cve_found = False + for alias in osv_vulnerability.get("aliases", []): + aliases.append(str(alias)) + if not osv_id.startswith("CVE-") and str(alias).startswith("CVE-"): + if cve_found: + cve_to_swap = None + else: + cve_to_swap = str(alias) + cve_found = True + for alias in osv_vulnerability.get("upstream", []): + aliases.append(str(alias)) + if not osv_id.startswith("CVE-") and str(alias).startswith("CVE-"): + if cve_found: + cve_to_swap = None + else: + cve_to_swap = str(alias) + cve_found = True + + if cve_to_swap: + aliases.insert(0, osv_id) + osv_id = cve_to_swap + aliases = [x for x in aliases if x != cve_to_swap] + + aliases.sort() + return osv_id, ", ".join(aliases) + + def _get_description( + self, + osv_vulnerability: dict, + component_in_versions: Optional[bool], + component_in_ranges: Optional[bool], + events: list[Event], + ) -> str: + osv_description_parts: list[str] = [] + if osv_vulnerability.get("summary"): + osv_description_parts.append(str(osv_vulnerability.get("summary"))) + if osv_vulnerability.get("details"): + osv_description_parts.append(str(osv_vulnerability.get("details"))) + + if component_in_versions: + osv_description_parts.append("**Confidence: High** (Component found in affected versions)") + elif component_in_ranges: + osv_description_parts.append("**Confidence: High** (Component found in affected ranges)") + elif component_in_versions is None and component_in_ranges is None: + osv_description_parts.append("**Confidence: Low** (No information about affected versions or ranges)") + elif component_in_ranges is None: + osv_description_parts.append("**Confidence: Low** (Events could not be evaluated)") + osv_description_parts.append("**Events:**") + for event in events: + osv_description_parts.append(f"* {event.type}: Introduced: {event.introduced} - Fixed: {event.fixed}") + + return "\n\n".join(osv_description_parts) + + def _get_cvss(self, osv_vulnerability: dict) -> tuple[str, str]: + cvss3_vector = "" + cvss4_vector = "" + + severities = osv_vulnerability.get("severity", []) + for severity in severities: + if severity.get("type") == "CVSS_V3": + cvss3_vector = severity.get("score") + if severity.get("type") == "CVSS_V4": + cvss4_vector = severity.get("score") + + return cvss3_vector, cvss4_vector + + def _get_references(self, osv_vulnerability: dict) -> list[str]: + references = [] + for reference in 
osv_vulnerability.get("references", []): + references.append(reference.get("url")) + return references + + def _get_affected_ecosystems( + self, + parsed_purl: PackageURL, + osv_vulnerability: dict, + product: Product, + branch: Optional[Branch], + ) -> list[dict]: + + affected = [] + + package_type = parsed_purl.type + package_name = self._get_package_name(parsed_purl) + + package_osv_ecosystem = OSV_Non_Linux_Ecosystems.get(package_type) + + if not package_osv_ecosystem and branch and branch.osv_linux_distribution and branch.osv_linux_release: + package_osv_ecosystem = f"{branch.osv_linux_distribution}:{branch.osv_linux_release}" + if not package_osv_ecosystem and branch and branch.osv_linux_distribution: + package_osv_ecosystem = branch.osv_linux_distribution + + if not package_osv_ecosystem and product.osv_linux_distribution and product.osv_linux_release: + package_osv_ecosystem = f"{product.osv_linux_distribution}:{product.osv_linux_release}" + if not package_osv_ecosystem and product.osv_linux_distribution: + package_osv_ecosystem = product.osv_linux_distribution + + package_osv_ecosystem = self._get_linux_package_osv_ecosystem(parsed_purl, package_osv_ecosystem) + + for affected_item in osv_vulnerability.get("affected", []): + package = affected_item.get("package", {}) + affected_ecosystem = package.get("ecosystem") + affected_name = package.get("name") + + if ( + affected_ecosystem + and package_osv_ecosystem + and affected_ecosystem.startswith(package_osv_ecosystem) + and package_name == affected_name + ): + affected.append(affected_item) + + return affected + + def _get_linux_package_osv_ecosystem( + self, parsed_purl: PackageURL, package_osv_ecosystem: Optional[str] + ) -> Optional[str]: + if not package_osv_ecosystem: + package_osv_ecosystem = self._get_linux_package_osv_ecosystem_apk(parsed_purl) + if not package_osv_ecosystem: + package_osv_ecosystem = self._get_linux_package_osv_ecosystem_deb(parsed_purl) + return package_osv_ecosystem + + def _get_linux_package_osv_ecosystem_apk(self, parsed_purl: PackageURL) -> Optional[str]: + package_osv_ecosystem = None + + if parsed_purl.qualifiers and isinstance(parsed_purl.qualifiers, dict): + package_type = parsed_purl.type + if package_type == "apk" and parsed_purl.namespace == "alpine": + distro = parsed_purl.qualifiers.get("distro") + if distro: + if distro.startswith("alpine-"): + distro = distro[7:] + distro_parts = distro.split(".") + if len(distro_parts) >= 2 and distro_parts[0].isdigit() and distro_parts[1].isdigit(): + distro_version = f"{distro_parts[0]}.{distro_parts[1]}" + package_osv_ecosystem = f"{OSVLinuxDistribution.DISTRIBUTION_ALPINE}:v{distro_version}" + elif package_type == "apk" and parsed_purl.namespace == "chainguard": + package_osv_ecosystem = OSVLinuxDistribution.DISTRIBUTION_CHAINGUARD + elif package_type == "apk" and parsed_purl.namespace == "wolfi": + package_osv_ecosystem = OSVLinuxDistribution.DISTRIBUTION_WOLFI + + return package_osv_ecosystem + + def _get_linux_package_osv_ecosystem_deb(self, parsed_purl: PackageURL) -> Optional[str]: + package_osv_ecosystem = None + + if parsed_purl.qualifiers and isinstance(parsed_purl.qualifiers, dict): + package_type = parsed_purl.type + if package_type == "deb" and parsed_purl.namespace == "debian": + distro = parsed_purl.qualifiers.get("distro") + if distro: + if distro.startswith("debian-"): + distro = distro[7:] + distro_parts = distro.split(".") + if len(distro_parts) >= 1 and distro_parts[0].isdigit(): + package_osv_ecosystem = 
f"{OSVLinuxDistribution.DISTRIBUTION_DEBIAN}:{distro_parts[0]}" + elif package_type == "deb" and parsed_purl.namespace == "ubuntu": + distro = parsed_purl.qualifiers.get("distro") + if distro: + if distro.startswith("ubuntu-"): + distro = distro[7:] + distro_parts = distro.split(".") + if len(distro_parts) >= 2: + if distro_parts[0].isdigit() and int(distro_parts[0]) % 2 == 0 and distro_parts[1] == "04": + package_osv_ecosystem = ( + f"{OSVLinuxDistribution.DISTRIBUTION_UBUNTU}:{distro_parts[0]}.{distro_parts[1]}:LTS" + ) + else: + package_osv_ecosystem = ( + f"{OSVLinuxDistribution.DISTRIBUTION_UBUNTU}:{distro_parts[0]}.{distro_parts[1]}" + ) + + return package_osv_ecosystem + + def _get_package_name(self, parsed_purl: PackageURL) -> str: + package_name = parsed_purl.name + package_namespace = parsed_purl.namespace + package_type = parsed_purl.type + if package_namespace and OSV_Non_Linux_Ecosystems.get(package_type): + if package_type == "maven": + package_name = f"{package_namespace}:{package_name}" + else: + package_name = f"{package_namespace}/{package_name}" + return package_name + + def _get_affected_cvss(self, affected: dict) -> tuple[str, str]: + cvss3_vector = "" + cvss4_vector = "" + + severity = affected.get("severity") + if severity: + if severity.get("type") == "CVSS_V3": + cvss3_vector = severity.get("score") + if severity.get("type") == "CVSS_V4": + cvss4_vector = severity.get("score") + + return cvss3_vector, cvss4_vector + + def _is_version_in_affected(self, version: str, affected: dict) -> bool: + if not version: + return True + + versions = affected.get("versions", []) + return version in versions + + def _is_version_in_ranges( + self, parsed_purl: PackageURL, version: str, affected: dict + ) -> tuple[Optional[bool], Optional[str], list[Event]]: + if not version: + return None, None, [] + + version_parser: Callable[[str | None], ExtendedSemVer | RpmVersion | None] = ExtendedSemVer.parse + if parsed_purl.type == "rpm": + version_parser = RpmVersion.parse + + events = self._get_events(affected) + + version_semver = version_parser(version) + if not version_semver: + return None, None, events + + num_rejected_events = 0 + for event in events: + if event.type in ("ECOSYSTEM", "SEMVER"): + introduced_semver = version_parser(event.introduced) + fixed_semver = version_parser(event.fixed) + + if not introduced_semver: + introduced_semver = version_parser("0.0.0") + if not fixed_semver: + continue + + if introduced_semver <= version_semver < fixed_semver: + return True, event.fixed, events + + num_rejected_events += 1 + + if num_rejected_events == len(events): + return False, None, events + + return None, None, events + + def _get_events(self, affected: dict) -> list[Event]: + events = [] + + osv_ranges = affected.get("ranges", []) + for osv_range in osv_ranges: + event = Event(osv_range.get("type", ""), introduced="", fixed="") + for osv_event in osv_range.get("events", []): + introduced = osv_event.get("introduced", "") + if introduced: + event.introduced = introduced + fixed = osv_event.get("fixed", "") + if fixed: + event.fixed = fixed + if event.introduced and event.fixed: + events.append(event) + event = Event(osv_range.get("type", ""), introduced="", fixed="") + return events diff --git a/backend/application/import_observations/parsers/osv/rpm.py b/backend/application/import_observations/parsers/osv/rpm.py new file mode 100644 index 000000000..29c11b644 --- /dev/null +++ b/backend/application/import_observations/parsers/osv/rpm.py @@ -0,0 +1,268 @@ +# +# Copyright (c) SAS 
Institute Inc. +# Copyright (c) Facebook, Inc. and its affiliates. +# +# SPDX-License-Identifier: MIT AND Apache-2.0 +# Version comparison utility extracted from python-rpm-vercmp and further +# stripped down and significantly modified from the original at python-rpm-vercmp +# Also includes updates from Facebook antlir merged in. +# +# Fetched from https://github.com/google/osv.dev/blob/master/osv/third_party/univers/rpm.py +# +# Visit https://aboutcode.org and https://github.com/nexB/univers for support and download. + +import re +from typing import ( + Any, + NamedTuple, + Optional, + Tuple, + TypeVar, + no_type_check, +) + +# Define a TypeVar for RpmVersion comparisons +T_co = TypeVar("T_co", covariant=True) + + +class RpmVersion(NamedTuple): + """ + Represent an RPM version. It is ordered. + """ + + epoch: int + version: str + release: str + + def __str__(self, *args: Any, **kwargs: Any) -> str: + return self.to_string() + + def to_string(self) -> str: + if self.release: + vr = f"{self.version}-{self.release}" + else: + vr = self.version + + if self.epoch: + vr = f"{self.epoch}:{vr}" + return vr + + @classmethod + def parse(cls, s: Optional[str]) -> Optional["RpmVersion"]: + if not s: + return None + return RpmVersion.from_string(s) + + @classmethod + def from_string(cls, s: str) -> "RpmVersion": + s = s.strip() + e, v, r = from_evr(s) + return cls(e, v, r) + + def __lt__(self, other: object) -> bool: + if not isinstance(other, RpmVersion): + return NotImplemented + return compare_rpm_versions(self, other) < 0 + + def __gt__(self, other: object) -> bool: + if not isinstance(other, RpmVersion): + return NotImplemented + return compare_rpm_versions(self, other) > 0 + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RpmVersion): + return NotImplemented + return compare_rpm_versions(self, other) == 0 + + def __le__(self, other: object) -> bool: + if not isinstance(other, RpmVersion): + return NotImplemented + return compare_rpm_versions(self, other) <= 0 + + def __ge__(self, other: object) -> bool: + if not isinstance(other, RpmVersion): + return NotImplemented + return compare_rpm_versions(self, other) >= 0 + + +def from_evr(s: str) -> Tuple[int, str, str]: + """ + Return an (E, V, R) tuple given a string by splitting + [e:]version-release into the three possible subcomponents. + Default epoch to 0, version and release to empty string if not specified. + + >>> assert from_evr("1:11.13.2.0-1") == (1, "11.13.2.0", "1") + >>> assert from_evr("11.13.2.0-1") == (0, "11.13.2.0", "1") + """ + if ":" in s: + e, _, vr = s.partition(":") + else: + e = "0" + vr = s + + ie = int(e) + + if "-" in vr: + v, _, r = vr.partition("-") + else: + v = vr + r = "" + return ie, v, r + + +def compare_rpm_versions(a: RpmVersion | str, b: RpmVersion | str) -> int: + """ + Compare two RPM versions ``a`` and ``b`` and return: + - 1 if the version of a is newer than b + - 0 if the versions match + - -1 if the version of a is older than b + + These are the legacy "cmp()" function semantics. 
+ + This implementation is adapted from both this blog post: + https://blog.jasonantman.com/2014/07/how-yum-and-rpm-compare-versions/ + and this Apache 2 licensed implementation: + https://github.com/sassoftware/python-rpm-vercmp/blob/master/rpm_vercmp/vercmp.py + + For example:: + >>> assert compare_rpm_versions("1.0", "1.1") == -1 + >>> assert compare_rpm_versions("1.1", "1.0") == 1 + >>> assert compare_rpm_versions("11.13.2-1", "11.13.2.0-1") == -1 + >>> assert compare_rpm_versions("11.13.2.0-1", "11.13.2-1") == 1 + """ + if isinstance(a, str): + a = RpmVersion.from_string(a) + if isinstance(b, str): + b = RpmVersion.from_string(b) + if not isinstance(a, RpmVersion) and not isinstance(b, RpmVersion): + raise TypeError(f"{a!r} and {b!r} must be RpmVersion or strings") + + # First compare the epoch, if set. If the epochs are not the same, then + # the higher one wins no matter what the rest of the EVR is. + if a.epoch != b.epoch: + if a.epoch > b.epoch: + return 1 # a > b + return -1 # a < b + + # Epoch is the same, if version + release are the same we have a match + if (a.version == b.version) and (a.release == b.release): + return 0 # a == b + # Compare version first, if version is equal then compare release + compare_res = vercmp(a.version, b.version) + if compare_res != 0: # a > b || a < b + return compare_res + return vercmp(a.release, b.release) + + +class Vercmp: + R_NONALNUMTILDE_CARET = re.compile(rb"^([^a-zA-Z0-9~\^]*)(.*)$") # NOSONAR + R_NUM = re.compile(rb"^([\d]+)(.*)$") # NOSONAR + R_ALPHA = re.compile(rb"^([a-zA-Z]+)(.*)$") # NOSONAR + # The risk of a DDoS attack on these regular expressions is very low here. Attackers would need to provide + # a specially crafted SBOM and SecObserve typically would not have very high availability requirements. + + @classmethod + @no_type_check + def compare(cls, first, second) -> int: + # pylint: disable=too-many-return-statements, too-many-branches + # Rpm versions can only be ascii, anything else is just ignored + first = first.encode("ascii", "ignore") + second = second.encode("ascii", "ignore") + + if first == second: + return 0 + + while first or second: + m1 = cls.R_NONALNUMTILDE_CARET.match(first) + m2 = cls.R_NONALNUMTILDE_CARET.match(second) + m1_head, first = m1.group(1), m1.group(2) + m2_head, second = m2.group(1), m2.group(2) + if m1_head or m2_head: + # Ignore junk at the beginning + continue + + # handle the tilde separator, it sorts before everything else + if first.startswith(b"~"): + if not second.startswith(b"~"): + return -1 + first, second = first[1:], second[1:] + continue + if second.startswith(b"~"): + return 1 + + # Now look at the caret, which is like the tilde but pointier. + if first.startswith(b"^"): + # first has a caret but second has ended + if not second: + return 1 # first > second + + # first has a caret but second continues on + if not second.startswith(b"^"): + return -1 # first < second + + # strip the ^ and start again + first, second = first[1:], second[1:] + continue + + # Caret means the version is less... Unless the other version + # has ended, then do the exact opposite. + if second.startswith(b"^"): + return -1 if not first else 1 + + # We've run out of characters to compare. + # Note: we have to do this after we compare the ~ and ^ madness + # because ~'s and ^'s take precedence. 
+ # If we ran to the end of either, we are finished with the loop + if not first or not second: + break + + # grab first completely alpha or completely numeric segment + m1 = cls.R_NUM.match(first) + if m1: + m2 = cls.R_NUM.match(second) + if not m2: + # numeric segments are always newer than alpha segments + return 1 + isnum = True + else: + m1 = cls.R_ALPHA.match(first) + m2 = cls.R_ALPHA.match(second) + if not m2: + return -1 + isnum = False + + m1_head, first = m1.group(1), m1.group(2) + m2_head, second = m2.group(1), m2.group(2) + + if isnum: + # throw away any leading zeros - it's a number, right? + m1_head = m1_head.lstrip(b"0") + m2_head = m2_head.lstrip(b"0") + + # whichever number has more digits wins + m1hlen = len(m1_head) + m2hlen = len(m2_head) + if m1hlen < m2hlen: + return -1 + if m1hlen > m2hlen: + return 1 + + # Same number of chars + if m1_head < m2_head: + return -1 + if m1_head > m2_head: + return 1 + # Both segments equal + continue + + m1len = len(first) + m2len = len(second) + if m1len == m2len == 0: + return 0 + if m1len != 0: + return 1 + return -1 + + +def vercmp(first: str, second: str) -> int: + return Vercmp.compare(first, second) diff --git a/backend/application/import_observations/parsers/prowler/parser.py b/backend/application/import_observations/parsers/prowler/parser.py index 6f6d04b9b..9c381f5d4 100644 --- a/backend/application/import_observations/parsers/prowler/parser.py +++ b/backend/application/import_observations/parsers/prowler/parser.py @@ -1,7 +1,7 @@ from json import dumps -from typing import Any +from typing import Any, Optional -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.core.types import Severity from application.import_observations.parsers.base_parser import ( BaseFileParser, @@ -34,17 +34,15 @@ def check_format(self, data: Any) -> bool: return True return False - def get_observations(self, data: list[dict]) -> list[Observation]: + def get_observations( # pylint: disable=too-many-locals + self, data: list[dict], product: Product, branch: Optional[Branch] + ) -> tuple[list[Observation], str]: observations = [] for prowler_observation in data: if prowler_observation.get("Status", "").lower() == "fail": - status_extended = prowler_observation.get( - "StatusExtended", "No StatusExtended found" - ) - severity = prowler_observation.get( - "Severity", Severity.SEVERITY_UNKNOWN - ).capitalize() + status_extended = prowler_observation.get("StatusExtended", "No StatusExtended found") + severity = prowler_observation.get("Severity", Severity.SEVERITY_UNKNOWN).capitalize() if severity == "Informational": severity = Severity.SEVERITY_NONE @@ -100,7 +98,7 @@ def get_observations(self, data: list[dict]) -> list[Observation]: observations.append(observation) - return observations + return observations, self.get_name() def get_description(self, prowler_observation: dict) -> str: check_title = prowler_observation.get("CheckTitle") @@ -115,16 +113,8 @@ def get_description(self, prowler_observation: dict) -> str: return description def get_recommendation(self, prowler_observation: dict) -> str: - recommendation_text = ( - prowler_observation.get("Remediation", {}) - .get("Recommendation", {}) - .get("Text") - ) - recommendation_url = ( - prowler_observation.get("Remediation", {}) - .get("Recommendation", {}) - .get("Url") - ) + recommendation_text = prowler_observation.get("Remediation", {}).get("Recommendation", {}).get("Text") + recommendation_url = 
prowler_observation.get("Remediation", {}).get("Recommendation", {}).get("Url") recommendation_code = prowler_observation.get("Remediation", {}).get("Code") recommendation = "" if recommendation_text: diff --git a/backend/application/import_observations/parsers/sarif/parser.py b/backend/application/import_observations/parsers/sarif/parser.py index 4469bfa64..b7fd3b080 100644 --- a/backend/application/import_observations/parsers/sarif/parser.py +++ b/backend/application/import_observations/parsers/sarif/parser.py @@ -4,7 +4,7 @@ from packageurl import PackageURL -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.core.types import Severity from application.import_observations.parsers.base_parser import ( BaseFileParser, @@ -55,24 +55,18 @@ def get_type(cls) -> str: return Parser_Type.TYPE_SAST def check_format(self, data: Any) -> bool: - if ( - isinstance(data, dict) - and "sarif" in data.get("$schema", "").lower() - and data.get("version") == "2.1.0" - ): + if isinstance(data, dict) and "sarif" in data.get("$schema", "").lower() and data.get("version") == "2.1.0": return True return False - def get_observations(self, data: dict) -> list[Observation]: + def get_observations(self, data: dict, product: Product, branch: Optional[Branch]) -> tuple[list[Observation], str]: observations: list[Observation] = [] for run in data.get("runs", []): sarif_scanner = run.get("tool", {}).get("driver", {}).get("name") sarif_version = run.get("tool", {}).get("driver", {}).get("version") if not sarif_version: - sarif_version = ( - run.get("tool", {}).get("driver", {}).get("semanticVersion") - ) + sarif_version = run.get("tool", {}).get("driver", {}).get("semanticVersion") if sarif_version: sarif_scanner += " / " + sarif_version @@ -105,7 +99,7 @@ def get_observations(self, data: dict) -> list[Observation]: sarif_location=None, ) - return observations + return observations, sarif_scanner def create_observation( # pylint: disable=too-many-locals self, @@ -115,7 +109,7 @@ def create_observation( # pylint: disable=too-many-locals sarif_scanner: str, sarif_rules: dict[str, Rule], sarif_location: Optional[dict], - ): + ) -> None: location = self.get_location_data(sarif_location) sarif_rule_id = result.get("ruleId", "") @@ -123,12 +117,8 @@ def create_observation( # pylint: disable=too-many-locals parser_severity = self.get_parser_severity(result, sarif_scanner, sarif_rule) - parser_cvss3_score, parser_cvss3_vector = self.get_dependency_check_cvss( - sarif_scanner, sarif_rule, 3 - ) - parser_cvss4_score, parser_cvss4_vector = self.get_dependency_check_cvss( - sarif_scanner, sarif_rule, 4 - ) + parser_cvss3_score, parser_cvss3_vector = self.get_dependency_check_cvss(sarif_scanner, sarif_rule, 3) + parser_cvss4_score, parser_cvss4_vector = self.get_dependency_check_cvss(sarif_scanner, sarif_rule, 4) if parser_cvss3_score or parser_cvss4_score: parser_severity = "" @@ -146,17 +136,11 @@ def create_observation( # pylint: disable=too-many-locals if sarif_rule.properties and isinstance(sarif_rule.properties, dict): sarif_cwe = self.get_cwe(sarif_rule.properties.get("tags", [])) - parser_vulnerability_id = self.get_dependency_check_vulnerability_id( - sarif_scanner, title - ) + parser_vulnerability_id = self.get_dependency_check_vulnerability_id(sarif_scanner, title) - origin_component_purl = self.get_dependency_check_origin_component_purl( - sarif_scanner, sarif_location - ) + origin_component_purl = 
self.get_dependency_check_origin_component_purl(sarif_scanner, sarif_location) if origin_component_purl: - origin_component_name, origin_component_version = self.extract_component( - origin_component_purl - ) + origin_component_name, origin_component_version = self.extract_component(origin_component_purl) location.uri = "" else: origin_component_name = "" @@ -197,9 +181,7 @@ def create_observation( # pylint: disable=too-many-locals observations.append(observation) - def get_title( - self, sarif_scanner: str, sarif_rule_id: str, sarif_rule: Rule - ) -> str: + def get_title(self, sarif_scanner: str, sarif_rule_id: str, sarif_rule: Rule) -> str: if sarif_rule.name: title = sarif_rule.name else: @@ -218,38 +200,19 @@ def get_trivy_title(self, title: str, sarif_scanner: str, sarif_rule: Rule) -> s def get_location_data(self, sarif_location: Optional[dict]) -> Location: location = Location() if sarif_location: - location.uri = ( - sarif_location.get("physicalLocation", {}) - .get("artifactLocation", {}) - .get("uri", "") - ) - location.start_line = ( - sarif_location.get("physicalLocation", {}) - .get("region", {}) - .get("startLine") - ) - location.end_line = ( - sarif_location.get("physicalLocation", {}) - .get("region", {}) - .get("endLine") - ) + location.uri = sarif_location.get("physicalLocation", {}).get("artifactLocation", {}).get("uri", "") + location.start_line = sarif_location.get("physicalLocation", {}).get("region", {}).get("startLine") + location.end_line = sarif_location.get("physicalLocation", {}).get("region", {}).get("endLine") location.snippet = ( - sarif_location.get("physicalLocation", {}) - .get("region", {}) - .get("snippet", {}) - .get("text") + sarif_location.get("physicalLocation", {}).get("region", {}).get("snippet", {}).get("text") ) return location - def get_parser_severity( - self, result: dict, sarif_scanner: str, sarif_rule: Rule - ) -> str: + def get_parser_severity(self, result: dict, sarif_scanner: str, sarif_rule: Rule) -> str: sarif_level = result.get("level") if sarif_level: - parser_severity = SEVERITIES.get( - sarif_level.lower(), Severity.SEVERITY_UNKNOWN - ) + parser_severity = SEVERITIES.get(sarif_level.lower(), Severity.SEVERITY_UNKNOWN) elif sarif_rule.default_level: parser_severity = SEVERITIES.get( sarif_rule.default_level.lower(), @@ -273,7 +236,7 @@ def get_description( # pylint: disable=too-many-branches ) -> str: description = "" - sarif_message_text = result.get("message", {}).get("text") + sarif_message_text = result.get("message", {}).get("text", "") if sarif_message_text and not sarif_scanner.lower().startswith("trivy"): # Message text of Trivy has only redundant information description += f"{sarif_message_text}\n\n" @@ -285,13 +248,9 @@ def get_description( # pylint: disable=too-many-branches and not sarif_scanner.lower().startswith("semgrep") ): # Rule short description of some scanners have only redundant information - description += ( - f"**Rule short description:** {sarif_rule.short_description}\n\n" - ) + description += f"**Rule short description:** {sarif_rule.short_description}\n\n" - rule_short_description = ( - sarif_rule.short_description if sarif_rule.short_description else "" - ) + rule_short_description = sarif_rule.short_description if sarif_rule.short_description else "" if ( sarif_rule.full_description and sarif_rule.full_description not in sarif_message_text @@ -299,9 +258,7 @@ def get_description( # pylint: disable=too-many-branches and not sarif_scanner.lower().startswith("semgrep") ): # Rule short description of some 
scanners have only redundant information - description += ( - f"**Rule full description:** {sarif_rule.full_description}\n\n" - ) + description += f"**Rule full description:** {sarif_rule.full_description}\n\n" if ( # pylint: disable=too-many-boolean-expressions sarif_rule.help @@ -398,22 +355,20 @@ def get_bandit_severity(self, sarif_scanner: str, result: dict) -> str: def get_dependency_check_cvss( self, sarif_scanner: str, sarif_rule: Rule, version: int - ): + ) -> Tuple[Optional[float], str]: # Dependency Check SARIF has no proper level, but stores the severity in a property if ( sarif_scanner.lower().startswith("dependency-check") and sarif_rule.properties and isinstance(sarif_rule.properties, dict) ): - return sarif_rule.properties.get( - f"cvssv{version}_baseScore" - ), sarif_rule.properties.get(f"cvssv{version}_vector") + return sarif_rule.properties.get(f"cvssv{version}_baseScore"), sarif_rule.properties.get( + f"cvssv{version}_vector" + ) - return None, None + return None, "" - def get_dependency_check_vulnerability_id( - self, sarif_scanner: str, title: str - ) -> str: + def get_dependency_check_vulnerability_id(self, sarif_scanner: str, title: str) -> str: # Dependency Check sets the title with a vulnerability if sarif_scanner.lower().startswith("dependency-check") and ( title.startswith("CVE-") or title.startswith("GHSA-") @@ -422,15 +377,10 @@ def get_dependency_check_vulnerability_id( return "" - def get_dependency_check_origin_component_purl( - self, sarif_scanner: str, location: Optional[dict] - ) -> str: + def get_dependency_check_origin_component_purl(self, sarif_scanner: str, location: Optional[dict]) -> str: if location: logicalLocations = location.get("logicalLocations") - if ( - sarif_scanner.lower().startswith("dependency-check") - and logicalLocations - ): + if sarif_scanner.lower().startswith("dependency-check") and logicalLocations: fully_qualified_name = logicalLocations[0].get("fullyQualifiedName") if fully_qualified_name and fully_qualified_name.startswith("pkg:"): return fully_qualified_name diff --git a/backend/application/import_observations/parsers/secobserve/parser.py b/backend/application/import_observations/parsers/secobserve/parser.py index 574f149fb..10c72fb5f 100644 --- a/backend/application/import_observations/parsers/secobserve/parser.py +++ b/backend/application/import_observations/parsers/secobserve/parser.py @@ -1,6 +1,6 @@ -from typing import Any +from typing import Any, Optional -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.import_observations.parsers.base_parser import ( BaseFileParser, BaseParser, @@ -26,8 +26,8 @@ def check_format(self, data: Any) -> bool: return True return False - def get_observations(self, data: dict) -> list[Observation]: - observations = [] + def get_observations(self, data: dict, product: Product, branch: Optional[Branch]) -> tuple[list[Observation], str]: + observations: list[Observation] = [] for uploaded_observation in data.get("observations", []): observation = Observation( @@ -35,37 +35,21 @@ def get_observations(self, data: dict) -> list[Observation]: description=uploaded_observation.get("description"), recommendation=uploaded_observation.get("recommendation"), parser_severity=uploaded_observation.get("parser_severity"), - scanner_observation_id=uploaded_observation.get( - "scanner_observation_id" - ), + scanner_observation_id=uploaded_observation.get("scanner_observation_id"), 
vulnerability_id=uploaded_observation.get("vulnerability_id"), origin_component_name=uploaded_observation.get("origin_component_name"), - origin_component_version=uploaded_observation.get( - "origin_component_version" - ), - origin_component_name_version=uploaded_observation.get( - "origin_component_name_version" - ), + origin_component_version=uploaded_observation.get("origin_component_version"), + origin_component_name_version=uploaded_observation.get("origin_component_name_version"), origin_component_purl=uploaded_observation.get("origin_component_purl"), origin_component_cpe=uploaded_observation.get("origin_component_cpe"), - origin_docker_image_name=uploaded_observation.get( - "origin_docker_image_name" - ), - origin_docker_image_tag=uploaded_observation.get( - "origin_docker_image_tag" - ), - origin_docker_image_name_tag=uploaded_observation.get( - "origin_docker_image_name_tag" - ), + origin_docker_image_name=uploaded_observation.get("origin_docker_image_name"), + origin_docker_image_tag=uploaded_observation.get("origin_docker_image_tag"), + origin_docker_image_name_tag=uploaded_observation.get("origin_docker_image_name_tag"), origin_endpoint_url=uploaded_observation.get("origin_endpoint_url"), origin_service_name=uploaded_observation.get("origin_service_name"), origin_source_file=uploaded_observation.get("origin_source_file"), - origin_source_line_start=uploaded_observation.get( - "origin_source_line_start" - ), - origin_source_line_end=uploaded_observation.get( - "origin_source_line_end" - ), + origin_source_line_start=uploaded_observation.get("origin_source_line_start"), + origin_source_line_end=uploaded_observation.get("origin_source_line_end"), cvss3_score=uploaded_observation.get("cvss3_score"), cvss3_vector=uploaded_observation.get("cvss3_vector"), cwe=uploaded_observation.get("cwe"), @@ -75,4 +59,6 @@ def get_observations(self, data: dict) -> list[Observation]: if reference: observation.unsaved_references = [reference] observations.append(observation) - return observations + + scanner = observations[0].scanner if observations else self.get_name() + return observations, scanner diff --git a/backend/application/import_observations/parsers/spdx/parser.py b/backend/application/import_observations/parsers/spdx/parser.py index f5ed043e0..a2e6d395f 100644 --- a/backend/application/import_observations/parsers/spdx/parser.py +++ b/backend/application/import_observations/parsers/spdx/parser.py @@ -1,13 +1,12 @@ from json import dumps -from typing import Any +from typing import Any, Optional from rest_framework.exceptions import ValidationError -from spdx_tools.spdx.model.document import Document -from spdx_tools.spdx.model.relationship import RelationshipType +from spdx_tools.spdx.model import Document, RelationshipType, SpdxNoAssertion, SpdxNone from spdx_tools.spdx.parser.error import SPDXParsingError from spdx_tools.spdx.parser.jsonlikedict.json_like_dict_parser import JsonLikeDictParser -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.import_observations.parsers.base_parser import ( BaseFileParser, BaseParser, @@ -29,26 +28,26 @@ def get_filetype(cls) -> str: def get_type(cls) -> str: return Parser_Type.TYPE_SCA + @classmethod + def sbom(cls) -> bool: + return True + def check_format(self, data: Any) -> bool: - if ( - isinstance(data, dict) - and data.get("SPDXID") - and (data.get("SPDXVersion") or data.get("spdxVersion")) - ): + if isinstance(data, dict) and data.get("SPDXID") and 
(data.get("SPDXVersion") or data.get("spdxVersion")): return True return False - def get_observations(self, data: dict) -> list[Observation]: - return [] + def get_observations(self, data: dict, product: Product, branch: Optional[Branch]) -> tuple[list[Observation], str]: + return [], "" - def get_license_components(self, data: dict) -> list[License_Component]: + def get_license_components(self, data: dict) -> tuple[list[License_Component], str]: try: document: Document = JsonLikeDictParser().parse(data) except SPDXParsingError as e: - raise ValidationError( # pylint: disable=raise-missing-from - e.get_messages() - ) - # The DjangoValidationError itself is not relevant and must not be re-raised + raise ValidationError(e.get_messages()) # pylint: disable=raise-missing-from + # The ValidationError itself is not relevant and must not be re-raised + + scanner = self._get_scanner(data) observations = [] @@ -61,19 +60,21 @@ def get_license_components(self, data: dict) -> list[License_Component]: if package.version is not None: version = str(package.version) - unsaved_license = None + concluded_licenses: list[str] = [] if ( package.license_concluded is not None - and str(package.license_concluded) != "" - and str(package.license_concluded) != "NOASSERTION" + and not isinstance(package.license_concluded, SpdxNoAssertion) + and not isinstance(package.license_concluded, SpdxNone) ): - unsaved_license = package.license_concluded - elif ( + concluded_licenses.append(str(package.license_concluded)) + + declared_licenses: list[str] = [] + if ( package.license_declared is not None - and str(package.license_declared) != "" - and str(package.license_declared) != "NOASSERTION" + and not isinstance(package.license_declared, SpdxNoAssertion) + and not isinstance(package.license_declared, SpdxNone) ): - unsaved_license = package.license_declared + declared_licenses.append(str(package.license_declared)) purl = "" for external_reference in package.external_references: @@ -81,9 +82,7 @@ def get_license_components(self, data: dict) -> list[License_Component]: purl = external_reference.locator break - dependencies = self._get_dependencies( - package.spdx_id, packages, relationships - ) + dependencies = self._get_dependencies(package.spdx_id, packages, relationships) license_component = License_Component( component_name=package.name, @@ -91,8 +90,9 @@ def get_license_components(self, data: dict) -> list[License_Component]: component_purl=purl, component_dependencies=dependencies, ) - if unsaved_license is not None: - license_component.unsaved_license = str(unsaved_license) + + license_component.unsaved_declared_licenses = declared_licenses + license_component.unsaved_concluded_licenses = concluded_licenses evidence = [] package_json = packages.get(package.spdx_id) @@ -103,7 +103,19 @@ def get_license_components(self, data: dict) -> list[License_Component]: observations.append(license_component) - return observations + return observations, scanner + + def _get_scanner(self, data: dict) -> str: + scanner = "" + + creators = data.get("creationInfo", {}).get("creators", []) + for creator in creators: + creator_elements = creator.split(":") + if len(creator_elements) == 2 and creator_elements[0].strip() == "Tool": + scanner = creator_elements[1].strip() + break + + return scanner def _create_package_dict(self, data: dict) -> dict[str, dict]: package_dict = {} @@ -111,35 +123,22 @@ def _create_package_dict(self, data: dict) -> dict[str, dict]: package_dict[package["SPDXID"]] = package return package_dict - def 
_create_relationship_dict( - self, document: Document, package_dict: dict[str, dict] - ) -> dict[str, list[str]]: + def _create_relationship_dict(self, document: Document, package_dict: dict[str, dict]) -> dict[str, list[str]]: relationship_dict: dict[str, list[str]] = {} for relationship in document.relationships: if ( relationship.spdx_element_id in package_dict.keys() and relationship.related_spdx_element_id in package_dict.keys() - and ( - relationship.relationship_type - in (RelationshipType.DEPENDS_ON, RelationshipType.CONTAINS) - ) + and (relationship.relationship_type in (RelationshipType.DEPENDS_ON, RelationshipType.CONTAINS)) ): - relationship_value = relationship_dict.get( - str(relationship.related_spdx_element_id), [] - ) + relationship_value = relationship_dict.get(str(relationship.related_spdx_element_id), []) relationship_value.append(relationship.spdx_element_id) - relationship_dict[str(relationship.related_spdx_element_id)] = ( - relationship_value - ) + relationship_dict[str(relationship.related_spdx_element_id)] = relationship_value return relationship_dict - def _get_dependencies( - self, package_id: str, package_dict: dict, relationship_dict: dict - ) -> str: + def _get_dependencies(self, package_id: str, package_dict: dict, relationship_dict: dict) -> str: dependencies: list[str] = [] - self._get_dependencies_recursive( - package_id, package_dict, relationship_dict, dependencies - ) + self._get_dependencies_recursive(package_id, package_dict, relationship_dict, dependencies) dependencies.sort() return "\n".join(dependencies) @@ -153,18 +152,12 @@ def _get_dependencies_recursive( ) -> None: if package_id in relationship_dict.keys(): for dependency_id in relationship_dict[package_id]: - translated_dependency_id = self._translate_package_id( - dependency_id, package_dict - ) - translated_package_id = self._translate_package_id( - package_id, package_dict - ) + translated_dependency_id = self._translate_package_id(dependency_id, package_dict) + translated_package_id = self._translate_package_id(package_id, package_dict) dependency = f"{translated_dependency_id} --> {translated_package_id}" if dependency not in dependencies: dependencies.append(dependency) - self._get_dependencies_recursive( - dependency_id, package_dict, relationship_dict, dependencies - ) + self._get_dependencies_recursive(dependency_id, package_dict, relationship_dict, dependencies) def _translate_package_id(self, package_id: str, package_dict: dict) -> str: package = package_dict.get(package_id) diff --git a/backend/application/import_observations/parsers/trivy_operator_prometheus/parser.py b/backend/application/import_observations/parsers/trivy_operator_prometheus/parser.py index 430d8ebc4..46729bd77 100644 --- a/backend/application/import_observations/parsers/trivy_operator_prometheus/parser.py +++ b/backend/application/import_observations/parsers/trivy_operator_prometheus/parser.py @@ -1,10 +1,10 @@ import json from dataclasses import dataclass -from typing import Optional +from typing import Any, Optional import requests -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.core.types import Severity from application.import_observations.models import Api_Configuration from application.import_observations.parsers.base_parser import ( @@ -22,7 +22,7 @@ class KubernetesResource: class TrivyOperatorPrometheus(BaseParser, BaseAPIParser): - def __init__(self): + def __init__(self) -> None: self.api_configuration: 
Optional[Api_Configuration] = None @classmethod @@ -33,29 +33,21 @@ def get_name(cls) -> str: def get_type(cls) -> str: return Parser_Type.TYPE_OTHER - def check_connection( - self, api_configuration: Api_Configuration - ) -> tuple[bool, list[str], dict]: + def check_connection(self, api_configuration: Api_Configuration) -> tuple[bool, list[str], dict]: self.api_configuration = api_configuration trivy_operator_prometheus_base_url = api_configuration.base_url trivy_operator_prometheus_query = api_configuration.query trivy_operator_prometheus_verify_ssl = api_configuration.verify_ssl trivy_operator_prometheus_basic_auth = api_configuration.basic_auth_enabled - trivy_operator_prometheus_basic_auth_username = ( - api_configuration.basic_auth_username - ) - trivy_operator_prometheus_basic_auth_password = ( - api_configuration.basic_auth_password - ) + trivy_operator_prometheus_basic_auth_username = api_configuration.basic_auth_username + trivy_operator_prometheus_basic_auth_password = api_configuration.basic_auth_password if not trivy_operator_prometheus_base_url.endswith("/"): trivy_operator_prometheus_base_url += "/" trivy_operator_prometheus_url = ( - trivy_operator_prometheus_base_url - + "api/v1/query?query=" - + trivy_operator_prometheus_query + trivy_operator_prometheus_base_url + "api/v1/query?query=" + trivy_operator_prometheus_query ) trivy_basic_auth_param = None @@ -78,7 +70,7 @@ def check_connection( return True, [], response.json() - def check_format(self, import_data) -> tuple[bool, list[str], dict]: + def check_format(self, import_data: Any) -> tuple[bool, list[str], dict]: try: data = json.load(import_data) except Exception: @@ -87,14 +79,12 @@ def check_format(self, import_data) -> tuple[bool, list[str], dict]: if not data.get("status") == "success": return False, ["Data is not a Prometheus API-Endpoint"], {} - if not isinstance(data.get("data"), dict) or not isinstance( - data.get("data").get("result"), list - ): + if not isinstance(data.get("data"), dict) or not isinstance(data.get("data").get("result"), list): return False, ["Data not in valid Prometheus-Metric Format"], {} return True, [], data - def get_observations(self, data: dict) -> list[Observation]: + def get_observations(self, data: dict, product: Product, branch: Optional[Branch]) -> tuple[list[Observation], str]: observations = [] for finding in data.get("data", {}).get("result"): if ( @@ -103,49 +93,33 @@ def get_observations(self, data: dict) -> list[Observation]: ): observation = self._create_compliance_observation(finding) elif ( - finding.get("metric", {}).get("__name__", "") - == "trivy_configaudits_info" + finding.get("metric", {}).get("__name__", "") == "trivy_configaudits_info" and finding.get("metric", {}).get("config_audit_success", "") == "false" ): observation = self._create_config_audit_observation(finding) - elif ( - finding.get("metric", {}).get("__name__", "") - == "trivy_exposedsecrets_info" - ): + elif finding.get("metric", {}).get("__name__", "") == "trivy_exposedsecrets_info": observation = self._create_exposedsecrets_observation(finding) elif ( - finding.get("metric", {}).get("__name__", "") - == "trivy_rbacassessments_info" - and finding.get("metric", {}).get("rbac_assessment_success", "") - == "false" + finding.get("metric", {}).get("__name__", "") == "trivy_rbacassessments_info" + and finding.get("metric", {}).get("rbac_assessment_success", "") == "false" ): observation = self._create_rbac_assessment_observation(finding) - elif ( - finding.get("metric", {}).get("__name__", "") - == 
"trivy_vulnerability_id" - ): + elif finding.get("metric", {}).get("__name__", "") == "trivy_vulnerability_id": observation = self._create_vulnerability_observation(finding) else: continue kubernetes_resource = self._get_kubernetes_resource(finding) observation.origin_kubernetes_namespace = kubernetes_resource.namespace - observation.origin_kubernetes_resource_type = ( - kubernetes_resource.resource_kind - ) + observation.origin_kubernetes_resource_type = kubernetes_resource.resource_kind if ( - finding.get("metric", {}).get("__name__", "") - == "trivy_rbacassessments_info" + finding.get("metric", {}).get("__name__", "") == "trivy_rbacassessments_info" and not kubernetes_resource.resource_name ): - kubernetes_resource.resource_name = finding.get("metric", {}).get( - "name", "" - ) + kubernetes_resource.resource_name = finding.get("metric", {}).get("name", "") - observation.origin_kubernetes_resource_name = ( - kubernetes_resource.resource_name - ) + observation.origin_kubernetes_resource_name = kubernetes_resource.resource_name observation.scanner = "Trivy Operator" @@ -156,9 +130,9 @@ def get_observations(self, data: dict) -> list[Observation]: observations.append(observation) - return observations + return observations, self.get_name() - def _create_compliance_observation(self, finding) -> Observation: + def _create_compliance_observation(self, finding: dict) -> Observation: title = finding.get("metric", {}).get("title", "") compliance_name = finding.get("metric", {}).get("compliance_name", "") compliance_id = finding.get("metric", {}).get("compliance_id", "") @@ -168,28 +142,22 @@ def _create_compliance_observation(self, finding) -> Observation: return Observation( title=f"{title} / {compliance_name}", parser_severity=self._get_severity(severity), - description=self._get_description( - description=description, audit_id=compliance_id - ), + description=self._get_description(description=description, audit_id=compliance_id), ) - def _create_config_audit_observation(self, finding) -> Observation: + def _create_config_audit_observation(self, finding: dict) -> Observation: config_audit_title = finding.get("metric", {}).get("config_audit_title", "") - config_audit_description = finding.get("metric", {}).get( - "config_audit_description", "" - ) + config_audit_description = finding.get("metric", {}).get("config_audit_description", "") config_audit_id = finding.get("metric", {}).get("config_audit_id", "") severity = finding.get("metric", {}).get("severity", Severity.SEVERITY_UNKNOWN) return Observation( title=config_audit_title, parser_severity=self._get_severity(severity), - description=self._get_description( - description=config_audit_description, audit_id=config_audit_id - ), + description=self._get_description(description=config_audit_description, audit_id=config_audit_id), ) - def _create_exposedsecrets_observation(self, finding) -> Observation: + def _create_exposedsecrets_observation(self, finding: dict) -> Observation: image_registry = finding.get("metric", {}).get("image_registry", "") image_repository = finding.get("metric", {}).get("image_repository", "") image_tag = finding.get("metric", {}).get("image_tag", "") @@ -205,44 +173,30 @@ def _create_exposedsecrets_observation(self, finding) -> Observation: origin_source_file=secret_target, ) - def _create_rbac_assessment_observation(self, finding) -> Observation: - rbac_assessment_title = finding.get("metric", {}).get( - "rbac_assessment_title", "" - ) - rbac_assessment_description = finding.get("metric", {}).get( - 
"rbac_assessment_description", "" - ) + def _create_rbac_assessment_observation(self, finding: dict) -> Observation: + rbac_assessment_title = finding.get("metric", {}).get("rbac_assessment_title", "") + rbac_assessment_description = finding.get("metric", {}).get("rbac_assessment_description", "") rbac_assessment_id = finding.get("metric", {}).get("rbac_assessment_id", "") severity = finding.get("metric", {}).get("severity", Severity.SEVERITY_UNKNOWN) return Observation( title=rbac_assessment_title, parser_severity=self._get_severity(severity), - description=self._get_description( - description=rbac_assessment_description, audit_id=rbac_assessment_id - ), + description=self._get_description(description=rbac_assessment_description, audit_id=rbac_assessment_id), ) - def _create_vulnerability_observation(self, finding) -> Observation: + def _create_vulnerability_observation(self, finding: dict) -> Observation: origin_component_name = finding.get("metric", {}).get("resource", "") vuln_title = finding.get("metric", {}).get("vuln_title", "") vulnerability_id = finding.get("metric", {}).get("vuln_id", "") cvss3_score = finding.get("metric", {}).get("vuln_score") severity = finding.get("metric", {}).get("severity", Severity.SEVERITY_UNKNOWN) - origin_docker_image_registry = finding.get("metric", {}).get( - "image_registry", "" - ) - origin_docker_image_repository = finding.get("metric", {}).get( - "image_repository", "" - ) - origin_docker_image_name = ( - origin_docker_image_registry + "/" + origin_docker_image_repository - ) + origin_docker_image_registry = finding.get("metric", {}).get("image_registry", "") + origin_docker_image_repository = finding.get("metric", {}).get("image_repository", "") + origin_docker_image_name = origin_docker_image_registry + "/" + origin_docker_image_repository origin_docker_image_tag = finding.get("metric", {}).get("image_tag", "") fixed_version = finding.get("metric", {}).get("fixed_version", "") - origin_component_version = finding.get("metric", {}).get( - "installed_version", "" - ) + origin_component_version = finding.get("metric", {}).get("installed_version", "") return Observation( title=vulnerability_id, @@ -254,22 +208,18 @@ def _create_vulnerability_observation(self, finding) -> Observation: cvss3_score=cvss3_score, origin_component_name=origin_component_name, origin_component_version=origin_component_version, - recommendation=self._get_vulnerability_recommendation( - fixed_version, origin_component_version - ), + recommendation=self._get_vulnerability_recommendation(fixed_version, origin_component_version), description=self._get_description(description=vuln_title), ) def _get_vulnerability_recommendation( self, - fixed_version, - origin_component_version, + fixed_version: str, + origin_component_version: str, ) -> str: recommendation = "" if fixed_version: - recommendation += ( - f"Upgrade from **{origin_component_version}** to **{fixed_version}**" - ) + recommendation += f"Upgrade from **{origin_component_version}** to **{fixed_version}**" return recommendation @@ -288,9 +238,7 @@ def _get_description( description += f"\n\n**Assessment ID:** {audit_id}" prometheus_endpoint_url = ( - self.api_configuration.base_url - if isinstance(self.api_configuration, Api_Configuration) - else "" + self.api_configuration.base_url if isinstance(self.api_configuration, Api_Configuration) else "" ) if prometheus_endpoint_url: description += f"\n\n**Prometheus host:** {prometheus_endpoint_url}" @@ -306,7 +254,7 @@ def _get_severity(self, severity: str) -> str: return 
Severity.SEVERITY_UNKNOWN - def _get_kubernetes_resource(self, finding) -> KubernetesResource: + def _get_kubernetes_resource(self, finding: dict) -> KubernetesResource: return KubernetesResource( namespace=finding.get("metric", {}).get("namespace", ""), resource_kind=finding.get("metric", {}).get("resource_kind", ""), diff --git a/backend/application/import_observations/parsers/zap/parser.py b/backend/application/import_observations/parsers/zap/parser.py index 864c41c64..72aee72dc 100644 --- a/backend/application/import_observations/parsers/zap/parser.py +++ b/backend/application/import_observations/parsers/zap/parser.py @@ -1,7 +1,7 @@ from json import dumps -from typing import Any +from typing import Any, Optional -from application.core.models import Observation +from application.core.models import Branch, Observation, Product from application.core.types import Severity from application.import_observations.parsers.base_parser import ( BaseFileParser, @@ -36,7 +36,7 @@ def check_format(self, data: Any) -> bool: return True return False - def get_observations(self, data: dict) -> list[Observation]: + def get_observations(self, data: dict, product: Product, branch: Optional[Branch]) -> tuple[list[Observation], str]: observations = [] data_scanner = self.get_scanner(data) @@ -45,9 +45,7 @@ def get_observations(self, data: dict) -> list[Observation]: data_origin_endpoint_url = site.get("@name") for alert in site.get("alerts"): data_title = alert.get("alert") - data_severity = SEVERITIES.get( - alert.get("riskcode"), Severity.SEVERITY_UNKNOWN - ) + data_severity = SEVERITIES.get(alert.get("riskcode"), Severity.SEVERITY_UNKNOWN) data_description = self.get_description(alert) data_recommendation = alert.get("solution") @@ -75,7 +73,7 @@ def get_observations(self, data: dict) -> list[Observation]: observations.append(observation) - return observations + return observations, data_scanner def get_scanner(self, data: dict) -> str: data_program_name = data.get("@programName") diff --git a/backend/application/import_observations/queries/api_configuration.py b/backend/application/import_observations/queries/api_configuration.py index 8423661d8..8577b14d9 100644 --- a/backend/application/import_observations/queries/api_configuration.py +++ b/backend/application/import_observations/queries/api_configuration.py @@ -3,7 +3,7 @@ from django.db.models import Exists, OuterRef, Q from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import ( Product, Product_Authorization_Group_Member, @@ -21,9 +21,7 @@ def get_api_configuration_by_id( return None -def get_api_configuration_by_name( - product: Product, name: str -) -> Optional[Api_Configuration]: +def get_api_configuration_by_name(product: Product, name: str) -> Optional[Api_Configuration]: try: return Api_Configuration.objects.get(product=product, name=name) except Api_Configuration.DoesNotExist: @@ -39,34 +37,24 @@ def get_api_configurations() -> QuerySet[Api_Configuration]: api_configurations = Api_Configuration.objects.all() if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("product_id"), user=user - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product__product_group"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = 
Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, ) api_configurations = api_configurations.annotate( product__member=Exists(product_members), product__product_group__member=Exists(product_group_members), authorization_group_member=Exists(product_authorization_group_members), - product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), ) api_configurations = api_configurations.filter( diff --git a/backend/application/import_observations/queries/parser.py b/backend/application/import_observations/queries/parser.py index f06d13ccd..be042a2f5 100644 --- a/backend/application/import_observations/queries/parser.py +++ b/backend/application/import_observations/queries/parser.py @@ -17,9 +17,7 @@ def get_parser_by_name(name: str) -> Optional[Parser]: return None -def get_parser_by_module_and_class( - module_name: str, class_name: str -) -> Optional[Parser]: +def get_parser_by_module_and_class(module_name: str, class_name: str) -> Optional[Parser]: try: return Parser.objects.get(module_name=module_name, class_name=class_name) except Parser.DoesNotExist: diff --git a/backend/application/import_observations/queries/vulnerability_check.py b/backend/application/import_observations/queries/vulnerability_check.py index 289586a80..8d36cf9d4 100644 --- a/backend/application/import_observations/queries/vulnerability_check.py +++ b/backend/application/import_observations/queries/vulnerability_check.py @@ -1,7 +1,7 @@ from django.db.models import Exists, OuterRef, Q from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import Product_Authorization_Group_Member, Product_Member from application.import_observations.models import Vulnerability_Check @@ -15,34 +15,24 @@ def get_vulnerability_checks() -> QuerySet[Vulnerability_Check]: vulnerability_checks = Vulnerability_Check.objects.all() if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("product_id"), user=user - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product__product_group"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + 
product=OuterRef("product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, ) vulnerability_checks = vulnerability_checks.annotate( product__member=Exists(product_members), product__product_group__member=Exists(product_group_members), authorization_group_member=Exists(product_authorization_group_members), - product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), ) vulnerability_checks = vulnerability_checks.filter( diff --git a/backend/application/import_observations/scanners/__init__.py b/backend/application/import_observations/scanners/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/import_observations/scanners/osv_scanner.py b/backend/application/import_observations/scanners/osv_scanner.py new file mode 100644 index 000000000..3241ee6c1 --- /dev/null +++ b/backend/application/import_observations/scanners/osv_scanner.py @@ -0,0 +1,251 @@ +from dataclasses import dataclass +from datetime import datetime +from typing import Optional, Tuple + +import jsonpickle +import requests + +from application.commons.models import Settings +from application.core.models import Branch, Product, Service +from application.import_observations.models import Vulnerability_Check +from application.import_observations.parsers.osv.parser import ( + OSV_Component, + OSV_Vulnerability, + OSVParser, +) +from application.import_observations.queries.parser import get_parser_by_name +from application.import_observations.services.import_observations import ( + ImportParameters, + _process_data, +) +from application.licenses.models import License_Component + + +@dataclass +class RequestPURL: + purl: str + + +@dataclass +class RequestPackage: + package: RequestPURL + page_token: Optional[str] + + +@dataclass +class RequestQueries: + queries: list[RequestPackage] + + +class OSVException(Exception): + def __init__(self, message: str): + super().__init__(message) + self.message = message + + +def scan_product(product: Product) -> Tuple[int, int, int]: + numbers: Tuple[int, int, int] = (0, 0, 0) + + new, updated, resolved = scan_no_branch_no_service(product) + numbers = ( + numbers[0] + new, + numbers[1] + updated, + numbers[2] + resolved, + ) + + branches = Branch.objects.filter(product=product) + for branch in branches: + new, updated, resolved = scan_branch_no_service(branch) + numbers = ( + numbers[0] + new, + numbers[1] + updated, + numbers[2] + resolved, + ) + + services = Service.objects.filter(product=product) + for service in services: + new, updated, resolved = scan_no_branch_but_service(product, service) + numbers = ( + numbers[0] + new, + numbers[1] + updated, + numbers[2] + resolved, + ) + + for branch in branches: + new, updated, resolved = scan_branch_and_service(branch, service) + numbers = ( + numbers[0] + new, + numbers[1] + updated, + numbers[2] + resolved, + ) + + return numbers + + +def scan_branch(branch: Branch) -> Tuple[int, int, int]: + numbers: Tuple[int, int, int] = (0, 0, 0) + + new, updated, resolved = scan_branch_no_service(branch) + numbers = ( + numbers[0] + new, + 
numbers[1] + updated, + numbers[2] + resolved, + ) + + services = Service.objects.filter(product=branch.product) + for service in services: + new, updated, resolved = scan_branch_and_service(branch, service) + numbers = ( + numbers[0] + new, + numbers[1] + updated, + numbers[2] + resolved, + ) + + return numbers + + +def scan_no_branch_no_service(product: Product) -> Tuple[int, int, int]: + license_components = list( + License_Component.objects.filter(product=product, branch__isnull=True, origin_service__isnull=True).exclude( + component_purl="" + ) + ) + return scan_license_components(license_components, product, None, None) + + +def scan_branch_no_service(branch: Branch) -> Tuple[int, int, int]: + license_components = list( + License_Component.objects.filter(branch=branch, origin_service__isnull=True).exclude(component_purl="") + ) + return scan_license_components(license_components, branch.product, branch, None) + + +def scan_no_branch_but_service(product: Product, service: Service) -> Tuple[int, int, int]: + license_components = list( + License_Component.objects.filter(product=product, branch__isnull=True, origin_service=service).exclude( + component_purl="" + ) + ) + return scan_license_components(license_components, product, None, service) + + +def scan_branch_and_service(branch: Branch, service: Service) -> Tuple[int, int, int]: + license_components = list( + License_Component.objects.filter(branch=branch, origin_service=service).exclude(component_purl="") + ) + return scan_license_components(license_components, branch.product, branch, service) + + +def scan_license_components( + license_components: list[License_Component], product: Product, branch: Optional[Branch], service: Optional[Service] +) -> Tuple[int, int, int]: + if not license_components: + return 0, 0, 0 + + jsonpickle.set_encoder_options("json", ensure_ascii=False) + + next_pages: dict[License_Component, str] = {} + osv_components, next_pages = _do_scan(license_components, next_pages) + + while next_pages: + new_osv_components, next_pages = _do_scan(list(next_pages.keys()), next_pages) + osv_components += new_osv_components + + osv_parser = OSVParser() + observations, scanner = osv_parser.get_observations(osv_components, product, branch) + + parser = get_parser_by_name(osv_parser.get_name()) + if parser is None: + raise OSVException(f"Parser {osv_parser.get_name()} not found") # pylint: disable=broad-exception-raised + + import_parameters = ImportParameters( + product=product, + branch=branch, + service=service, + parser=parser, + filename="", + api_configuration_name="", + docker_image_name_tag="", + endpoint_url="", + kubernetes_cluster="", + imported_observations=observations, + ) + numbers: Tuple[int, int, int] = _process_data(import_parameters, Settings.load()) + + Vulnerability_Check.objects.update_or_create( + product=product, + branch=branch, + service=service, + filename="", + api_configuration_name="", + defaults={ + "last_import_observations_new": numbers[0], + "last_import_observations_updated": numbers[1], + "last_import_observations_resolved": numbers[2], + "scanner": scanner, + }, + ) + + return numbers[0], numbers[1], numbers[2] + + +def _do_scan( + license_components: list[License_Component], + next_pages: dict[License_Component, str], +) -> Tuple[list[OSV_Component], dict]: + + osv_components = [ + OSV_Component( + license_component=license_component, + vulnerabilities=set(), + ) + for license_component in license_components + ] + + slice_actual = 0 + slice_size = 500 + results = [] + + while 
slice_actual * slice_size < len(license_components): + queries = RequestQueries( + queries=[ + RequestPackage( + RequestPURL(purl=license_component.component_purl), + next_pages[license_component] if next_pages else None, + ) + for license_component in license_components[ + (slice_actual * slice_size) : ((slice_actual + 1) * slice_size) # noqa: E203 + ] + ] + ) + + response = requests.post( # nosec B113 + # This is a false positive, there is a timeout of 5 minutes + url="https://api.osv.dev/v1/querybatch", + data=jsonpickle.encode(queries, unpicklable=False), + timeout=5 * 60, + ) + + response.raise_for_status() + results.extend(response.json().get("results", [])) + + slice_actual += 1 + + if len(osv_components) != len(results): + raise OSVException( # pylint: disable=broad-exception-raised + "Number of results is different than number of components" + ) + + new_next_pages: dict[License_Component, str] = {} + for i, result in enumerate(results): + for vuln in result.get("vulns", []): + osv_components[i].vulnerabilities.add( + OSV_Vulnerability( + id=vuln.get("id"), + modified=datetime.fromisoformat(vuln.get("modified")), + ) + ) + if result.get("next_page_token"): + new_next_pages[osv_components[i].license_component] = result.get("next_page_token") + + return osv_components, new_next_pages diff --git a/backend/application/import_observations/services/import_observations.py b/backend/application/import_observations/services/import_observations.py index 0c697c0bf..7dfbbd91b 100644 --- a/backend/application/import_observations/services/import_observations.py +++ b/backend/application/import_observations/services/import_observations.py @@ -27,20 +27,23 @@ ) from application.core.services.observation_log import create_observation_log from application.core.services.potential_duplicates import find_potential_duplicates -from application.core.services.product import set_repository_default_branch from application.core.services.risk_acceptance_expiry import ( calculate_risk_acceptance_expiry_date, ) from application.core.services.security_gate import check_security_gate from application.core.types import Assessment_Status, Status -from application.epss.services.epss import epss_apply_observation +from application.epss.services.cvss_bt import apply_exploit_information +from application.epss.services.epss import apply_epss from application.import_observations.exceptions import ParserError from application.import_observations.models import ( Api_Configuration, Parser, Vulnerability_Check, ) -from application.import_observations.parsers.base_parser import BaseAPIParser +from application.import_observations.parsers.base_parser import ( + BaseAPIParser, + BaseFileParser, +) from application.import_observations.services.parser_detector import ( detect_parser, instanciate_parser, @@ -48,19 +51,33 @@ from application.issue_tracker.services.issue_tracker import ( push_observations_to_issue_tracker, ) -from application.licenses.services.license_component import process_license_components +from application.licenses.models import ( + License_Component, + License_Component_Evidence, +) +from application.licenses.services.concluded_license import ConcludeLicenseApplicator +from application.licenses.services.license_component import ( + prepare_license_component, + set_effective_license, +) +from application.licenses.services.license_policy import apply_license_policy_product +from application.licenses.services.licenselynx import apply_licenselynx +from application.licenses.services.spdx_license_cache import 
SPDXLicenseCache +from application.licenses.types import NO_LICENSE_INFORMATION from application.rules.services.rule_engine import Rule_Engine from application.vex.services.vex_engine import VEX_Engine +SBOM_BULK_BATCH_SIZE = 100 + @dataclass class ImportParameters: product: Product branch: Optional[Branch] + service: Optional[Service] parser: Parser filename: str api_configuration_name: str - service: str docker_image_name_tag: str endpoint_url: str kubernetes_cluster: str @@ -72,18 +89,19 @@ class FileUploadParameters: product: Product branch: Optional[Branch] file: File - service: str + service_name: str docker_image_name_tag: str endpoint_url: str kubernetes_cluster: str suppress_licenses: bool + sbom: bool @dataclass class ApiImportParameters: api_configuration: Api_Configuration branch: Optional[Branch] - service: str + service_name: str docker_image_name_tag: str endpoint_url: str kubernetes_cluster: str @@ -93,73 +111,74 @@ def file_upload_observations( file_upload_parameters: FileUploadParameters, ) -> Tuple[int, int, int, int, int, int]: + settings = Settings.load() parser, parser_instance, data = detect_parser(file_upload_parameters.file) - imported_observations = parser_instance.get_observations(data) + filename = os.path.basename(file_upload_parameters.file.name) if file_upload_parameters.file.name else "" - filename = ( - os.path.basename(file_upload_parameters.file.name) - if file_upload_parameters.file.name - else "" - ) + numbers_observations: Tuple[int, int, int] = 0, 0, 0 + new_observations = None + updated_observations = None + resolved_observations = None - import_parameters = ImportParameters( + scanner = "" + + service = None + if file_upload_parameters.service_name: + service = Service.objects.get_or_create( + product=file_upload_parameters.product, name=file_upload_parameters.service_name + )[0] + + if not file_upload_parameters.sbom: + imported_observations, scanner = parser_instance.get_observations( + data, file_upload_parameters.product, file_upload_parameters.branch + ) + + import_parameters = ImportParameters( + product=file_upload_parameters.product, + branch=file_upload_parameters.branch, + service=service, + parser=parser, + filename=filename, + api_configuration_name="", + docker_image_name_tag=file_upload_parameters.docker_image_name_tag, + endpoint_url=file_upload_parameters.endpoint_url, + kubernetes_cluster=file_upload_parameters.kubernetes_cluster, + imported_observations=imported_observations, + ) + + numbers_observations = _process_data(import_parameters, settings) + new_observations = numbers_observations[0] + updated_observations = numbers_observations[1] + resolved_observations = numbers_observations[2] + else: + if not isinstance(parser_instance, BaseFileParser) or not parser_instance.sbom(): + raise ValidationError(f"{parser.name} is not a SBOM parser") + + vulnerability_check, _ = Vulnerability_Check.objects.update_or_create( product=file_upload_parameters.product, branch=file_upload_parameters.branch, - parser=parser, + service=service, filename=filename, api_configuration_name="", - service=file_upload_parameters.service, - docker_image_name_tag=file_upload_parameters.docker_image_name_tag, - endpoint_url=file_upload_parameters.endpoint_url, - kubernetes_cluster=file_upload_parameters.kubernetes_cluster, - imported_observations=imported_observations, - ) - - numbers_observations: Tuple[int, int, int, str] = _process_data(import_parameters) - - vulnerability_check, _ = Vulnerability_Check.objects.update_or_create( - 
product=import_parameters.product, - branch=import_parameters.branch, - filename=import_parameters.filename, defaults={ - "last_import_observations_new": numbers_observations[0], - "last_import_observations_updated": numbers_observations[1], - "last_import_observations_resolved": numbers_observations[2], - "scanner": numbers_observations[3], + "last_import_observations_new": new_observations, + "last_import_observations_updated": updated_observations, + "last_import_observations_resolved": resolved_observations, + "last_import_licenses_new": None, + "last_import_licenses_updated": None, + "last_import_licenses_deleted": None, + "scanner": scanner, }, ) numbers_license_components = (0, 0, 0) - settings = Settings.load() - if ( - settings.feature_license_management - and not file_upload_parameters.suppress_licenses + if settings.feature_license_management and ( + not file_upload_parameters.suppress_licenses or file_upload_parameters.sbom ): - imported_license_components = parser_instance.get_license_components(data) + imported_license_components, scanner = parser_instance.get_license_components(data) numbers_license_components = process_license_components( - imported_license_components, vulnerability_check + imported_license_components, scanner, vulnerability_check ) - if numbers_license_components != (0, 0, 0): - vulnerability_check.last_import_licenses_new = numbers_license_components[0] - vulnerability_check.last_import_licenses_updated = ( - numbers_license_components[1] - ) - vulnerability_check.last_import_licenses_deleted = ( - numbers_license_components[2] - ) - if ( - numbers_observations[0] == 0 - and numbers_observations[1] == 0 - and numbers_observations[2] == 0 - ): - vulnerability_check.last_import_observations_new = None - vulnerability_check.last_import_observations_updated = None - vulnerability_check.last_import_observations_resolved = None - else: - vulnerability_check.last_import_licenses_new = None - vulnerability_check.last_import_licenses_updated = None - vulnerability_check.last_import_licenses_deleted = None - vulnerability_check.save() return ( numbers_observations[0], @@ -177,44 +196,50 @@ def api_import_observations( parser_instance = instanciate_parser(api_import_parameters.api_configuration.parser) if not isinstance(parser_instance, BaseAPIParser): - raise ParserError( - f"{api_import_parameters.api_configuration.parser.name} isn't an API parser" - ) + raise ParserError(f"{api_import_parameters.api_configuration.parser.name} isn't an API parser") - format_valid, errors, data = parser_instance.check_connection( - api_import_parameters.api_configuration - ) + format_valid, errors, data = parser_instance.check_connection(api_import_parameters.api_configuration) if not format_valid: - raise ValidationError( - "Connection couldn't be established: " + " / ".join(errors) - ) + raise ValidationError("Connection couldn't be established: " + " / ".join(errors)) + + imported_observations, scanner = parser_instance.get_observations( + data, + api_import_parameters.api_configuration.product, + api_import_parameters.branch, + ) - imported_observations = parser_instance.get_observations(data) + service = None + if api_import_parameters.service_name: + service = Service.objects.get_or_create( + product=api_import_parameters.api_configuration.product, name=api_import_parameters.service_name + )[0] import_parameters = ImportParameters( product=api_import_parameters.api_configuration.product, branch=api_import_parameters.branch, + service=service, 
parser=api_import_parameters.api_configuration.parser, filename="", api_configuration_name=api_import_parameters.api_configuration.name, - service=api_import_parameters.service, docker_image_name_tag=api_import_parameters.docker_image_name_tag, endpoint_url=api_import_parameters.endpoint_url, kubernetes_cluster=api_import_parameters.kubernetes_cluster, imported_observations=imported_observations, ) - numbers: Tuple[int, int, int, str] = _process_data(import_parameters) + numbers: Tuple[int, int, int] = _process_data(import_parameters, Settings.load()) Vulnerability_Check.objects.update_or_create( product=import_parameters.product, branch=import_parameters.branch, + service=service, + filename="", api_configuration_name=import_parameters.api_configuration_name, defaults={ "last_import_observations_new": numbers[0], "last_import_observations_updated": numbers[1], "last_import_observations_resolved": numbers[2], - "scanner": numbers[3], + "scanner": scanner, }, ) @@ -234,12 +259,10 @@ def api_check_connection( return format_valid, errors -def _process_data(import_parameters: ImportParameters) -> Tuple[int, int, int, str]: +def _process_data(import_parameters: ImportParameters, settings: Settings) -> Tuple[int, int, int]: observations_new = 0 observations_updated = 0 - scanner = "" - rule_engine = Rule_Engine(product=import_parameters.product) vex_engine = VEX_Engine(import_parameters.product, import_parameters.branch) @@ -248,13 +271,11 @@ def _process_data(import_parameters: ImportParameters) -> Tuple[int, int, int, s for observation_before_for_dict in get_observations_for_vulnerability_check( import_parameters.product, import_parameters.branch, + import_parameters.service, import_parameters.filename, import_parameters.api_configuration_name, ): - observations_before[observation_before_for_dict.identity_hash] = ( - observation_before_for_dict - ) - scanner = observation_before_for_dict.scanner + observations_before[observation_before_for_dict.identity_hash] = observation_before_for_dict observations_this_run: set[str] = set() vulnerability_check_observations: set[Observation] = set() @@ -272,18 +293,14 @@ def _process_data(import_parameters: ImportParameters) -> Tuple[int, int, int, s # Only process observation if it hasn't been processed in this run before if imported_observation.identity_hash not in observations_this_run: # Check if new observation is already there in the same check - observation_before = observations_before.get( - imported_observation.identity_hash - ) + observation_before = observations_before.get(imported_observation.identity_hash) if observation_before: - _process_current_observation(imported_observation, observation_before) + _process_current_observation(imported_observation, observation_before, settings) rule_engine.apply_rules_for_observation(observation_before) vex_engine.apply_vex_statements_for_observation(observation_before) - if observation_before.current_status == _get_initial_status( - observation_before.product - ): + if observation_before.current_status in Status.STATUS_ACTIVE: observations_updated += 1 # Remove observation from list of current observations because it is still part of the check @@ -292,81 +309,264 @@ def _process_data(import_parameters: ImportParameters) -> Tuple[int, int, int, s observations_this_run.add(observation_before.identity_hash) vulnerability_check_observations.add(observation_before) else: - _process_new_observation(imported_observation) + _process_new_observation(imported_observation, settings) 
rule_engine.apply_rules_for_observation(imported_observation) vex_engine.apply_vex_statements_for_observation(imported_observation) - if imported_observation.current_status == _get_initial_status( - imported_observation.product - ): + if imported_observation.current_status in Status.STATUS_ACTIVE: observations_new += 1 # Add identity_hash to set of observations in this run to detect duplicates in this run observations_this_run.add(imported_observation.identity_hash) vulnerability_check_observations.add(imported_observation) - scanner = imported_observation.scanner - observations_resolved = _resolve_unimported_observations(observations_before) vulnerability_check_observations.update(observations_resolved) - check_security_gate(import_parameters.product) - set_repository_default_branch(import_parameters.product) + + if (not import_parameters.branch and not import_parameters.product.repository_default_branch) or ( + import_parameters.branch and import_parameters.branch.is_default_branch + ): + check_security_gate(import_parameters.product) + if import_parameters.branch: import_parameters.branch.last_import = timezone.now() import_parameters.branch.save() - push_observations_to_issue_tracker( - import_parameters.product, vulnerability_check_observations - ) - find_potential_duplicates( - import_parameters.product, import_parameters.branch, import_parameters.service + + push_observations_to_issue_tracker(import_parameters.product, vulnerability_check_observations) + find_potential_duplicates(import_parameters.product, import_parameters.branch, import_parameters.service) + + return observations_new, observations_updated, len(observations_resolved) + + +def process_license_components( # pylint: disable=too-many-statements disable=too-many-locals + license_components: list[License_Component], + scanner: str, + vulnerability_check: Vulnerability_Check, +) -> Tuple[int, int, int]: + existing_components = License_Component.objects.filter( + product=vulnerability_check.product, + branch=vulnerability_check.branch, + upload_filename=vulnerability_check.filename, + ).select_related("effective_spdx_license") + + existing_component: Optional[License_Component] = None + existing_components_dict: dict[str, License_Component] = {} + existing_component_ids: list[int] = [] + for existing_component in existing_components: + existing_components_dict[existing_component.identity_hash] = existing_component + existing_component_ids.append(existing_component.pk) + + License_Component_Evidence.objects.filter(license_component__in=existing_component_ids).delete() + + components_new = [] + components_updated = [] + + spdx_cache = SPDXLicenseCache() + concluded_license_applicator = ConcludeLicenseApplicator(vulnerability_check.product) + + license_component_evidences = [] + processed_hashes = set() + + for unsaved_component in license_components: + prepare_license_component(unsaved_component, spdx_cache) + + if unsaved_component.identity_hash in processed_hashes: + continue + + existing_component = existing_components_dict.get(unsaved_component.identity_hash) + if existing_component: + effective_spdx_license_before = existing_component.effective_spdx_license + effective_non_spdx_license_before = existing_component.effective_non_spdx_license + effective_license_expression_before = existing_component.effective_license_expression + effective_multiple_licenses_before = existing_component.effective_multiple_licenses + evaluation_result_before = existing_component.evaluation_result + existing_component.component_purl = 
unsaved_component.component_purl + existing_component.component_purl_type = unsaved_component.component_purl_type + existing_component.component_cpe = unsaved_component.component_cpe + existing_component.component_dependencies = unsaved_component.component_dependencies + existing_component.component_cyclonedx_bom_link = unsaved_component.component_cyclonedx_bom_link + existing_component.imported_declared_license_name = unsaved_component.imported_declared_license_name + existing_component.imported_declared_spdx_license = unsaved_component.imported_declared_spdx_license + existing_component.imported_declared_non_spdx_license = unsaved_component.imported_declared_non_spdx_license + existing_component.imported_declared_license_expression = ( + unsaved_component.imported_declared_license_expression + ) + existing_component.imported_declared_multiple_licenses = ( + unsaved_component.imported_declared_multiple_licenses + ) + existing_component.imported_concluded_license_name = unsaved_component.imported_concluded_license_name + existing_component.imported_concluded_spdx_license = unsaved_component.imported_concluded_spdx_license + existing_component.imported_concluded_non_spdx_license = ( + unsaved_component.imported_concluded_non_spdx_license + ) + existing_component.imported_concluded_license_expression = ( + unsaved_component.imported_concluded_license_expression + ) + existing_component.imported_concluded_multiple_licenses = ( + unsaved_component.imported_concluded_multiple_licenses + ) + + existing_component.import_last_seen = timezone.now() + if ( + effective_spdx_license_before != existing_component.effective_spdx_license + or effective_non_spdx_license_before != existing_component.effective_non_spdx_license + or effective_license_expression_before != existing_component.effective_license_expression + or effective_multiple_licenses_before != existing_component.effective_multiple_licenses + or evaluation_result_before != existing_component.evaluation_result + ): + existing_component.last_change = timezone.now() + + set_effective_license(existing_component) + if ( + not existing_component.manual_concluded_license_name + or existing_component.manual_concluded_license_name == NO_LICENSE_INFORMATION + ): + concluded_license_applicator.apply_concluded_license(existing_component) + set_effective_license(existing_component) + + apply_licenselynx(existing_component, spdx_cache) + + clip_fields("licenses", "License_Component", existing_component) + components_updated.append(existing_component) + + license_component_evidences += _process_license_evidences(unsaved_component, existing_component) + + existing_components_dict.pop(unsaved_component.identity_hash) + else: + unsaved_component.product = vulnerability_check.product + unsaved_component.branch = vulnerability_check.branch + unsaved_component.origin_service = vulnerability_check.service + unsaved_component.upload_filename = vulnerability_check.filename + + unsaved_component.import_last_seen = timezone.now() + unsaved_component.last_change = timezone.now() + + set_effective_license(unsaved_component) + concluded_license_applicator.apply_concluded_license(unsaved_component) + set_effective_license(unsaved_component) + + apply_licenselynx(unsaved_component, spdx_cache) + + clip_fields("licenses", "License_Component", unsaved_component) + components_new.append(unsaved_component) + + processed_hashes.add(unsaved_component.identity_hash) + + License_Component.objects.bulk_update( + components_updated, + [ + "component_purl", + "component_purl_type", + 
"component_cpe", + "component_dependencies", + "component_cyclonedx_bom_link", + "imported_declared_license_name", + "imported_declared_spdx_license", + "imported_declared_license_expression", + "imported_declared_non_spdx_license", + "imported_declared_multiple_licenses", + "imported_concluded_license_name", + "imported_concluded_spdx_license", + "imported_concluded_license_expression", + "imported_concluded_non_spdx_license", + "imported_concluded_multiple_licenses", + "manual_concluded_license_name", + "manual_concluded_spdx_license", + "manual_concluded_license_expression", + "manual_concluded_non_spdx_license", + "manual_concluded_comment", + "effective_license_name", + "effective_spdx_license", + "effective_license_expression", + "effective_non_spdx_license", + "effective_multiple_licenses", + "evaluation_result", + "numerical_evaluation_result", + "import_last_seen", + "last_change", + ], + SBOM_BULK_BATCH_SIZE, ) - return observations_new, observations_updated, len(observations_resolved), scanner + inserted_components = License_Component.objects.bulk_create(components_new, SBOM_BULK_BATCH_SIZE) + for inserted_component in inserted_components: + license_component_evidences += _process_license_evidences(inserted_component, inserted_component) + License_Component_Evidence.objects.bulk_create(license_component_evidences, SBOM_BULK_BATCH_SIZE) -def _prepare_imported_observation( - import_parameters: ImportParameters, imported_observation: Observation -) -> None: + components_deleted = len(existing_components_dict) + license_component_ids: list[int] = [] + for existing_component in existing_components_dict.values(): + license_component_ids.append(existing_component.pk) + License_Component.objects.filter(pk__in=license_component_ids).delete() + + if len(components_new) == 0 and len(components_updated) == 0 and components_deleted == 0: + vulnerability_check.last_import_licenses_new = None + vulnerability_check.last_import_licenses_updated = None + vulnerability_check.last_import_licenses_deleted = None + else: + vulnerability_check.last_import_licenses_new = len(components_new) + vulnerability_check.last_import_licenses_updated = len(components_updated) + vulnerability_check.last_import_licenses_deleted = components_deleted + + if scanner: + vulnerability_check.scanner = scanner + vulnerability_check.save() + + apply_license_policy_product(spdx_cache, vulnerability_check.product, vulnerability_check.branch) + + return len(components_new), len(components_updated), components_deleted + + +def _process_license_evidences( + source_component: License_Component, target_component: License_Component +) -> list[License_Component_Evidence]: + license_component_evidences = [] + + if source_component.unsaved_evidences: + for unsaved_evidence in source_component.unsaved_evidences: + evidence = License_Component_Evidence( + license_component=target_component, + name=unsaved_evidence[0], + evidence=unsaved_evidence[1], + ) + clip_fields("licenses", "License_Component_Evidence", evidence) + license_component_evidences.append(evidence) + + return license_component_evidences + + +def _prepare_imported_observation(import_parameters: ImportParameters, imported_observation: Observation) -> None: imported_observation.product = import_parameters.product imported_observation.branch = import_parameters.branch imported_observation.parser = import_parameters.parser if not imported_observation.scanner: imported_observation.scanner = import_parameters.parser.name imported_observation.upload_filename = 
import_parameters.filename - imported_observation.api_configuration_name = ( - import_parameters.api_configuration_name - ) + imported_observation.api_configuration_name = import_parameters.api_configuration_name imported_observation.import_last_seen = timezone.now() + imported_observation.origin_service = import_parameters.service if import_parameters.service: - imported_observation.origin_service_name = import_parameters.service - service = Service.objects.get_or_create( - product=import_parameters.product, name=import_parameters.service - )[0] - imported_observation.origin_service = service + imported_observation.origin_service_name = import_parameters.service.name if import_parameters.docker_image_name_tag: - imported_observation.origin_docker_image_name_tag = ( - import_parameters.docker_image_name_tag - ) + imported_observation.origin_docker_image_name_tag = import_parameters.docker_image_name_tag if import_parameters.endpoint_url: imported_observation.origin_endpoint_url = import_parameters.endpoint_url if import_parameters.kubernetes_cluster: - imported_observation.origin_kubernetes_cluster = ( - import_parameters.kubernetes_cluster - ) + imported_observation.origin_kubernetes_cluster = import_parameters.kubernetes_cluster def _process_current_observation( - imported_observation: Observation, observation_before: Observation + imported_observation: Observation, observation_before: Observation, settings: Settings ) -> None: # Set data in the current observation from the new observation observation_before.title = imported_observation.title observation_before.description = imported_observation.description observation_before.recommendation = imported_observation.recommendation - observation_before.scanner_observation_id = ( - imported_observation.scanner_observation_id - ) + observation_before.scanner_observation_id = imported_observation.scanner_observation_id observation_before.vulnerability_id = imported_observation.vulnerability_id + observation_before.vulnerability_id_aliases = imported_observation.vulnerability_id_aliases observation_before.cvss3_score = imported_observation.cvss3_score observation_before.cvss3_vector = imported_observation.cvss3_vector observation_before.cvss4_score = imported_observation.cvss4_score @@ -375,9 +575,7 @@ def _process_current_observation( observation_before.found = imported_observation.found observation_before.scanner = imported_observation.scanner - observation_before.origin_component_dependencies = ( - imported_observation.origin_component_dependencies - ) + observation_before.origin_component_dependencies = imported_observation.origin_component_dependencies previous_severity = observation_before.current_severity observation_before.parser_severity = imported_observation.parser_severity @@ -390,59 +588,51 @@ def _process_current_observation( # Reopen the current observation if it is resolved, # leave the status as is otherwise. 
if observation_before.parser_status == Status.STATUS_RESOLVED: - observation_before.parser_status = _get_initial_status( - observation_before.product - ) + observation_before.parser_status = _get_initial_status(observation_before.product) observation_before.current_status = get_current_status(observation_before) if observation_before.current_status == Status.STATUS_RISK_ACCEPTED: if previous_status != Status.STATUS_RISK_ACCEPTED: - observation_before.risk_acceptance_expiry_date = ( - calculate_risk_acceptance_expiry_date(observation_before.product) + observation_before.risk_acceptance_expiry_date = calculate_risk_acceptance_expiry_date( + observation_before.product ) else: observation_before.risk_acceptance_expiry_date = None - epss_apply_observation(observation_before) + observation_before.fix_available = imported_observation.fix_available + observation_before.update_impact_score = imported_observation.update_impact_score + + apply_epss(observation_before) + apply_exploit_information(observation_before, settings) observation_before.import_last_seen = timezone.now() observation_before.save() observation_before.references.all().delete() if imported_observation.unsaved_references: - for reference in imported_observation.unsaved_references: + for unsaved_reference in imported_observation.unsaved_references: reference = Reference( observation=observation_before, - url=reference, + url=unsaved_reference, ) clip_fields("core", "Reference", reference) reference.save() observation_before.evidences.all().delete() if imported_observation.unsaved_evidences: - for evidence in imported_observation.unsaved_evidences: + for unsaved_evidence in imported_observation.unsaved_evidences: evidence = Evidence( observation=observation_before, - name=evidence[0], - evidence=evidence[1], + name=unsaved_evidence[0], + evidence=unsaved_evidence[1], ) clip_fields("core", "Evidence", evidence) evidence.save() # Write observation log if status or severity has been changed - if ( - previous_status != observation_before.current_status - or previous_severity != observation_before.current_severity - ): - status = ( - observation_before.current_status - if previous_status != observation_before.current_status - else "" - ) - + if previous_status != observation_before.current_status or previous_severity != observation_before.current_severity: + status = observation_before.current_status if previous_status != observation_before.current_status else "" severity = ( - imported_observation.current_severity - if previous_severity != observation_before.current_severity - else "" + observation_before.current_severity if previous_severity != observation_before.current_severity else "" ) create_observation_log( @@ -456,13 +646,11 @@ def _process_current_observation( ) -def _process_new_observation(imported_observation: Observation) -> None: +def _process_new_observation(imported_observation: Observation, settings: Settings) -> None: imported_observation.current_severity = get_current_severity(imported_observation) if not imported_observation.parser_status: - imported_observation.parser_status = _get_initial_status( - imported_observation.product - ) + imported_observation.parser_status = _get_initial_status(imported_observation.product) imported_observation.current_status = get_current_status(imported_observation) @@ -473,24 +661,25 @@ def _process_new_observation(imported_observation: Observation) -> None: ) # Observation has not been imported before, so it is a new one - epss_apply_observation(imported_observation) + 
apply_epss(imported_observation) + apply_exploit_information(imported_observation, settings) imported_observation.save() if imported_observation.unsaved_references: - for reference in imported_observation.unsaved_references: + for unsaved_reference in imported_observation.unsaved_references: reference = Reference( observation=imported_observation, - url=reference, + url=unsaved_reference, ) clip_fields("core", "Reference", reference) reference.save() if imported_observation.unsaved_evidences: - for evidence in imported_observation.unsaved_evidences: + for unsaved_evidence in imported_observation.unsaved_evidences: evidence = Evidence( observation=imported_observation, - name=evidence[0], - evidence=evidence[1], + name=unsaved_evidence[0], + evidence=unsaved_evidence[1], ) clip_fields("core", "Evidence", evidence) evidence.save() @@ -507,7 +696,7 @@ def _process_new_observation(imported_observation: Observation) -> None: def _resolve_unimported_observations( - observations_before: dict[str, Observation] + observations_before: dict[str, Observation], ) -> set[Observation]: # All observations that are still in observations_before are not in the imported scan # and seem to have been resolved. @@ -520,7 +709,7 @@ def _resolve_unimported_observations( new_status = get_current_status(observation) if old_status != new_status: - if old_status == Status.STATUS_OPEN: + if old_status in Status.STATUS_ACTIVE: observations_resolved.add(observation) observation.current_status = new_status diff --git a/backend/application/import_observations/services/parser_detector.py b/backend/application/import_observations/services/parser_detector.py index 239ff192b..fb31dc888 100644 --- a/backend/application/import_observations/services/parser_detector.py +++ b/backend/application/import_observations/services/parser_detector.py @@ -19,11 +19,7 @@ def detect_parser(file: File) -> tuple[Parser, BaseParser, Any]: - if file.name and not ( - file.name.endswith(".csv") - or file.name.endswith(".json") - or file.name.endswith(".sarif") - ): + if file.name and not (file.name.endswith(".csv") or file.name.endswith(".json") or file.name.endswith(".sarif")): raise ValidationError("File is not CSV, JSON or SARIF") if file.name and file.name.endswith(".csv"): @@ -33,9 +29,7 @@ def detect_parser(file: File) -> tuple[Parser, BaseParser, Any]: content = content.decode("utf-8") reader = DictReader(StringIO(content), delimiter=",", quotechar='"') except Exception: - raise ValidationError( # pylint: disable=raise-missing-from - "File is not valid CSV" - ) + raise ValidationError("File is not valid CSV") # pylint: disable=raise-missing-from # The Exception itself is not relevant and must not be re-raised rows = [] @@ -50,9 +44,7 @@ def detect_parser(file: File) -> tuple[Parser, BaseParser, Any]: try: data = load(file) except Exception: - raise ValidationError( # pylint: disable=raise-missing-from - "File is not valid JSON" - ) + raise ValidationError("File is not valid JSON") # pylint: disable=raise-missing-from # The DjangoValidationError itself is not relevant and must not be re-raised if data: @@ -71,9 +63,7 @@ def instanciate_parser(parser: Parser) -> BaseParser: return parser_instance -def _get_parser( - data: Any, filetype: str -) -> tuple[Optional[Parser], Optional[BaseParser]]: +def _get_parser(data: Any, filetype: str) -> tuple[Optional[Parser], Optional[BaseParser]]: parsers = Parser.objects.filter(source=Parser_Source.SOURCE_FILE).order_by("name") for parser in parsers: try: @@ -84,9 +74,7 @@ def _get_parser( if not 
isinstance(parser_instance, BaseFileParser): raise ParserError(f"{parser.name} isn't a file parser") - if parser_instance.get_filetype() == filetype and parser_instance.check_format( - data - ): + if parser_instance.get_filetype() == filetype and parser_instance.check_format(data): return parser, parser_instance return None, None diff --git a/backend/application/import_observations/services/parser_registry.py b/backend/application/import_observations/services/parser_registry.py index c00a69964..c239098bf 100644 --- a/backend/application/import_observations/services/parser_registry.py +++ b/backend/application/import_observations/services/parser_registry.py @@ -2,6 +2,7 @@ import logging from typing import Optional, Type +from application.import_observations.exceptions import ParserError from application.import_observations.models import Parser from application.import_observations.parsers.base_parser import ( BaseAPIParser, @@ -21,15 +22,9 @@ def register_parser(module_name: str, class_name: str) -> None: name = parser_class.get_name() my_type = parser_class.get_type() + sbom = parser_class.sbom() if hasattr(parser_class, "sbom") else False - source = Parser_Source.SOURCE_UNKNOWN - for base in parser_class.__bases__: - if base is BaseAPIParser: - source = Parser_Source.SOURCE_API - break - if base is BaseFileParser: - source = Parser_Source.SOURCE_FILE - break + source = _get_source(parser_class) parser = get_parser_by_module_and_class(module_name, class_name) if parser: @@ -43,6 +38,9 @@ def register_parser(module_name: str, class_name: str) -> None: if parser.source != source: parser.source = source changed = True + if hasattr(parser_class, "sbom") and parser.sbom != sbom: + parser.sbom = sbom + changed = True if changed: parser.save() else: @@ -55,6 +53,18 @@ def register_parser(module_name: str, class_name: str) -> None: ).save() +def _get_source(parser_class: Type[BaseParser]) -> str: + source = Parser_Source.SOURCE_OTHER + for base in parser_class.__bases__: + if base is BaseAPIParser: + source = Parser_Source.SOURCE_API + break + if base is BaseFileParser: + source = Parser_Source.SOURCE_FILE + break + return source + + def create_manual_parser() -> None: try: Parser.objects.get(type=Parser_Type.TYPE_MANUAL) @@ -76,24 +86,16 @@ def create_manual_parser() -> None: def get_parser_class_from_parser_name(name: str) -> Optional[Type[BaseParser]]: parser = Parser.objects.get(name=name) - return get_parser_class_from_module_class_names( - parser.module_name, parser.class_name - ) + return get_parser_class_from_module_class_names(parser.module_name, parser.class_name) -def get_parser_class_from_module_class_names( - module_name: str, class_name: str -) -> Type[BaseParser]: - module = importlib.import_module( # nosemgrep - f"application.import_observations.parsers.{module_name}.parser" - ) +def get_parser_class_from_module_class_names(module_name: str, class_name: str) -> Type[BaseParser]: + module = importlib.import_module(f"application.import_observations.parsers.{module_name}.parser") # nosemgrep # nosemgrep because of rule python.lang.security.audit.non-literal-import.non-literal-import # This is the price you pay for a dynamic parser registry. We accept the risk. 
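Reviewer note, not part of the patch: the dynamic registry resolves a parser by importing its module and verifying the class against the base parser type. A minimal sketch of that lookup under an assumed package layout; the package path, stand-in base class, example names and the raised exception below are illustrative (the real code imports from application.import_observations.parsers and raises ParserError).

import importlib

class BaseParser:  # stand-in for the repository's BaseParser
    pass

def load_parser_class(module_name: str, class_name: str) -> type:
    # e.g. module_name="trivy", class_name="TrivyParser" would resolve
    # parsers/trivy/parser.py in this hypothetical layout
    module = importlib.import_module(f"parsers.{module_name}.parser")
    parser_class = getattr(module, class_name)
    if not issubclass(parser_class, BaseParser):
        raise TypeError(f"{class_name} is not a subclass of BaseParser")
    return parser_class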
parser_class = getattr(module, class_name) if not issubclass(parser_class, BaseParser): - raise Exception( # pylint: disable=broad-exception-raised - f"{class_name} is not a subclass of BaseParser" - ) + raise ParserError(f"{class_name} is not a subclass of BaseParser") return parser_class diff --git a/backend/application/import_observations/tasks.py b/backend/application/import_observations/tasks.py deleted file mode 100644 index 1e9f87cd8..000000000 --- a/backend/application/import_observations/tasks.py +++ /dev/null @@ -1,68 +0,0 @@ -import logging - -from huey import crontab -from huey.contrib.djhuey import db_periodic_task, lock_task - -from application.commons import settings_static -from application.commons.models import Settings -from application.commons.services.tasks import handle_task_exception -from application.import_observations.models import Api_Configuration -from application.import_observations.services.import_observations import ( - ApiImportParameters, - api_import_observations, -) - -logger = logging.getLogger("secobserve.import_observations") - - -@db_periodic_task( - crontab( - minute=settings_static.api_import_crontab_minute, - hour=settings_static.api_import_crontab_hour, - ) -) -@lock_task("api_import") -def task_api_import() -> None: - logger.info("--- API import - start ---") - - try: - settings = Settings.load() - if not settings.feature_automatic_api_import: - logger.info("API import is disabled in settings") - return - - api_configurations = Api_Configuration.objects.filter( - automatic_import_enabled=True - ) - for api_configuration in api_configurations: - try: - api_import_parameters = ApiImportParameters( - api_configuration=api_configuration, - branch=api_configuration.automatic_import_branch, - service=api_configuration.automatic_import_service, - docker_image_name_tag=api_configuration.automatic_import_docker_image_name_tag, - endpoint_url=api_configuration.automatic_import_endpoint_url, - kubernetes_cluster=api_configuration.automatic_import_kubernetes_cluster, - ) - ( - observations_new, - observations_updated, - observations_resolved, - ) = api_import_observations(api_import_parameters) - logger.info( - "API import - %s: %s new, %s updated, %s resolved", - api_configuration, - observations_new, - observations_updated, - observations_resolved, - ) - except Exception as e: - logger.warning( - "API import - %s: failed with exception", api_configuration - ) - handle_task_exception(e) - - except Exception as e: - handle_task_exception(e) - - logger.info("--- API import - finished ---") diff --git a/backend/application/import_observations/types.py b/backend/application/import_observations/types.py index b920ffca8..0fb182b2b 100644 --- a/backend/application/import_observations/types.py +++ b/backend/application/import_observations/types.py @@ -1,13 +1,20 @@ +from typing import Any, Optional + +from semver import Version + + class Parser_Source: SOURCE_API = "API" SOURCE_FILE = "File" SOURCE_MANUAL = "Manual" + SOURCE_OTHER = "Other" SOURCE_UNKNOWN = "Unknown" SOURCE_CHOICES = [ (SOURCE_API, SOURCE_API), (SOURCE_FILE, SOURCE_FILE), (SOURCE_MANUAL, SOURCE_MANUAL), + (SOURCE_OTHER, SOURCE_OTHER), (SOURCE_UNKNOWN, SOURCE_UNKNOWN), ] @@ -37,3 +44,123 @@ class Parser_Type: class Parser_Filetype: FILETYPE_CSV = "CSV" FILETYPE_JSON = "JSON" + + +class ExtendedSemVer: + def __init__(self) -> None: + self.prefix: Optional[int] = None + self.semver: Version + + @classmethod + def parse(cls, version: Optional[str]) -> Optional["ExtendedSemVer"]: + if not version: + 
return None + + instance = cls() + + elements = version.split(":") + if len(elements) >= 2: + prefix = elements[0] + if not prefix.isdigit(): + return None + instance.prefix = int(prefix) + + suffix = ":".join(elements[1:]) + else: + suffix = version + + suffix_semver = instance._get_semver(suffix) + if not suffix_semver: + return None + instance.semver = suffix_semver + + return instance + + @classmethod + def _get_semver(cls, suffix: str) -> Optional[Version]: + if not suffix: + return None + + # Go packages sometimes have a "v" prefix + if suffix.startswith("v"): + suffix = suffix[1:] + + if suffix == "0": + return Version.parse("0.0.0") + + if len(suffix.split("-")) == 2: + prefix = suffix.split("-")[0] + if len(prefix.split(".")) == 2: + prefix = f"{prefix}.0" + suffix = f"{prefix}-{suffix.split('-')[1]}" + + if len(suffix.split(".")) == 2: + suffix = f"{suffix}.0" + + if not Version.is_valid(suffix): + return None + + return Version.parse(suffix) + + def __eq__(self, other: Any) -> bool: + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __gt__(self, other: Any) -> bool: + if not isinstance(other, self.__class__): + return False + + if (self.prefix is None and other.prefix is not None) or (self.prefix is not None and other.prefix is None): + return False + + if self.prefix is not None and other.prefix is not None: + if self.prefix < other.prefix: + return False + if self.prefix > other.prefix: + return True + + return self.semver > other.semver + + def __ge__(self, other: Any) -> bool: + if not isinstance(other, self.__class__): + return False + + if (self.prefix is None and other.prefix is not None) or (self.prefix is not None and other.prefix is None): + return False + + if self.prefix is not None and other.prefix is not None: + if self.prefix < other.prefix: + return False + if self.prefix > other.prefix: + return True + + return self.semver >= other.semver + + def __lt__(self, other: Any) -> bool: + if not isinstance(other, self.__class__): + return False + + if (self.prefix is None and other.prefix is not None) or (self.prefix is not None and other.prefix is None): + return False + + if self.prefix is not None and other.prefix is not None: + if self.prefix > other.prefix: + return False + if self.prefix < other.prefix: + return True + + return self.semver < other.semver + + def __le__(self, other: Any) -> bool: + if not isinstance(other, self.__class__): + return False + + if (self.prefix is None and other.prefix is not None) or (self.prefix is not None and other.prefix is None): + return False + + if self.prefix is not None and other.prefix is not None: + if self.prefix > other.prefix: + return False + if self.prefix < other.prefix: + return True + + return self.semver <= other.semver diff --git a/backend/application/issue_tracker/apps.py b/backend/application/issue_tracker/apps.py index c373f8587..b970e2a01 100644 --- a/backend/application/issue_tracker/apps.py +++ b/backend/application/issue_tracker/apps.py @@ -5,7 +5,7 @@ class CoreConfig(AppConfig): name = "application.issue_tracker" verbose_name = "Issue Tracker" - def ready(self): + def ready(self) -> None: try: import application.issue_tracker.signals # noqa F401 pylint: disable=import-outside-toplevel, unused-import except ImportError: diff --git a/backend/application/issue_tracker/issue_trackers/base_issue_tracker.py b/backend/application/issue_tracker/issue_trackers/base_issue_tracker.py index e2cbbd798..150aa7ffe 100644 --- 
a/backend/application/issue_tracker/issue_trackers/base_issue_tracker.py +++ b/backend/application/issue_tracker/issue_trackers/base_issue_tracker.py @@ -26,9 +26,7 @@ def update_issue(self, observation: Observation, issue: Issue) -> None: def close_issue(self, observation: Observation, issue: Issue) -> None: raise NotImplementedError("close_issue() must be overridden") - def close_issue_for_deleted_observation( - self, product: Product, issue: Issue - ) -> None: + def close_issue_for_deleted_observation(self, product: Product, issue: Issue) -> None: raise NotImplementedError("close_issue() must be overridden") def get_frontend_issue_url(self, product: Product, issue_id: str) -> str: @@ -80,9 +78,7 @@ def _get_origin(self, observation: Observation) -> str: origin += f"{observation.origin_cloud_resource}" return origin - def _get_description_for_deleted_observation( - self, description: Optional[str] - ) -> str: + def _get_description_for_deleted_observation(self, description: Optional[str]) -> str: return f"**--- Observation has been deleted ---**\n\n{description}" def _normalize_base_url(self, base_url: str) -> str: diff --git a/backend/application/issue_tracker/issue_trackers/github_issue_tracker.py b/backend/application/issue_tracker/issue_trackers/github_issue_tracker.py index ce99c867e..c8e153089 100644 --- a/backend/application/issue_tracker/issue_trackers/github_issue_tracker.py +++ b/backend/application/issue_tracker/issue_trackers/github_issue_tracker.py @@ -96,9 +96,7 @@ def close_issue(self, observation: Observation, issue: Issue) -> None: ) response.raise_for_status() - def close_issue_for_deleted_observation( - self, product: Product, issue: Issue - ) -> None: + def close_issue_for_deleted_observation(self, product: Product, issue: Issue) -> None: data: dict[str, Any] = { "body": self._get_description_for_deleted_observation(issue.description), "state": "closed", @@ -115,9 +113,7 @@ def close_issue_for_deleted_observation( response.raise_for_status() def get_frontend_issue_url(self, product: Product, issue_id: str) -> str: - return ( - f"https://github.com/{product.issue_tracker_project_id}/issues/{issue_id}" - ) + return f"https://github.com/{product.issue_tracker_project_id}/issues/{issue_id}" def _get_issue_tracker_base_url(self, product: Product) -> str: return f"https://api.github.com/repos/{product.issue_tracker_project_id}/issues" diff --git a/backend/application/issue_tracker/issue_trackers/gitlab_issue_tracker.py b/backend/application/issue_tracker/issue_trackers/gitlab_issue_tracker.py index cf5fe6207..6175d19c4 100644 --- a/backend/application/issue_tracker/issue_trackers/gitlab_issue_tracker.py +++ b/backend/application/issue_tracker/issue_trackers/gitlab_issue_tracker.py @@ -90,13 +90,9 @@ def close_issue(self, observation: Observation, issue: Issue) -> None: ) response.raise_for_status() - def close_issue_for_deleted_observation( - self, product: Product, issue: Issue - ) -> None: + def close_issue_for_deleted_observation(self, product: Product, issue: Issue) -> None: data = { - "description": self._get_description_for_deleted_observation( - issue.description - ), + "description": self._get_description_for_deleted_observation(issue.description), "confidential": True, "state_event": "close", } diff --git a/backend/application/issue_tracker/issue_trackers/jira_issue_tracker.py b/backend/application/issue_tracker/issue_trackers/jira_issue_tracker.py index bf0d2a8f9..17e3c4bb2 100644 --- a/backend/application/issue_tracker/issue_trackers/jira_issue_tracker.py +++ 
b/backend/application/issue_tracker/issue_trackers/jira_issue_tracker.py @@ -65,13 +65,10 @@ def update_issue(self, observation: Observation, issue: Issue) -> None: if ( jira_issue.fields.status - and str(jira_issue.fields.status) - == observation.product.issue_tracker_status_closed + and str(jira_issue.fields.status) == observation.product.issue_tracker_status_closed and observation.issue_tracker_jira_initial_status ): - self.jira.transition_issue( - jira_issue, observation.issue_tracker_jira_initial_status - ) + self.jira.transition_issue(jira_issue, observation.issue_tracker_jira_initial_status) def close_issue(self, observation: Observation, issue: Issue) -> None: if not observation.issue_tracker_issue_id: @@ -88,19 +85,13 @@ def close_issue(self, observation: Observation, issue: Issue) -> None: description=description, ) - self.jira.transition_issue( - jira_issue, observation.product.issue_tracker_status_closed - ) + self.jira.transition_issue(jira_issue, observation.product.issue_tracker_status_closed) - def close_issue_for_deleted_observation( - self, product: Product, issue: Issue - ) -> None: + def close_issue_for_deleted_observation(self, product: Product, issue: Issue) -> None: jira_issue = self._get_jira_issue(issue.id) if jira_issue: - description = self._get_description_for_deleted_observation( - jira_issue.fields.description - ) + description = self._get_description_for_deleted_observation(jira_issue.fields.description) description = description.replace("**", "*") jira_issue.update( diff --git a/backend/application/issue_tracker/services/issue_tracker.py b/backend/application/issue_tracker/services/issue_tracker.py index 0d7d7f956..4053bee40 100644 --- a/backend/application/issue_tracker/services/issue_tracker.py +++ b/backend/application/issue_tracker/services/issue_tracker.py @@ -3,8 +3,7 @@ from huey.contrib.djhuey import db_task, task from application.access_control.models import User -from application.commons.services.global_request import get_current_user -from application.commons.services.tasks import handle_task_exception +from application.access_control.services.current_user import get_current_user from application.core.models import Observation, Product from application.core.types import Severity, Status from application.issue_tracker.issue_trackers.base_issue_tracker import ( @@ -19,11 +18,10 @@ ) from application.issue_tracker.issue_trackers.jira_issue_tracker import JiraIssueTracker from application.issue_tracker.types import Issue_Tracker +from application.notifications.services.tasks import handle_task_exception -def push_observations_to_issue_tracker( - product: Product, observations: set[Observation] -) -> None: +def push_observations_to_issue_tracker(product: Product, observations: set[Observation]) -> None: if product.issue_tracker_active: for observation in observations: push_observation_to_issue_tracker(observation, get_current_user()) @@ -44,7 +42,7 @@ def push_observation_to_issue_tracker(observation: Observation, user: User) -> N observation.issue_tracker_issue_id = "" observation.save() - if observation.current_status == Status.STATUS_OPEN: + if observation.current_status in Status.STATUS_ACTIVE: if observation.product.issue_tracker_minimum_severity: numerical_minimum_severity = Severity.NUMERICAL_SEVERITIES.get( observation.product.issue_tracker_minimum_severity, 99 @@ -74,9 +72,7 @@ def push_observation_to_issue_tracker(observation: Observation, user: User) -> N @task() -def push_deleted_observation_to_issue_tracker( - product: Product, issue_id: 
Optional[str], user: User -) -> None: +def push_deleted_observation_to_issue_tracker(product: Product, issue_id: Optional[str], user: User) -> None: try: if product.issue_tracker_active and issue_id: issue_tracker = issue_tracker_factory(product) @@ -87,9 +83,7 @@ def push_deleted_observation_to_issue_tracker( handle_task_exception(e, user) -def issue_tracker_factory( - product: Product, with_communication: bool = True -) -> BaseIssueTracker: +def issue_tracker_factory(product: Product, with_communication: bool = True) -> BaseIssueTracker: if product.issue_tracker_type == Issue_Tracker.ISSUE_TRACKER_GITHUB: return GitHubIssueTracker() @@ -102,13 +96,9 @@ def issue_tracker_factory( raise ValueError(f"Unknown issue tracker type: {product.issue_tracker_type}") -def _get_issue( - observation: Observation, issue_tracker: BaseIssueTracker -) -> Optional[Issue]: +def _get_issue(observation: Observation, issue_tracker: BaseIssueTracker) -> Optional[Issue]: if observation.issue_tracker_issue_id: - issue = issue_tracker.get_issue( - observation.product, observation.issue_tracker_issue_id - ) + issue = issue_tracker.get_issue(observation.product, observation.issue_tracker_issue_id) else: issue = None return issue diff --git a/backend/application/licenses/api/filters.py b/backend/application/licenses/api/filters.py index 64f68b4e3..7377e3028 100644 --- a/backend/application/licenses/api/filters.py +++ b/backend/application/licenses/api/filters.py @@ -1,5 +1,7 @@ from datetime import timedelta +from typing import Any +from django.db.models import QuerySet from django.utils import timezone from django_filters import ( BooleanFilter, @@ -14,6 +16,7 @@ from application.commons.api.extended_ordering_filter import ExtendedOrderingFilter from application.commons.types import Age_Choices from application.licenses.models import ( + Concluded_License, License, License_Component, License_Component_Evidence, @@ -28,27 +31,57 @@ from application.licenses.queries.license_group import get_license_groups -class LicenseComponentFilter(FilterSet): - component_name_version = CharFilter( - field_name="component_name_version", lookup_expr="icontains" - ) - license_name = CharFilter(field_name="license_name", lookup_expr="icontains") - license_name_exact = CharFilter(field_name="license_name") - license_spdx_id = CharFilter(field_name="license__spdx_id", lookup_expr="icontains") - license_expression = CharFilter( - field_name="license_expression", lookup_expr="icontains" +class ConcludedLicenseFilter(FilterSet): + component_name = CharFilter(field_name="component_name", lookup_expr="icontains") + component_version = CharFilter(field_name="component_version", lookup_expr="icontains") + manual_concluded_license_expression = CharFilter( + field_name="manual_concluded_license_expression", lookup_expr="icontains" ) - non_spdx_license = CharFilter( - field_name="non_spdx_license", lookup_expr="icontains" + manual_concluded_non_spdx_license = CharFilter( + field_name="manual_concluded_non_spdx_license", lookup_expr="icontains" ) - age = ChoiceFilter( - field_name="age", method="get_age", choices=Age_Choices.AGE_CHOICES + age = ChoiceFilter(field_name="age", method="get_age", choices=Age_Choices.AGE_CHOICES) + + def get_age(self, queryset: QuerySet, name: Any, value: Any) -> QuerySet: # pylint: disable=unused-argument + days = Age_Choices.get_days_from_age(value) + + if days is None: + return queryset + + today = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) + time_threshold = today - timedelta(days=int(days)) 
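Reviewer note, not part of the patch: the get_age filter methods in this file map an age choice to a day count and then to a midnight-aligned date threshold before filtering on last_updated, as in the surrounding lines. A minimal sketch of that cutoff computation, assuming an illustrative AGE_DAYS mapping in place of Age_Choices:

from datetime import datetime, timedelta, timezone
from typing import Optional

AGE_DAYS = {"Past 7 days": 7, "Past 30 days": 30, "Past 90 days": 90}  # illustrative choices

def age_cutoff(choice: str) -> Optional[datetime]:
    days = AGE_DAYS.get(choice)
    if days is None:
        return None  # no filtering for unknown choices
    today = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
    return today - timedelta(days=days)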
+ return queryset.filter(last_updated__gte=time_threshold) + + ordering = ExtendedOrderingFilter( + # tuple-mapping retains order + fields=( + ("product__name", "product_data.name"), + (("component_name", "component_version", "component_purl_type"), "component_name_version"), + ("manual_concluded_spdx_license__spdx_id", "manual_concluded_spdx_license_id"), + ("manual_concluded_license_expression", "manual_concluded_license_expression"), + ("manual_concluded_non_spdx_license", "manual_concluded_non_spdx_license"), + ("user__full_name", "user_data.full_name"), + ("last_updated", "last_updated"), + ) ) - branch_name = CharFilter(field_name="branch__name") - def get_age(self, queryset, field_name, value): # pylint: disable=unused-argument - # field_name is used as a positional argument + class Meta: + model = Concluded_License + fields = "__all__" + +class LicenseComponentFilter(FilterSet): + component_name_version = CharFilter(field_name="component_name_version", lookup_expr="icontains") + effective_license_name = CharFilter(field_name="effective_license_name", lookup_expr="icontains") + effective_license_name_exact = CharFilter(field_name="effective_license_name") + effective_license_spdx_id = CharFilter(field_name="effective_spdx_license__spdx_id", lookup_expr="icontains") + effective_license_expression = CharFilter(field_name="effective_license_expression", lookup_expr="icontains") + effective_non_spdx_license = CharFilter(field_name="effective_non_spdx_license", lookup_expr="icontains") + age = ChoiceFilter(field_name="age", method="get_age", choices=Age_Choices.AGE_CHOICES) + branch_name_exact = CharFilter(field_name="branch__name") + manual_concluded_comment = CharFilter(field_name="manual_concluded_comment", lookup_expr="icontains") + + def get_age(self, queryset: QuerySet, name: Any, value: Any) -> QuerySet: # pylint: disable=unused-argument days = Age_Choices.get_days_from_age(value) if days is None: @@ -61,46 +94,49 @@ def get_age(self, queryset, field_name, value): # pylint: disable=unused-argume ordering = ExtendedOrderingFilter( # tuple-mapping retains order fields=( - ("license__spdx_id", "license_data.spdx_id"), - ("license_expression", "license_expression"), - ("non_spdx_license", "non_spdx_license"), + ("effective_spdx_license__spdx_id", "license_data.spdx_id"), + ("effective_license_expression", "effective_license_expression"), + ("effective_non_spdx_license", "effective_non_spdx_license"), ( - ( - "license_name", - "numerical_evaluation_result", - "component_name_version", - ), - "license_name", + ("effective_license_name", "numerical_evaluation_result", "component_name_version"), + "effective_license_name", + ), + (("numerical_evaluation_result", "effective_license_name", "component_name_version"), "evaluation_result"), + ( + ("branch__name", "effective_license_name", "numerical_evaluation_result", "component_name_version"), + "branch_name", ), + ("component_name_version", "component_name_version"), + ("component_name_version", "component_name_version_type"), ( ( + "component_purl_type", "numerical_evaluation_result", - "license_name", + "effective_license_name", "component_name_version", ), - "evaluation_result", + "component_purl_type", ), ( ( - "branch__name", - "license_name", + "origin_service__name", + "effective_license_name", "numerical_evaluation_result", "component_name_version", ), - "branch_name", + "origin_service_name", ), - ("component_name_version", "component_name_version"), ( ( - "component_purl_type", + "manual_concluded_comment", + 
"effective_license_name", "numerical_evaluation_result", - "license_name", "component_name_version", ), - "component_purl_type", + "manual_concluded_comment", ), ("last_change", "last_change"), - ), + ) ) class Meta: @@ -108,13 +144,15 @@ class Meta: fields = [ "product", "branch", - "license_name", - "license_spdx_id", - "license_expression", - "non_spdx_license", + "effective_license_name", + "effective_license_spdx_id", + "effective_license_expression", + "effective_non_spdx_license", "evaluation_result", "component_name_version", "component_purl_type", + "origin_service", + "manual_concluded_comment", ] @@ -123,7 +161,7 @@ class LicenseComponentEvidenceFilter(FilterSet): ordering = OrderingFilter( # tuple-mapping retains order - fields=(("name", "name"), ("license_component", "license_component")), + fields=(("name", "name"), ("license_component", "license_component")) ) class Meta: @@ -134,24 +172,20 @@ class Meta: class LicenseFilter(FilterSet): spdx_id = CharFilter(field_name="spdx_id", lookup_expr="icontains") name = CharFilter(field_name="name", lookup_expr="icontains") - exclude_license_group = NumberFilter( - field_name="exclude_license_group", method="get_exclude_license_group" - ) - exclude_license_policy = NumberFilter( - field_name="exclude_license_policy", method="get_exclude_license_policy" - ) + exclude_license_group = NumberFilter(field_name="exclude_license_group", method="get_exclude_license_group") + exclude_license_policy = NumberFilter(field_name="exclude_license_policy", method="get_exclude_license_policy") license_groups = ModelMultipleChoiceFilter(queryset=License.objects.none()) def get_exclude_license_group( - self, queryset, field_name, value - ): # pylint: disable=unused-argument + self, queryset: QuerySet, name: Any, value: Any # pylint: disable=unused-argument + ) -> QuerySet: if value is not None: return queryset.exclude(license_groups__id=value) return queryset def get_exclude_license_policy( - self, queryset, field_name, value - ): # pylint: disable=unused-argument + self, queryset: QuerySet, name: Any, value: Any # pylint: disable=unused-argument + ) -> QuerySet: if value is not None: return queryset.exclude(license_policy_items__license_policy__id=value) return queryset @@ -163,43 +197,32 @@ def get_exclude_license_policy( ("name", "name"), ("is_osi_approved", "is_osi_approved"), ("is_deprecated", "is_deprecated"), - ), + ) ) - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) self.filters["license_groups"].queryset = get_license_groups() class Meta: model = License - fields = [ - "spdx_id", - "name", - "is_osi_approved", - "is_deprecated", - "license_groups", - ] + fields = ["spdx_id", "name", "is_osi_approved", "is_deprecated", "license_groups"] class LicenseGroupFilter(FilterSet): name = CharFilter(field_name="name", lookup_expr="icontains") - exclude_license_policy = NumberFilter( - field_name="exclude_license_policy", method="get_exclude_license_policy" - ) + exclude_license_policy = NumberFilter(field_name="exclude_license_policy", method="get_exclude_license_policy") def get_exclude_license_policy( - self, queryset, field_name, value - ): # pylint: disable=unused-argument + self, queryset: QuerySet, name: Any, value: Any # pylint: disable=unused-argument + ) -> QuerySet: if value is not None: return queryset.exclude(license_policy_items__license_policy__id=value) return queryset ordering = OrderingFilter( # tuple-mapping retains order - fields=( - ("name", "name"), - 
("is_public", "is_public"), - ), + fields=(("name", "name"), ("is_public", "is_public")) ) class Meta: @@ -218,7 +241,7 @@ class LicenseGroupMemberFilter(FilterSet): ("license_group", "license_group"), ("user", "user"), ("is_manager", "is_manager"), - ), + ) ) class Meta: @@ -236,7 +259,7 @@ class LicenseGroupAuthorizationGroupFilter(FilterSet): ("license_group", "license_group"), ("authorization_group", "authorization_group"), ("is_manager", "is_manager"), - ), + ) ) class Meta: @@ -248,41 +271,29 @@ class LicensePolicyFilter(FilterSet): name = CharFilter(field_name="name", lookup_expr="icontains") is_child = BooleanFilter(field_name="is_child", method="get_is_child") is_not_id = NumberFilter(field_name="is_not_id", method="get_is_not_id") - license = NumberFilter( - field_name="license", method="get_license_policies_with_license" - ) - license_group = NumberFilter( - field_name="license_group", method="get_license_policies_with_license_group" - ) + license = NumberFilter(field_name="license", method="get_license_policies_with_license") + license_group = NumberFilter(field_name="license_group", method="get_license_policies_with_license_group") - def get_is_child( - self, queryset, field_name, value # pylint: disable=unused-argument - ) -> bool: + def get_is_child(self, queryset: QuerySet, name: Any, value: Any) -> QuerySet: # pylint: disable=unused-argument parent_null = not value return queryset.filter(parent__isnull=parent_null) - def get_is_not_id( - self, queryset, field_name, value # pylint: disable=unused-argument - ) -> bool: + def get_is_not_id(self, queryset: QuerySet, name: Any, value: Any) -> QuerySet: # pylint: disable=unused-argument return queryset.exclude(pk=value) def get_license_policies_with_license( - self, queryset, field_name, value # pylint: disable=unused-argument - ) -> bool: + self, queryset: QuerySet, name: Any, value: Any # pylint: disable=unused-argument + ) -> QuerySet: return queryset.filter(license_policy_items__license=value) def get_license_policies_with_license_group( - self, queryset, field_name, value # pylint: disable=unused-argument - ) -> bool: + self, queryset: QuerySet, name: Any, value: Any # pylint: disable=unused-argument + ) -> QuerySet: return queryset.filter(license_policy_items__license_group=value) ordering = ExtendedOrderingFilter( # tuple-mapping retains order - fields=( - ("name", "name"), - (("parent__name", "name"), "parent_name"), - ("is_public", "is_public"), - ), + fields=(("name", "name"), (("parent__name", "name"), "parent_name"), ("is_public", "is_public")) ) class Meta: @@ -291,57 +302,25 @@ class Meta: class LicensePolicyItemFilter(FilterSet): - license_group_name = CharFilter( - field_name="license_group__name", lookup_expr="icontains" - ) + license_group_name = CharFilter(field_name="license_group__name", lookup_expr="icontains") license_spdx_id = CharFilter(field_name="license__spdx_id", lookup_expr="icontains") - license_expression = CharFilter( - field_name="license_expression", lookup_expr="icontains" - ) - non_spdx_license = CharFilter( - field_name="non_spdx_license", lookup_expr="icontains" - ) + license_expression = CharFilter(field_name="license_expression", lookup_expr="icontains") + non_spdx_license = CharFilter(field_name="non_spdx_license", lookup_expr="icontains") ordering = ExtendedOrderingFilter( # tuple-mapping retains order fields=( ("license_policy__name", "license_policy_data.name"), ( - ( - "license_group__name", - "license__spdx_id", - "license_expression", - "non_spdx_license", - ), - 
"license_group_name", + ("license_group__name", "license__spdx_id", "license_expression", "non_spdx_license"), + "license_group", ), + (("license__spdx_id", "license_group__name", "license_expression", "non_spdx_license"), "spdx_id"), ( - ( - "license__spdx_id", - "license_group__name", - "license_expression", - "non_spdx_license", - ), - "license_spdx_id", - ), - ( - ( - "license_expression", - "license_group__name", - "license__spdx_id", - "non_spdx_license", - ), + ("license_expression", "license_group__name", "license__spdx_id", "non_spdx_license"), "license_expression", ), - ( - ( - "non_spdx_license", - "license_group__name", - "license__spdx_id", - "license_expression", - ), - "non_spdx_license", - ), + (("non_spdx_license", "license_group__name", "license__spdx_id", "license_expression"), "non_spdx_license"), ( ( "numerical_evaluation_result", @@ -352,7 +331,7 @@ class LicensePolicyItemFilter(FilterSet): ), "evaluation_result", ), - ), + ) ) class Meta: @@ -379,7 +358,7 @@ class LicensePolicyMemberFilter(FilterSet): ("license_policy", "license_policy"), ("user", "user"), ("is_manager", "is_manager"), - ), + ) ) class Meta: @@ -397,7 +376,7 @@ class LicensePolicyAuthorizationGroupFilter(FilterSet): ("license_policy", "license_policy"), ("authorization_group", "authorization_group"), ("is_manager", "is_manager"), - ), + ) ) class Meta: diff --git a/backend/application/licenses/api/permissions.py b/backend/application/licenses/api/permissions.py index 521fc82a2..be8d43dc5 100644 --- a/backend/application/licenses/api/permissions.py +++ b/backend/application/licenses/api/permissions.py @@ -1,11 +1,22 @@ +from typing import Any + +from django.contrib.auth.models import AnonymousUser from django.shortcuts import get_object_or_404 from rest_framework.exceptions import NotFound from rest_framework.permissions import BasePermission +from rest_framework.request import Request +from rest_framework.views import APIView from application.access_control.queries.authorization_group import ( get_authorization_groups, ) +from application.authorization.api.permissions_base import ( + check_object_permission, + check_post_permission, +) +from application.authorization.services.roles_permissions import Permissions from application.licenses.models import ( + Concluded_License, License_Group, License_Group_Authorization_Group_Member, License_Group_Member, @@ -15,14 +26,35 @@ ) +class UserHasConcludedLicensePermission(BasePermission): + def has_permission(self, request: Request, view: APIView) -> bool: + return check_post_permission(request, Concluded_License, "product", Permissions.Concluded_License_Create) + + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: + return check_object_permission( + request=request, + object_to_check=obj, + get_permission=Permissions.Concluded_License_View, + put_permission=Permissions.Concluded_License_Edit, + delete_permission=Permissions.Concluded_License_Delete, + ) + + class UserHasLicenseGroupPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "POST": - return not request.user.is_external + user = request.user + if not user: + return False + + if isinstance(user, AnonymousUser): + return False + + return not user.is_external return True - def has_object_permission(self, request, view, obj: License_Group): + def has_object_permission(self, request: Request, view: APIView, obj: License_Group) -> bool: if request.method != "GET": 
return _has_license_group_manage_permission(request, obj) @@ -30,16 +62,14 @@ def has_object_permission(self, request, view, obj: License_Group): class UserHasLicenseGroupMemberPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "POST": - license_group = get_object_or_404( - License_Group, pk=request.data.get("license_group") - ) + license_group = get_object_or_404(License_Group, pk=request.data.get("license_group")) return _has_license_group_manage_permission(request, license_group) return True - def has_object_permission(self, request, view, obj: License_Group_Member): + def has_object_permission(self, request: Request, view: APIView, obj: License_Group_Member) -> bool: if request.method != "GET": return _has_license_group_manage_permission(request, obj.license_group) @@ -47,23 +77,26 @@ def has_object_permission(self, request, view, obj: License_Group_Member): class UserHasLicenseGroupAuthenticationGroupMemberPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "POST": - license_group = get_object_or_404( - License_Group, pk=request.data.get("license_group") - ) - - authorization_groups = get_authorization_groups().values_list( - "id", flat=True - ) - if request.data.get("authorization_group") not in authorization_groups: + license_group = get_object_or_404(License_Group, pk=request.data.get("license_group")) + + authorization_group = request.data.get("authorization_group") + if not authorization_group: + raise ValueError("No Authorization_Group provided") + + if not isinstance(authorization_group, int): + raise ValueError("Authorization_Group must be an integer") + + authorization_groups = get_authorization_groups().values_list("id", flat=True) + if authorization_group not in authorization_groups: raise NotFound("Authorization_Group not found.") return _has_license_group_manage_permission(request, license_group) return True - def has_object_permission(self, request, view, obj: License_Group_Member): + def has_object_permission(self, request: Request, view: APIView, obj: License_Group_Member) -> bool: if request.method != "GET": return _has_license_group_manage_permission(request, obj.license_group) @@ -71,13 +104,20 @@ def has_object_permission(self, request, view, obj: License_Group_Member): class UserHasLicensePolicyPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "POST": - return not request.user.is_external + user = request.user + if not user: + return False + + if isinstance(user, AnonymousUser): + return False + + return not user.is_external return True - def has_object_permission(self, request, view, obj: License_Policy): + def has_object_permission(self, request: Request, view: APIView, obj: License_Policy) -> bool: if request.method != "GET": return _has_license_policy_manage_permission(request, obj) @@ -85,16 +125,14 @@ def has_object_permission(self, request, view, obj: License_Policy): class UserHasLicensePolicyItemMemberPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "POST": - license_policy = get_object_or_404( - License_Policy, pk=request.data.get("license_policy") - ) + license_policy = get_object_or_404(License_Policy, 
pk=request.data.get("license_policy")) return _has_license_policy_manage_permission(request, license_policy) return True - def has_object_permission(self, request, view, obj: License_Policy_Member): + def has_object_permission(self, request: Request, view: APIView, obj: License_Policy_Member) -> bool: if request.method != "GET": return _has_license_policy_manage_permission(request, obj.license_policy) @@ -102,37 +140,45 @@ def has_object_permission(self, request, view, obj: License_Policy_Member): class UserHasLicensePolicyAuthorizationGroupMemberPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "POST": - license_policy = get_object_or_404( - License_Policy, pk=request.data.get("license_policy") - ) - - authorization_groups = get_authorization_groups().values_list( - "id", flat=True - ) - if request.data.get("authorization_group") not in authorization_groups: + license_policy = get_object_or_404(License_Policy, pk=request.data.get("license_policy")) + + authorization_group = request.data.get("authorization_group") + if not authorization_group: + raise ValueError("No Authorization_Group provided") + + if not isinstance(authorization_group, int): + raise ValueError("Authorization_Group must be an integer") + + authorization_groups = get_authorization_groups().values_list("id", flat=True) + if authorization_group not in authorization_groups: raise NotFound("Authorization_Group not found.") return _has_license_policy_manage_permission(request, license_policy) return True - def has_object_permission(self, request, view, obj: License_Policy_Member): + def has_object_permission(self, request: Request, view: APIView, obj: License_Policy_Member) -> bool: if request.method != "GET": return _has_license_policy_manage_permission(request, obj.license_policy) return True -def _has_license_group_manage_permission(request, license_group: License_Group) -> bool: +def _has_license_group_manage_permission(request: Request, license_group: License_Group) -> bool: user = request.user - if user and user.is_superuser: + + if not user: + return False + + if isinstance(user, AnonymousUser): + return False + + if user.is_superuser: return True - if License_Group_Member.objects.filter( - license_group=license_group, user=user, is_manager=True - ).exists(): + if License_Group_Member.objects.filter(license_group=license_group, user=user, is_manager=True).exists(): return True if License_Group_Authorization_Group_Member.objects.filter( @@ -143,16 +189,19 @@ def _has_license_group_manage_permission(request, license_group: License_Group) return False -def _has_license_policy_manage_permission( - request, license_policy: License_Policy -) -> bool: +def _has_license_policy_manage_permission(request: Request, license_policy: License_Policy) -> bool: user = request.user - if user and user.is_superuser: + + if not user: + return False + + if isinstance(user, AnonymousUser): + return False + + if user.is_superuser: return True - if License_Policy_Member.objects.filter( - license_policy=license_policy, user=user, is_manager=True - ).exists(): + if License_Policy_Member.objects.filter(license_policy=license_policy, user=user, is_manager=True).exists(): return True if License_Policy_Authorization_Group_Member.objects.filter( diff --git a/backend/application/licenses/api/serializers.py b/backend/application/licenses/api/serializers.py index 43e3094bf..ccfa7e357 100644 --- a/backend/application/licenses/api/serializers.py 
+++ b/backend/application/licenses/api/serializers.py @@ -1,7 +1,7 @@ from typing import Optional +from django.core.validators import MinValueValidator from license_expression import get_spdx_licensing -from packageurl import PackageURL from rest_framework.serializers import ( CharField, IntegerField, @@ -16,10 +16,21 @@ AuthorizationGroupListSerializer, UserListSerializer, ) -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user +from application.authorization.services.authorization import get_highest_user_role +from application.authorization.services.roles_permissions import ( + Permissions, + get_permissions_for_role, +) +from application.commons.services.functions import get_comma_separated_as_list +from application.core.api.serializers_product import ( + NestedProductSerializer, + NestedProductSerializerSmall, +) from application.core.queries.product import get_products from application.core.types import PURL_Type from application.licenses.models import ( + Concluded_License, License, License_Component, License_Component_Evidence, @@ -48,20 +59,26 @@ get_license_policy_member, get_license_policy_members, ) -from application.licenses.services.license_policy import get_ignore_component_type_list -class LicenseSerializer(ModelSerializer): +class LicenseListSerializer(ModelSerializer): spdx_id_name = SerializerMethodField() - is_in_license_group = SerializerMethodField() - is_in_license_policy = SerializerMethodField() class Meta: model = License fields = "__all__" def get_spdx_id_name(self, obj: License) -> str: - return f"{obj.spdx_id} ({obj.name})" + return f"{obj.spdx_id} / {obj.name}" + + +class LicenseSerializer(LicenseListSerializer): + is_in_license_group = SerializerMethodField() + is_in_license_policy = SerializerMethodField() + + class Meta: + model = License + fields = "__all__" def get_is_in_license_group(self, obj: License) -> bool: return License_Group.objects.filter(licenses=obj).exists() @@ -76,13 +93,6 @@ class LicenseComponentEvidenceSerializer(ModelSerializer): def get_product(self, evidence: License_Component_Evidence) -> int: return evidence.license_component.product.pk - def get_license_component_title(self, evidence: License_Component_Evidence) -> str: - if evidence.license_component.license: - return f"{evidence.license_component.license.spdx_id} ({evidence.license_component.license.name})" - if evidence.license_component.non_spdx_license: - return evidence.license_component.non_spdx_license - return "No license" - class Meta: model = License_Component_Evidence fields = "__all__" @@ -94,32 +104,17 @@ class Meta: exclude = ["license_component", "evidence"] -class LicenseComponentSerializer(ModelSerializer): - license_data = LicenseSerializer( - source="license", - read_only=True, - ) - component_purl_namespace = SerializerMethodField() +class LicenseComponentListSerializer(ModelSerializer): + component_name_version_type = SerializerMethodField() branch_name = SerializerMethodField() - license_policy_name: Optional[SerializerMethodField] = SerializerMethodField() - license_policy_id: Optional[SerializerMethodField] = SerializerMethodField() - evidences: Optional[NestedLicenseComponentEvidenceSerializer] = ( - NestedLicenseComponentEvidenceSerializer(many=True) - ) - type = SerializerMethodField() - title = SerializerMethodField() - - class Meta: - model = License_Component - fields = "__all__" + origin_service_name = SerializerMethodField() - def 
get_component_purl_namespace(self, obj: License_Component) -> Optional[str]: - if obj.component_purl: - try: - purl = PackageURL.from_string(obj.component_purl) - return purl.namespace - except ValueError: - return "" + def get_component_name_version_type(self, obj: License_Component) -> Optional[str]: + if obj.component_name_version: + component_name_version_type = obj.component_name_version + if obj.component_purl_type: + component_name_version_type += f" ({obj.component_purl_type})" + return component_name_version_type return "" @@ -129,6 +124,29 @@ def get_branch_name(self, obj: License_Component) -> str: return "" + def get_origin_service_name(self, obj: License_Component) -> str: + if obj.origin_service: + return obj.origin_service.name + + return "" + + class Meta: + model = License_Component + exclude = ["component_dependencies"] + + +class LicenseComponentSerializer(LicenseComponentListSerializer): + license_policy_name: Optional[SerializerMethodField] = SerializerMethodField() + license_policy_id: Optional[SerializerMethodField] = SerializerMethodField() + evidences: Optional[NestedLicenseComponentEvidenceSerializer] = NestedLicenseComponentEvidenceSerializer(many=True) + effective_license_type = SerializerMethodField() + title = SerializerMethodField() + permissions: Optional[SerializerMethodField] = SerializerMethodField() + + class Meta: + model = License_Component + fields = "__all__" + def get_license_policy_name(self, obj: License_Component) -> str: if obj.product.license_policy: return obj.product.license_policy.name @@ -147,27 +165,20 @@ def get_license_policy_id(self, obj: License_Component) -> int: return 0 - def get_type(self, obj: License_Component) -> str: - if obj.license: + def get_effective_license_type(self, obj: License_Component) -> str: + if obj.effective_spdx_license: return "SPDX" - if obj.license_expression: + if obj.effective_license_expression: return "Expression" - if obj.non_spdx_license: + if obj.effective_non_spdx_license: return "Non-SPDX" return "" def get_title(self, obj: License_Component) -> str: - return f"{obj.license_name} / {obj.component_name_version}" - + return f"{obj.effective_license_name} / {obj.component_name_version}" -class LicenseComponentListSerializer(LicenseComponentSerializer): - license_policy_id = None - license_policy_name = None - evidences = None - - class Meta: - model = License_Component - exclude = ["component_dependencies"] + def get_permissions(self, obj: License_Component) -> Optional[set[Permissions]]: + return get_permissions_for_role(get_highest_user_role(obj.product)) class LicenseComponentIdSerializer(ModelSerializer): @@ -177,15 +188,13 @@ class Meta: class LicenseComponentBulkDeleteSerializer(Serializer): - components = ListField( - child=IntegerField(min_value=1), min_length=0, max_length=100, required=True - ) + components = ListField(child=IntegerField(min_value=1), min_length=0, max_length=250, required=True) class LicenseComponentOverviewElementSerializer(Serializer): branch_name = CharField() - license_name = CharField() - type = CharField() + effective_license_name = CharField() + effective_license_type = CharField() evaluation_result = CharField() num_components = IntegerField() @@ -195,7 +204,57 @@ class LicenseComponentOverviewSerializer(Serializer): results = ListField(child=LicenseComponentOverviewElementSerializer()) -class LicenseGroupSerializer(ModelSerializer): +class ConcludedLicenseCreateUpdateSerializer(Serializer): + manual_concluded_spdx_license = 
IntegerField(validators=[MinValueValidator(1)], required=False, allow_null=True) + manual_concluded_non_spdx_license = CharField(max_length=255, required=False, allow_blank=True) + manual_concluded_license_expression = CharField(max_length=255, required=False, allow_blank=True) + + def validate(self, attrs: dict) -> dict: + # check exactly one attribute is set + if sum(1 for key in attrs if key.startswith("manual_concluded_") and attrs[key]) > 1: + raise ValidationError("Only one concluded license field may be set.") + + return attrs + + +class ConcludedLicenseListSerializer(ModelSerializer): + product_data: NestedProductSerializerSmall | NestedProductSerializer = NestedProductSerializerSmall( + source="product", read_only=True + ) + user_data = UserListSerializer(source="user", read_only=True) + component_name_version = SerializerMethodField() + manual_concluded_spdx_license_id = SerializerMethodField() + + def get_component_name_version(self, obj: Concluded_License) -> str: + component_name_version = obj.component_name + if obj.component_version: + component_name_version += f":{obj.component_version}" + if obj.component_purl_type: + component_name_version += f" ({obj.component_purl_type})" + return component_name_version + + def get_manual_concluded_spdx_license_id(self, obj: Concluded_License) -> str: + if obj.manual_concluded_spdx_license: + return obj.manual_concluded_spdx_license.spdx_id + + return "" + + class Meta: + model = Concluded_License + fields = "__all__" + + +class ConcludedLicenseSerializer(ConcludedLicenseListSerializer): + product_data = NestedProductSerializer(source="product", read_only=True) + + +class LicenseGroupListSerializer(ModelSerializer): + class Meta: + model = License_Group + exclude = ["licenses", "users", "authorization_groups"] + + +class LicenseGroupSerializer(LicenseGroupListSerializer): is_manager = SerializerMethodField() is_in_license_policy = SerializerMethodField() has_licenses = SerializerMethodField() @@ -209,15 +268,11 @@ class Meta: def get_is_manager(self, obj: License_Group) -> bool: user = get_current_user() - if License_Group_Member.objects.filter( - license_group=obj, user=user, is_manager=True - ).exists(): + if License_Group_Member.objects.filter(license_group=obj, user=user, is_manager=True).exists(): return True if License_Group_Authorization_Group_Member.objects.filter( - license_group=obj, - authorization_group__users=user, - is_manager=True, + license_group=obj, authorization_group__users=user, is_manager=True ).exists(): return True @@ -233,11 +288,7 @@ def get_has_users(self, obj: License_Group) -> bool: return get_license_group_members().filter(license_group=obj).exists() def get_has_authorization_groups(self, obj: License_Group) -> bool: - return ( - get_license_group_authorization_group_members() - .filter(license_group=obj) - .exists() - ) + return get_license_group_authorization_group_members().filter(license_group=obj).exists() class LicenseGroupLicenseAddRemoveSerializer(Serializer): @@ -245,39 +296,32 @@ class LicenseGroupLicenseAddRemoveSerializer(Serializer): class LicenseGroupAuthorizationGroupMemberSerializer(ModelSerializer): - license_group_data = LicenseGroupSerializer( - source="license_group", - read_only=True, - ) - authorization_group_data = AuthorizationGroupListSerializer( - source="authorization_group", read_only=True - ) + license_group_data = LicenseGroupListSerializer(source="license_group", read_only=True) + authorization_group_data = AuthorizationGroupListSerializer(source="authorization_group", 
read_only=True) class Meta: model = License_Group_Authorization_Group_Member fields = "__all__" - def validate(self, attrs: dict): + def validate(self, attrs: dict) -> dict: self.instance: License_Group_Authorization_Group_Member data_license_group: Optional[License_Group] = attrs.get("license_group") data_authorization_group = attrs.get("authorization_group") if self.instance is not None and ( (data_license_group and data_license_group != self.instance.license_group) - or ( - data_authorization_group - and data_authorization_group != self.instance.authorization_group - ) + or (data_authorization_group and data_authorization_group != self.instance.authorization_group) ): - raise ValidationError( - "License group and authorization group cannot be changed" - ) + raise ValidationError("License group and authorization group cannot be changed") if self.instance is None: - license_group_authorization_group_member = ( - get_license_group_authorization_group_member( - data_license_group, data_authorization_group - ) + if data_license_group is None: + raise ValidationError("License group is required") + if data_authorization_group is None: + raise ValidationError("Authorization group is required") + + license_group_authorization_group_member = get_license_group_authorization_group_member( + data_license_group, data_authorization_group ) if license_group_authorization_group_member: raise ValidationError( @@ -289,17 +333,14 @@ def validate(self, attrs: dict): class LicenseGroupMemberSerializer(ModelSerializer): - license_group_data = LicenseGroupSerializer( - source="license_group", - read_only=True, - ) + license_group_data = LicenseGroupListSerializer(source="license_group", read_only=True) user_data = UserListSerializer(source="user", read_only=True) class Meta: model = License_Group_Member fields = "__all__" - def validate(self, attrs: dict): + def validate(self, attrs: dict) -> dict: self.instance: License_Group_Member data_license_group: Optional[License_Group] = attrs.get("license_group") data_user = attrs.get("user") @@ -311,13 +352,14 @@ def validate(self, attrs: dict): raise ValidationError("License group and user cannot be changed") if self.instance is None: - license_group_member = get_license_group_member( - data_license_group, data_user - ) + if data_license_group is None: + raise ValidationError("License group is required") + if data_user is None: + raise ValidationError("User is required") + + license_group_member = get_license_group_member(data_license_group, data_user) if license_group_member: - raise ValidationError( - f"License group member {data_license_group} / {data_user} already exists" - ) + raise ValidationError(f"License group member {data_license_group} / {data_user} already exists") return attrs @@ -326,6 +368,20 @@ class LicenseGroupCopySerializer(Serializer): name = CharField(max_length=255, required=True) +class LicensePolicyListSerializer(ModelSerializer): + parent_name = SerializerMethodField() + + def get_parent_name(self, obj: License_Policy) -> str: + if obj.parent: + return obj.parent.name + + return "" + + class Meta: + model = License_Policy + exclude = ["users", "authorization_groups"] + + class LicensePolicySerializer(ModelSerializer): parent_name = SerializerMethodField() is_parent = SerializerMethodField() @@ -348,15 +404,11 @@ def get_is_parent(self, obj: License_Policy) -> bool: def get_is_manager(self, obj: License_Policy) -> bool: user = get_current_user() - if License_Policy_Member.objects.filter( - license_policy=obj, user=user, is_manager=True - 
).exists(): + if License_Policy_Member.objects.filter(license_policy=obj, user=user, is_manager=True).exists(): return True if License_Policy_Authorization_Group_Member.objects.filter( - license_policy=obj, - authorization_group__users=user, - is_manager=True, + license_policy=obj, authorization_group__users=user, is_manager=True ).exists(): return True @@ -375,18 +427,14 @@ def get_has_users(self, obj: License_Policy) -> bool: return get_license_policy_members().filter(license_policy=obj).exists() def get_has_authorization_groups(self, obj: License_Policy) -> bool: - return ( - get_license_policy_authorization_group_members() - .filter(license_policy=obj) - .exists() - ) + return get_license_policy_authorization_group_members().filter(license_policy=obj).exists() class Meta: model = License_Policy exclude = ["users", "authorization_groups"] def validate_ignore_component_types(self, value: str) -> str: - ignore_component_types = get_ignore_component_type_list(value) + ignore_component_types = get_comma_separated_as_list(value) for component_type in ignore_component_types: if not PURL_Type.PURL_TYPE_CHOICES.get(component_type): @@ -395,12 +443,12 @@ def validate_ignore_component_types(self, value: str) -> str: return value def validate_parent(self, value: License_Policy) -> License_Policy: - if value.parent: + if value and value.parent: raise ValidationError("A child cannot be a parent itself") return value - def update(self, instance: License_Policy, validated_data: dict): + def update(self, instance: License_Policy, validated_data: dict) -> License_Policy: parent = validated_data.get("parent") instance_has_children = instance.children.exists() if parent: @@ -415,10 +463,7 @@ def update(self, instance: License_Policy, validated_data: dict): class LicensePolicyItemSerializer(ModelSerializer): license_spdx_id = SerializerMethodField() license_group_name = SerializerMethodField() - license_policy_data = LicensePolicySerializer( - source="license_policy", - read_only=True, - ) + license_policy_data = LicensePolicyListSerializer(source="license_policy", read_only=True) class Meta: model = License_Policy_Item @@ -436,7 +481,7 @@ def get_license_group_name(self, obj: License_Policy_Item) -> str: return "" - def validate(self, attrs: dict): + def validate(self, attrs: dict) -> dict: self.instance: License_Policy_Item data_license_group = attrs.get("license_group") data_license = attrs.get("license") @@ -486,9 +531,7 @@ def validate(self, attrs: dict): pass if num_fields == 0: - raise ValidationError( - "One of license group, license, license expression or unknown license must be set" - ) + raise ValidationError("One of license group, license, license expression or unknown license must be set") if num_fields > 1: raise ValidationError( "Only one of license group, license, license expression or unknown license must be set" @@ -502,87 +545,70 @@ def validate_license_expression(self, value: str) -> str: if not expression_info.errors: value = expression_info.normalized_expression else: - raise ValidationError( - f"Invalid license expression: {expression_info.errors}" - ) + raise ValidationError(f"Invalid license expression: {expression_info.errors}") return value class LicensePolicyMemberSerializer(ModelSerializer): - license_policy_data = LicensePolicySerializer( - source="license_policy", - read_only=True, - ) + license_policy_data = LicensePolicyListSerializer(source="license_policy", read_only=True) user_data = UserListSerializer(source="user", 
read_only=True) class Meta: model = License_Policy_Member fields = "__all__" - def validate(self, attrs: dict): + def validate(self, attrs: dict) -> dict: self.instance: License_Policy_Member data_license_policy: Optional[License_Policy] = attrs.get("license_policy") data_user = attrs.get("user") if self.instance is not None and ( - ( - data_license_policy - and data_license_policy != self.instance.license_policy - ) + (data_license_policy and data_license_policy != self.instance.license_policy) or (data_user and data_user != self.instance.user) ): raise ValidationError("License policy and user cannot be changed") if self.instance is None: - license_group_member = get_license_policy_member( - data_license_policy, data_user - ) + if data_license_policy is None: + raise ValidationError("License policy is required") + if data_user is None: + raise ValidationError("User is required") + + license_group_member = get_license_policy_member(data_license_policy, data_user) if license_group_member: - raise ValidationError( - f"License policy member {data_license_policy} / {data_user} already exists" - ) + raise ValidationError(f"License policy member {data_license_policy} / {data_user} already exists") return attrs class LicensePolicyAuthorizationGroupMemberSerializer(ModelSerializer): - license_policy_data = LicensePolicySerializer( - source="license_policy", - read_only=True, - ) - authorization_group_data = AuthorizationGroupListSerializer( - source="authorization_group", read_only=True - ) + license_policy_data = LicensePolicyListSerializer(source="license_policy", read_only=True) + authorization_group_data = AuthorizationGroupListSerializer(source="authorization_group", read_only=True) class Meta: model = License_Policy_Authorization_Group_Member fields = "__all__" - def validate(self, attrs: dict): + def validate(self, attrs: dict) -> dict: self.instance: License_Policy_Authorization_Group_Member data_license_policy: Optional[License_Policy] = attrs.get("license_policy") data_authorization_group = attrs.get("authorization_group") if self.instance is not None and ( - ( - data_license_policy - and data_license_policy != self.instance.license_policy - ) - or ( - data_authorization_group - and data_authorization_group != self.instance.authorization_group - ) + (data_license_policy and data_license_policy != self.instance.license_policy) + or (data_authorization_group and data_authorization_group != self.instance.authorization_group) ): - raise ValidationError( - "License policy and authorization group cannot be changed" - ) + raise ValidationError("License policy and authorization group cannot be changed") if self.instance is None: - license_policy_authorization_group_member = ( - get_license_policy_authorization_group_member( - data_license_policy, data_authorization_group - ) + if data_license_policy is None: + raise ValidationError("License policy is required") + if data_authorization_group is None: + raise ValidationError("Authorization group is required") + + license_policy_authorization_group_member = get_license_policy_authorization_group_member( + data_license_policy, data_authorization_group ) if license_policy_authorization_group_member: raise ValidationError( diff --git a/backend/application/licenses/api/views.py b/backend/application/licenses/api/views.py index de434b0fe..56cec7f36 100644 --- a/backend/application/licenses/api/views.py +++ b/backend/application/licenses/api/views.py @@ -1,6 +1,7 @@ from dataclasses import dataclass -from typing import Optional, Tuple +from typing import 
Any, Optional, Tuple +from django.contrib.auth.models import AnonymousUser from django.db.models.query import QuerySet from django.http import HttpResponse from django_filters.rest_framework import DjangoFilterBackend @@ -8,19 +9,26 @@ from rest_framework.decorators import action from rest_framework.exceptions import NotFound, PermissionDenied, ValidationError from rest_framework.filters import SearchFilter -from rest_framework.mixins import ListModelMixin, RetrieveModelMixin +from rest_framework.mixins import DestroyModelMixin, ListModelMixin, RetrieveModelMixin from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request from rest_framework.response import Response +from rest_framework.serializers import BaseSerializer from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT from rest_framework.viewsets import GenericViewSet, ModelViewSet -from application.access_control.services.authorization import user_has_permission_or_403 -from application.access_control.services.roles_permissions import Permissions -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user +from application.authorization.services.authorization import ( + user_has_permission, + user_has_permission_or_403, +) +from application.authorization.services.roles_permissions import Permissions from application.core.models import Branch, Product from application.core.queries.branch import get_branch_by_id +from application.core.queries.component import get_component_by_id from application.core.queries.product import get_product_by_id from application.licenses.api.filters import ( + ConcludedLicenseFilter, LicenseComponentEvidenceFilter, LicenseComponentFilter, LicenseFilter, @@ -33,6 +41,7 @@ LicensePolicyMemberFilter, ) from application.licenses.api.permissions import ( + UserHasConcludedLicensePermission, UserHasLicenseGroupAuthenticationGroupMemberPermission, UserHasLicenseGroupMemberPermission, UserHasLicenseGroupPermission, @@ -41,6 +50,9 @@ UserHasLicensePolicyPermission, ) from application.licenses.api.serializers import ( + ConcludedLicenseCreateUpdateSerializer, + ConcludedLicenseListSerializer, + ConcludedLicenseSerializer, LicenseComponentEvidenceSerializer, LicenseComponentIdSerializer, LicenseComponentListSerializer, @@ -49,16 +61,20 @@ LicenseGroupAuthorizationGroupMemberSerializer, LicenseGroupCopySerializer, LicenseGroupLicenseAddRemoveSerializer, + LicenseGroupListSerializer, LicenseGroupMemberSerializer, LicenseGroupSerializer, + LicenseListSerializer, LicensePolicyAuthorizationGroupMemberSerializer, LicensePolicyCopySerializer, LicensePolicyItemSerializer, + LicensePolicyListSerializer, LicensePolicyMemberSerializer, LicensePolicySerializer, LicenseSerializer, ) from application.licenses.models import ( + Concluded_License, License, License_Component, License_Component_Evidence, @@ -70,8 +86,10 @@ License_Policy_Item, License_Policy_Member, ) +from application.licenses.queries.concluded_license import get_concluded_licenses from application.licenses.queries.license import get_license from application.licenses.queries.license_component import ( + get_license_component, get_license_component_licenses, get_license_components, ) @@ -95,26 +113,31 @@ get_license_policy_member, get_license_policy_members, ) -from application.licenses.services.export_license_policy import ( - export_license_policy_json, - export_license_policy_yaml, +from 
application.licenses.services.export_license_policy_sbom_utility import ( + export_license_policy_sbom_utility, +) +from application.licenses.services.export_license_policy_secobserve import ( + export_license_policy_secobserve_json, + export_license_policy_secobserve_yaml, ) -from application.licenses.services.license_group import ( - copy_license_group, - import_scancode_licensedb, +from application.licenses.services.license_component import ( + SPDXLicenseCache, + save_concluded_license, ) +from application.licenses.services.license_group import copy_license_group from application.licenses.services.license_policy import ( apply_license_policy, apply_license_policy_product, copy_license_policy, ) +from application.licenses.types import NO_LICENSE_INFORMATION @dataclass class LicenseComponentOverviewElement: branch_name: Optional[str] - license_name: str - type: str + effective_license_name: str + effective_license_type: str evaluation_result: str num_components: int @@ -125,22 +148,107 @@ class LicenseComponentOverview: results: list[LicenseComponentOverviewElement] +class ConcludedLicenseViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin, DestroyModelMixin): + serializer_class = ConcludedLicenseSerializer + filterset_class = ConcludedLicenseFilter + queryset = Concluded_License.objects.none() + filter_backends = [SearchFilter, DjangoFilterBackend] + permission_classes = [IsAuthenticated, UserHasConcludedLicensePermission] + + def get_queryset(self) -> QuerySet[Concluded_License]: + return ( + get_concluded_licenses() + .select_related("product") + .select_related("user") + .select_related("manual_concluded_spdx_license") + ) + + def get_serializer_class(self) -> type[BaseSerializer]: + if self.action == "list": + return ConcludedLicenseListSerializer + return super().get_serializer_class() + + class LicenseComponentViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): serializer_class = LicenseComponentSerializer filterset_class = LicenseComponentFilter queryset = License_Component.objects.none() filter_backends = [DjangoFilterBackend] - def get_serializer_class(self): + def get_serializer_class(self) -> type[BaseSerializer]: if self.action == "list": return LicenseComponentListSerializer return super().get_serializer_class() - def get_queryset(self): + def get_queryset(self) -> QuerySet[License_Component]: return ( - get_license_components().select_related("branch").select_related("license") + get_license_components() + .select_related("branch") + .select_related("origin_service") + .select_related("effective_spdx_license") + ) + + @extend_schema( + methods=["GET"], + responses={200: LicenseComponentListSerializer}, + parameters=[OpenApiParameter(name="component", type=str, required=True)], + ) + @action(detail=False, methods=["get"]) + def for_component(self, request: Request) -> Response: + component_id = request.query_params.get("component") + if not component_id: + raise ValidationError("No component id provided") + component = get_component_by_id(component_id) + if not component or not user_has_permission(component.product, Permissions.Product_View): + raise NotFound("No Component matches the given query.") + license_component = License_Component.objects.filter( + product=component.product, + branch=component.branch, + origin_service=component.origin_service, + component_name_version=component.component_name_version, + component_purl=component.component_purl, + component_cpe=component.component_cpe, + component_dependencies=component.component_dependencies, + 
component_cyclonedx_bom_link=component.component_cyclonedx_bom_link, + ).first() + + if license_component: + response_serializer = LicenseComponentListSerializer(license_component) + return Response(status=HTTP_200_OK, data=response_serializer.data) + + return Response(status=HTTP_204_NO_CONTENT) + + @extend_schema(methods=["PATCH"], request=ConcludedLicenseCreateUpdateSerializer, responses={200: None}) + @action(detail=True, methods=["patch"]) + def concluded_license(self, request: Request, pk: int) -> Response: + request_serializer = ConcludedLicenseCreateUpdateSerializer(data=request.data) + if not request_serializer.is_valid(): + raise ValidationError(request_serializer.errors) + + license_component = get_license_component(pk) + if not license_component: + raise NotFound(f"License component {pk} not found.") + + user_has_permission_or_403(license_component, Permissions.License_Component_Edit) + + manual_concluded_spdx_license_id = request_serializer.validated_data.get("manual_concluded_spdx_license") + if manual_concluded_spdx_license_id: + license_component.manual_concluded_spdx_license = get_license(manual_concluded_spdx_license_id) + if not license_component.manual_concluded_spdx_license: + raise ValidationError(f"SPDX license {manual_concluded_spdx_license_id} not found.") + else: + license_component.manual_concluded_spdx_license = None + license_component.manual_concluded_non_spdx_license = request_serializer.validated_data.get( + "manual_concluded_non_spdx_license", "" + ) + license_component.manual_concluded_license_expression = request_serializer.validated_data.get( + "manual_concluded_license_expression", "" ) + save_concluded_license(license_component) + + return Response() + @extend_schema( methods=["GET"], responses={200: LicenseComponentOverviewSerializer}, @@ -150,99 +258,98 @@ def get_queryset(self): ], ) @action(detail=False, methods=["get"]) - def license_overview(self, request): + def license_overview(self, request: Request) -> Response: product_id = request.query_params.get("product") if not product_id: raise ValidationError("No product id provided") - product = _get_product(product_id, Permissions.Product_View) - filter_branch = self._get_branch(product, request.query_params.get("branch")) - order_by_1, order_by_2, order_by_3 = self._get_ordering( - request.query_params.get("ordering") - ) + if not product_id.isdigit(): + raise ValidationError("Product id is not an integer") + product = _get_product(int(product_id), Permissions.Product_View) + + filter_branch = None + branch_id = str(request.query_params.get("branch", "")) + if branch_id: + if not branch_id.isdigit(): + raise ValidationError("Branch id is not an integer") + filter_branch = self._get_branch(product, int(branch_id)) + + order_by_1, order_by_2, order_by_3 = self._get_ordering(request.query_params.get("ordering")) license_overview_elements = get_license_component_licenses( product, filter_branch, order_by_1, order_by_2, order_by_3 ) - license_overview_elements = self._filter_data( - request, license_overview_elements - ) + license_overview_elements = self._filter_data(request, license_overview_elements) results = [] for element in license_overview_elements: - if element["license__spdx_id"]: - license_name = element["license__spdx_id"] + if element["effective_spdx_license__spdx_id"]: + effective_license_name = element["effective_spdx_license__spdx_id"] element_type = "SPDX" - elif element["license_expression"]: - license_name = element["license_expression"] + elif element["effective_license_expression"]: 
+ effective_license_name = element["effective_license_expression"] element_type = "Expression" - elif element["non_spdx_license"]: - license_name = element["non_spdx_license"] + elif element["effective_non_spdx_license"]: + effective_license_name = element["effective_non_spdx_license"] element_type = "Non-SPDX" + elif element["effective_multiple_licenses"]: + effective_license_name = element["effective_multiple_licenses"] + element_type = "Multiple" else: - license_name = "No license information" + effective_license_name = NO_LICENSE_INFORMATION element_type = "" license_component_overview_element = LicenseComponentOverviewElement( branch_name=element["branch__name"], - license_name=license_name, - type=element_type, + effective_license_name=effective_license_name, + effective_license_type=element_type, evaluation_result=element["evaluation_result"], num_components=element["id__count"], ) results.append(license_component_overview_element) - license_overview = LicenseComponentOverview( - count=len(results), - results=results, - ) + license_overview = LicenseComponentOverview(count=len(results), results=results) response_serializer = LicenseComponentOverviewSerializer(license_overview) - return Response( - status=HTTP_200_OK, - data=response_serializer.data, - ) + return Response(status=HTTP_200_OK, data=response_serializer.data) - def _get_ordering(self, ordering: str) -> Tuple[str, str, str]: + def _get_ordering(self, ordering: Optional[str]) -> Tuple[str, str, str]: if ordering and ordering == "-branch_name": - return "-branch__name", "-license_name", "-numerical_evaluation_result" + return ("-branch__name", "-effective_license_name", "-numerical_evaluation_result") if ordering and ordering == "branch_name": - return "branch__name", "license_name", "numerical_evaluation_result" + return ("branch__name", "effective_license_name", "numerical_evaluation_result") - if ordering and ordering == "-license_name": - return "-license_name", "-numerical_evaluation_result", "-branch__name" - if ordering and ordering == "license_name": - return "license_name", "numerical_evaluation_result", "branch__name" + if ordering and ordering == "-effective_license_name": + return ("-effective_license_name", "-numerical_evaluation_result", "-branch__name") + if ordering and ordering == "effective_license_name": + return ("effective_license_name", "numerical_evaluation_result", "branch__name") if ordering and ordering == "-evaluation_result": - return "-numerical_evaluation_result", "-license_name", "-branch__name" + return ("-numerical_evaluation_result", "-effective_license_name", "-branch__name") - return "numerical_evaluation_result", "license_name", "branch__name" + return "numerical_evaluation_result", "effective_license_name", "branch__name" - def _filter_data(self, request, license_overview_elements: QuerySet) -> QuerySet: - filter_license_name = request.query_params.get("license_name") - if filter_license_name: + def _filter_data(self, request: Request, license_overview_elements: QuerySet) -> QuerySet: + filter_effective_license_name = request.query_params.get("effective_license_name") + if filter_effective_license_name: license_overview_elements = license_overview_elements.filter( - license_name__icontains=filter_license_name + effective_license_name__icontains=filter_effective_license_name ) filter_evaluation_result = request.query_params.get("evaluation_result") if filter_evaluation_result: - license_overview_elements = license_overview_elements.filter( - evaluation_result=filter_evaluation_result - ) 
+ license_overview_elements = license_overview_elements.filter(evaluation_result=filter_evaluation_result) filter_component_purl_type = request.query_params.get("component_purl_type") if filter_component_purl_type: - license_overview_elements = license_overview_elements.filter( - component_purl_type=filter_component_purl_type - ) + license_overview_elements = license_overview_elements.filter(component_purl_type=filter_component_purl_type) - return license_overview_elements + filter_origin_service = request.query_params.get("origin_service") + if filter_origin_service: + license_overview_elements = license_overview_elements.filter(origin_service=filter_origin_service) - def _get_branch(self, product: Product, pk: int) -> Optional[Branch]: - if not pk: - return None + return license_overview_elements + def _get_branch(self, product: Product, pk: int) -> Branch: branch = get_branch_by_id(product, pk) if not branch: raise NotFound() @@ -258,21 +365,17 @@ class LicenseComponentIdViewSet(GenericViewSet, ListModelMixin, RetrieveModelMix queryset = License_Component.objects.none() filter_backends = [DjangoFilterBackend] - def get_queryset(self): + def get_queryset(self) -> QuerySet[License_Component]: return get_license_components() -class LicenseComponentEvidenceViewSet( - GenericViewSet, ListModelMixin, RetrieveModelMixin -): +class LicenseComponentEvidenceViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): serializer_class = LicenseComponentEvidenceSerializer filterset_class = LicenseComponentEvidenceFilter queryset = License_Component_Evidence.objects.none() - def get_queryset(self): - return get_license_component_evidences().select_related( - "license_component__product" - ) + def get_queryset(self) -> QuerySet[License_Component_Evidence]: + return get_license_component_evidences().select_related("license_component__product") class LicenseViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): @@ -282,6 +385,11 @@ class LicenseViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["spdx_id", "name"] + def get_serializer_class(self) -> type[BaseSerializer]: + if self.action == "list": + return LicenseListSerializer + return super().get_serializer_class() + class LicenseGroupViewSet(ModelViewSet): serializer_class = LicenseGroupSerializer @@ -291,17 +399,22 @@ class LicenseGroupViewSet(ModelViewSet): search_fields = ["name"] permission_classes = [IsAuthenticated, UserHasLicenseGroupPermission] - def get_queryset(self): + def get_queryset(self) -> QuerySet[License_Group]: return get_license_groups() + def get_serializer_class(self) -> type[BaseSerializer]: + if self.action == "list": + return LicenseGroupListSerializer + return super().get_serializer_class() + @extend_schema( - methods=["POST"], - request=LicenseGroupCopySerializer, - responses={HTTP_201_CREATED: LicenseGroupSerializer}, + methods=["POST"], request=LicenseGroupCopySerializer, responses={HTTP_201_CREATED: LicenseGroupSerializer} ) @action(detail=True, methods=["post"]) - def copy(self, request, pk): + def copy(self, request: Request, pk: int) -> Response: user = request.user + if isinstance(user, AnonymousUser): + raise PermissionDenied("You must be authenticated to copy a license group") if user.is_external: raise PermissionDenied("You are not allowed to copy a license group") @@ -322,18 +435,13 @@ def copy(self, request, pk): new_license_group = copy_license_group(license_group, name) - return Response( - status=HTTP_201_CREATED, - 
data=LicenseGroupSerializer(new_license_group).data, - ) + return Response(status=HTTP_201_CREATED, data=LicenseGroupSerializer(new_license_group).data) @extend_schema( - methods=["POST"], - request=LicenseGroupLicenseAddRemoveSerializer, - responses={HTTP_204_NO_CONTENT: None}, + methods=["POST"], request=LicenseGroupLicenseAddRemoveSerializer, responses={HTTP_204_NO_CONTENT: None} ) @action(detail=True, methods=["post"]) - def add_license(self, request, pk): + def add_license(self, request: Request, pk: int) -> Response: request_serializer = LicenseGroupLicenseAddRemoveSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) @@ -348,21 +456,17 @@ def add_license(self, request, pk): raise ValidationError(f"License {license_id} not found") if license_to_be_added in license_group.licenses.filter(id=license_id): - raise ValidationError( - f"License {license_to_be_added} is already in this license group" - ) + raise ValidationError(f"License {license_to_be_added} is already in this license group") license_group.licenses.add(license_to_be_added) return Response(status=HTTP_204_NO_CONTENT) @extend_schema( - methods=["POST"], - request=LicenseGroupLicenseAddRemoveSerializer, - responses={HTTP_204_NO_CONTENT: None}, + methods=["POST"], request=LicenseGroupLicenseAddRemoveSerializer, responses={HTTP_204_NO_CONTENT: None} ) @action(detail=True, methods=["post"]) - def remove_license(self, request, pk): + def remove_license(self, request: Request, pk: int) -> Response: request_serializer = LicenseGroupLicenseAddRemoveSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) @@ -380,26 +484,7 @@ def remove_license(self, request, pk): return Response(status=HTTP_204_NO_CONTENT) - @extend_schema( - methods=["POST"], - request=None, - responses={HTTP_204_NO_CONTENT: None}, - ) - @action(detail=False, methods=["post"]) - def import_scancode_licensedb(self, request): - user = request.user - if not user.is_superuser: - raise PermissionDenied( - "User is not allowed to import license groups from ScanCode LicenseDB" - ) - - import_scancode_licensedb() - - return Response(status=HTTP_204_NO_CONTENT) - - def _get_license_group( - self, pk: int, manager: Optional[bool] = False - ) -> License_Group: + def _get_license_group(self, pk: int, manager: Optional[bool] = False) -> License_Group: license_group = get_license_groups().filter(pk=pk).first() if license_group is None: raise NotFound("License group not found") @@ -427,12 +512,8 @@ class LicenseGroupMemberViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] permission_classes = [IsAuthenticated, UserHasLicenseGroupMemberPermission] - def get_queryset(self): - return ( - get_license_group_members() - .select_related("license_group") - .select_related("user") - ) + def get_queryset(self) -> QuerySet[License_Group_Member]: + return get_license_group_members().select_related("license_group").select_related("user") class LicenseGroupAuthorizationGroupMemberViewSet(ModelViewSet): @@ -440,12 +521,9 @@ class LicenseGroupAuthorizationGroupMemberViewSet(ModelViewSet): filterset_class = LicenseGroupAuthorizationGroupFilter queryset = License_Group_Authorization_Group_Member.objects.none() filter_backends = [SearchFilter, DjangoFilterBackend] - permission_classes = [ - IsAuthenticated, - UserHasLicenseGroupAuthenticationGroupMemberPermission, - ] + permission_classes = [IsAuthenticated, 
UserHasLicenseGroupAuthenticationGroupMemberPermission] - def get_queryset(self): + def get_queryset(self) -> QuerySet[License_Group_Authorization_Group_Member]: return ( get_license_group_authorization_group_members() .select_related("license_group") @@ -461,17 +539,23 @@ class LicensePolicyViewSet(ModelViewSet): search_fields = ["name"] permission_classes = [IsAuthenticated, UserHasLicensePolicyPermission] - def get_queryset(self): - return get_license_policies() + def get_queryset(self) -> QuerySet[License_Policy]: + return get_license_policies().select_related("parent") + + def get_serializer_class(self) -> type[BaseSerializer[Any]]: + if self.action == "list": + return LicensePolicyListSerializer + + return super().get_serializer_class() @extend_schema( - methods=["POST"], - request=LicensePolicyCopySerializer, - responses={HTTP_201_CREATED: LicensePolicySerializer}, + methods=["POST"], request=LicensePolicyCopySerializer, responses={HTTP_201_CREATED: LicensePolicySerializer} ) @action(detail=True, methods=["post"]) - def copy(self, request, pk): + def copy(self, request: Request, pk: int) -> Response: user = request.user + if isinstance(user, AnonymousUser): + raise PermissionDenied("You must be authenticated to copy a license policy") if user.is_external: raise PermissionDenied("You are not allowed to copy a license policy") @@ -492,88 +576,76 @@ def copy(self, request, pk): new_license_policy = copy_license_policy(license_policy, name) - return Response( - status=HTTP_201_CREATED, - data=LicensePolicySerializer(new_license_policy).data, - ) + return Response(status=HTTP_201_CREATED, data=LicensePolicySerializer(new_license_policy).data) - @extend_schema( - methods=["POST"], - request=None, - responses={HTTP_204_NO_CONTENT: None}, - ) + @extend_schema(methods=["POST"], request=None, responses={HTTP_204_NO_CONTENT: None}) @action(detail=True, methods=["post"]) - def apply(self, request, pk): + def apply(self, request: Request, pk: int) -> Response: license_policy = self._get_license_policy(pk, True) if license_policy is None: raise NotFound("License policy not found") apply_license_policy(license_policy) - return Response( - status=HTTP_204_NO_CONTENT, - ) + return Response(status=HTTP_204_NO_CONTENT) @extend_schema( methods=["POST"], request=None, responses={HTTP_204_NO_CONTENT: None}, - parameters=[ - OpenApiParameter(name="product", type=int, required=True), - ], + parameters=[OpenApiParameter(name="product", type=int, required=True)], ) @action(detail=False, methods=["post"]) - def apply_product(self, request): - product = _get_product( - request.query_params.get("product"), Permissions.Product_Edit - ) - apply_license_policy_product(product) + def apply_product(self, request: Request) -> Response: + product_id = str(request.query_params.get("product", "")) + if not product_id: + raise ValidationError("No product id provided") + if not product_id.isdigit(): + raise ValidationError("Product id is not an integer") - return Response( - status=HTTP_204_NO_CONTENT, - ) + product = _get_product(int(product_id), Permissions.Product_Edit) + apply_license_policy_product(SPDXLicenseCache(), product) - @extend_schema( - methods=["GET"], - responses={200: None}, - ) + return Response(status=HTTP_204_NO_CONTENT) + + @extend_schema(methods=["GET"], responses={200: None}) @action(detail=True, methods=["get"]) - def export_json(self, request, pk=None): + def export_json(self, request: Request, pk: int) -> HttpResponse: license_policy = self._get_license_policy(pk, False) - license_policy_export 
= export_license_policy_json(license_policy) + license_policy_export = export_license_policy_secobserve_json(license_policy) response = HttpResponse( # pylint: disable=http-response-with-content-type-json - content=license_policy_export, - content_type="application/json", - ) - response["Content-Disposition"] = ( - f"attachment; filename=license_policy_{pk}.json" + content=license_policy_export, content_type="application/json" ) + response["Content-Disposition"] = f"attachment; filename=license_policy_{pk}.json" return response - @extend_schema( - methods=["GET"], - responses={200: None}, - ) + @extend_schema(methods=["GET"], responses={200: None}) @action(detail=True, methods=["get"]) - def export_yaml(self, request, pk=None): + def export_yaml(self, request: Request, pk: int) -> HttpResponse: license_policy = self._get_license_policy(pk, False) - license_policy_export = export_license_policy_yaml(license_policy) + license_policy_export = export_license_policy_secobserve_yaml(license_policy) - response = HttpResponse( - content=license_policy_export, - content_type="application/yaml", - ) - response["Content-Disposition"] = ( - f"attachment; filename=license_policy_{pk}.yaml" + response = HttpResponse(content=license_policy_export, content_type="application/yaml") + response["Content-Disposition"] = f"attachment; filename=license_policy_{pk}.yaml" + + return response + + @extend_schema(methods=["GET"], responses={200: None}) + @action(detail=True, methods=["get"]) + def export_sbom_utility(self, request: Request, pk: int) -> HttpResponse: + license_policy = self._get_license_policy(pk, False) + license_policy_export = export_license_policy_sbom_utility(license_policy) + + response = HttpResponse( # pylint: disable=http-response-with-content-type-json + content=license_policy_export, content_type="application/json" ) + response["Content-Disposition"] = f"attachment; filename=license_policy_{pk}.json" return response - def _get_license_policy( - self, pk: int, manager: Optional[bool] = False - ) -> License_Policy: + def _get_license_policy(self, pk: int, manager: Optional[bool] = False) -> License_Policy: license_policy = get_license_policies().filter(pk=pk).first() if license_policy is None: raise NotFound("License policy not found") @@ -601,7 +673,7 @@ class LicensePolicyItemViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] permission_classes = [IsAuthenticated, UserHasLicensePolicyItemMemberPermission] - def get_queryset(self): + def get_queryset(self) -> QuerySet[License_Policy_Item]: return ( get_license_policy_items() .select_related("license_policy") @@ -617,12 +689,8 @@ class LicensePolicyMemberViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] permission_classes = [IsAuthenticated, UserHasLicensePolicyItemMemberPermission] - def get_queryset(self): - return ( - get_license_policy_members() - .select_related("license_policy") - .select_related("user") - ) + def get_queryset(self) -> QuerySet[License_Policy_Member]: + return get_license_policy_members().select_related("license_policy").select_related("user") class LicensePolicyAuthorizationGroupMemberViewSet(ModelViewSet): @@ -630,12 +698,9 @@ class LicensePolicyAuthorizationGroupMemberViewSet(ModelViewSet): filterset_class = LicensePolicyAuthorizationGroupFilter queryset = License_Policy_Authorization_Group_Member.objects.none() filter_backends = [SearchFilter, DjangoFilterBackend] - permission_classes = [ - IsAuthenticated, - 
UserHasLicensePolicyAuthorizationGroupMemberPermission, - ] + permission_classes = [IsAuthenticated, UserHasLicensePolicyAuthorizationGroupMemberPermission] - def get_queryset(self): + def get_queryset(self) -> QuerySet[License_Policy_Authorization_Group_Member]: return ( get_license_policy_authorization_group_members() .select_related("license_policy") @@ -643,7 +708,7 @@ def get_queryset(self): ) -def _get_product(product_id: int, permission: int) -> Product: +def _get_product(product_id: int, permission: Permissions) -> Product: if not product_id: raise ValidationError("No product id provided") diff --git a/backend/application/licenses/apps.py b/backend/application/licenses/apps.py index 3b7dce39f..299f1735e 100644 --- a/backend/application/licenses/apps.py +++ b/backend/application/licenses/apps.py @@ -5,7 +5,7 @@ class LicenseConfig(AppConfig): name = "application.licenses" verbose_name = "Licenses" - def ready(self): + def ready(self) -> None: try: import application.licenses.signals # noqa F401 pylint: disable=import-outside-toplevel, unused-import except ImportError: diff --git a/backend/application/licenses/management/commands/initial_license_load.py b/backend/application/licenses/management/commands/initial_license_load.py index a6f7e9d39..84f13eeb7 100644 --- a/backend/application/licenses/management/commands/initial_license_load.py +++ b/backend/application/licenses/management/commands/initial_license_load.py @@ -1,9 +1,13 @@ import logging +import traceback +from typing import Any -from django.core.management import call_command from django.core.management.base import BaseCommand from application.licenses.models import License, License_Group, License_Policy +from application.licenses.services.license import import_licenses +from application.licenses.services.license_group import import_scancode_licensedb +from application.licenses.services.license_policy import create_scancode_standard_policy logger = logging.getLogger("secobserve.licenses") @@ -12,17 +16,23 @@ class Command(BaseCommand): help = "Initial load of licenses, license groups and license policies." - def handle(self, *args, **options): + def handle(self, *args: Any, **options: Any) -> None: licenses_exist = License.objects.exists() license_groups_exist = License_Group.objects.exists() license_policies_exist = License_Policy.objects.exists() - if ( - not licenses_exist - and not license_groups_exist - and not license_policies_exist - ): - logger.info( - "Importing initial licenses, license groups and license policies..." - ) - call_command("loaddata", "application/licenses/fixtures/initial_data.json") + if not licenses_exist and not license_groups_exist and not license_policies_exist: + logger.info("Importing licenses, license groups and license policies ...") + + try: + import_licenses() + logger.info("... licenses imported from SPDX") + + import_scancode_licensedb() + logger.info("... license groups imported from ScanCode LicenseDB") + + create_scancode_standard_policy() + logger.info("... 
standard license policy created") + except Exception as e: + logger.error(str(e)) + logger.error(traceback.format_exc()) diff --git a/backend/application/licenses/migrations/0001_initial.py b/backend/application/licenses/migrations/0001_initial.py index df26a1bd8..8ad97935d 100644 --- a/backend/application/licenses/migrations/0001_initial.py +++ b/backend/application/licenses/migrations/0001_initial.py @@ -115,9 +115,7 @@ class Migration(migrations.Migration): ), ( "product", - models.ForeignKey( - on_delete=django.db.models.deletion.PROTECT, to="core.product" - ), + models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to="core.product"), ), ], ), @@ -138,9 +136,7 @@ class Migration(migrations.Migration): ("is_public", models.BooleanField(default=False)), ( "licenses", - models.ManyToManyField( - related_name="license_groups", to="licenses.license" - ), + models.ManyToManyField(related_name="license_groups", to="licenses.license"), ), ], ), diff --git a/backend/application/licenses/migrations/0005_license_component_created_and_more.py b/backend/application/licenses/migrations/0005_license_component_created_and_more.py index 67fd295a2..6c22da2a1 100644 --- a/backend/application/licenses/migrations/0005_license_component_created_and_more.py +++ b/backend/application/licenses/migrations/0005_license_component_created_and_more.py @@ -14,9 +14,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name="license_component", name="created", - field=models.DateTimeField( - auto_now_add=True, default=django.utils.timezone.now - ), + field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( diff --git a/backend/application/licenses/migrations/0011_update_license_names.py b/backend/application/licenses/migrations/0011_update_license_names.py index a6e2b6df7..3263509ef 100644 --- a/backend/application/licenses/migrations/0011_update_license_names.py +++ b/backend/application/licenses/migrations/0011_update_license_names.py @@ -4,9 +4,7 @@ def update_license_names(apps, schema_editor): License_Component = apps.get_model("licenses", "License_Component") - license_components = License_Component.objects.filter(license_name="").order_by( - "id" - ) + license_components = License_Component.objects.filter(license_name="").order_by("id") paginator = Paginator(license_components, 1000) for page_number in paginator.page_range: diff --git a/backend/application/licenses/migrations/0016_license_component_multiple_licenses.py b/backend/application/licenses/migrations/0016_license_component_multiple_licenses.py new file mode 100644 index 000000000..aa33435f2 --- /dev/null +++ b/backend/application/licenses/migrations/0016_license_component_multiple_licenses.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.7 on 2025-03-29 11:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("licenses", "0015_rename_cpe_license_component_component_cpe_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="license_component", + name="multiple_licenses", + field=models.CharField(blank=True, max_length=512), + ), + ] diff --git a/backend/application/licenses/migrations/0017_license_component_origin_service.py b/backend/application/licenses/migrations/0017_license_component_origin_service.py new file mode 100644 index 000000000..e2f1ddc2c --- /dev/null +++ b/backend/application/licenses/migrations/0017_license_component_origin_service.py @@ -0,0 +1,20 @@ +# 
Generated by Django 5.2.1 on 2025-05-10 16:39 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0062_alter_branch_osv_linux_distribution_and_more"), + ("licenses", "0016_license_component_multiple_licenses"), + ] + + operations = [ + migrations.AddField( + model_name="license_component", + name="origin_service", + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to="core.service"), + ), + ] diff --git a/backend/application/licenses/migrations/0018_rename_license_license_component_declared_license_and_more.py b/backend/application/licenses/migrations/0018_rename_license_license_component_declared_license_and_more.py new file mode 100644 index 000000000..5db7e9d17 --- /dev/null +++ b/backend/application/licenses/migrations/0018_rename_license_license_component_declared_license_and_more.py @@ -0,0 +1,192 @@ +# Generated by Django 5.2.5 on 2025-08-07 05:49 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0064_product_description_markdown"), + ("licenses", "0017_license_component_origin_service"), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.RenameField( + model_name="license_component", + old_name="license", + new_name="imported_declared_spdx_license", + ), + migrations.RenameField( + model_name="license_component", + old_name="license_expression", + new_name="imported_declared_license_expression", + ), + migrations.RenameField( + model_name="license_component", + old_name="license_name", + new_name="imported_declared_license_name", + ), + migrations.RenameField( + model_name="license_component", + old_name="multiple_licenses", + new_name="imported_declared_multiple_licenses", + ), + migrations.RenameField( + model_name="license_component", + old_name="non_spdx_license", + new_name="imported_declared_non_spdx_license", + ), + migrations.AlterField( + model_name="license_component", + name="imported_declared_license_name", + field=models.CharField(blank=True, default="No license information", max_length=255), + ), + migrations.AlterField( + model_name="license_component", + name="imported_declared_spdx_license", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="imported_declared_license_components", + to="licenses.license", + ), + ), + migrations.AddField( + model_name="license_component", + name="imported_concluded_spdx_license", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="imported_concluded_license_components", + to="licenses.license", + ), + ), + migrations.AddField( + model_name="license_component", + name="imported_concluded_license_expression", + field=models.CharField(blank=True, max_length=255), + ), + migrations.AddField( + model_name="license_component", + name="imported_concluded_license_name", + field=models.CharField(blank=True, default="No license information", max_length=255), + ), + migrations.AddField( + model_name="license_component", + name="imported_concluded_multiple_licenses", + field=models.CharField(blank=True, max_length=512), + ), + migrations.AddField( + model_name="license_component", + name="imported_concluded_non_spdx_license", + field=models.CharField(blank=True, max_length=255), + ), + 
migrations.AddField( + model_name="license_component", + name="manual_concluded_spdx_license", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="manual_concluded_license_components", + to="licenses.license", + ), + ), + migrations.AddField( + model_name="license_component", + name="manual_concluded_license_expression", + field=models.CharField(blank=True, max_length=255), + ), + migrations.AddField( + model_name="license_component", + name="manual_concluded_license_name", + field=models.CharField(blank=True, default="No license information", max_length=255), + ), + migrations.AddField( + model_name="license_component", + name="manual_concluded_non_spdx_license", + field=models.CharField(blank=True, max_length=255), + ), + migrations.AddField( + model_name="license_component", + name="effective_spdx_license", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="effective_license_components", + to="licenses.license", + ), + ), + migrations.AddField( + model_name="license_component", + name="effective_license_expression", + field=models.CharField(blank=True, max_length=255), + ), + migrations.AddField( + model_name="license_component", + name="effective_license_name", + field=models.CharField(blank=True, default="No license information", max_length=255), + ), + migrations.AddField( + model_name="license_component", + name="effective_multiple_licenses", + field=models.CharField(blank=True, max_length=512), + ), + migrations.AddField( + model_name="license_component", + name="effective_non_spdx_license", + field=models.CharField(blank=True, max_length=255), + ), + migrations.AddField( + model_name="license_component", + name="manual_concluded_comment", + field=models.CharField(blank=True, max_length=255), + ), + migrations.CreateModel( + name="Concluded_License", + fields=[ + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("component_purl_type", models.CharField(blank=True, max_length=16)), + ("component_name", models.CharField(max_length=255)), + ("component_version", models.CharField(blank=True, max_length=255)), + ("manual_concluded_license_expression", models.CharField(blank=True, max_length=255)), + ("manual_concluded_non_spdx_license", models.CharField(blank=True, max_length=255)), + ("last_updated", models.DateTimeField(auto_now=True)), + ( + "manual_concluded_spdx_license", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="manual_concluded_licenses", + to="licenses.license", + ), + ), + ( + "product", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="concluded_licenses", + to="core.product", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + related_name="concluded_licenses", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "unique_together": {("product", "component_purl_type", "component_name", "component_version")}, + }, + ), + ] diff --git a/backend/application/licenses/migrations/0019_initialize_effective_licenses.py b/backend/application/licenses/migrations/0019_initialize_effective_licenses.py new file mode 100644 index 000000000..aa69f27e2 --- /dev/null +++ b/backend/application/licenses/migrations/0019_initialize_effective_licenses.py @@ -0,0 +1,53 @@ +from django.core.paginator import Paginator +from django.db import migrations + +from 
application.licenses.services.license_component import get_identity_hash + + +def initialize_effective_licenses(apps, schema_editor): + License_Component = apps.get_model("licenses", "License_Component") + license_components = License_Component.objects.all().order_by("id") + + paginator = Paginator(license_components, 1000) + for page_number in paginator.page_range: + page = paginator.page(page_number) + updates = [] + + for license_component in page.object_list: + license_component.identity_hash = get_identity_hash(license_component) + + license_component.effective_license_name = license_component.imported_declared_license_name + license_component.effective_spdx_license = license_component.imported_declared_spdx_license + license_component.effective_license_expression = license_component.imported_declared_license_expression + license_component.effective_non_spdx_license = license_component.imported_declared_non_spdx_license + license_component.effective_multiple_licenses = license_component.imported_declared_multiple_licenses + + updates.append(license_component) + + License_Component.objects.bulk_update( + updates, + [ + "identity_hash", + "effective_license_name", + "effective_spdx_license", + "effective_license_expression", + "effective_non_spdx_license", + "effective_multiple_licenses", + ], + ) + + +class Migration(migrations.Migration): + dependencies = [ + ( + "licenses", + "0018_rename_license_license_component_declared_license_and_more", + ), + ] + + operations = [ + migrations.RunPython( + initialize_effective_licenses, + reverse_code=migrations.RunPython.noop, + ), + ] diff --git a/backend/application/licenses/migrations/0020_license_component_component_cyclonedx_bom_link.py b/backend/application/licenses/migrations/0020_license_component_component_cyclonedx_bom_link.py new file mode 100644 index 000000000..4d716ffa9 --- /dev/null +++ b/backend/application/licenses/migrations/0020_license_component_component_cyclonedx_bom_link.py @@ -0,0 +1,18 @@ +# Generated by Django 5.2.5 on 2025-08-17 14:20 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("licenses", "0019_initialize_effective_licenses"), + ] + + operations = [ + migrations.AddField( + model_name="license_component", + name="component_cyclonedx_bom_link", + field=models.CharField(blank=True, max_length=512), + ), + ] diff --git a/backend/application/licenses/models.py b/backend/application/licenses/models.py index 364a9830c..ff0d8ef05 100644 --- a/backend/application/licenses/models.py +++ b/backend/application/licenses/models.py @@ -1,3 +1,5 @@ +from typing import Any + from django.core.validators import MaxValueValidator, MinValueValidator from django.db.models import ( CASCADE, @@ -15,8 +17,11 @@ from django.utils import timezone from application.access_control.models import Authorization_Group, User -from application.core.models import Branch, Product -from application.licenses.types import License_Policy_Evaluation_Result +from application.core.models import Branch, Product, Service +from application.licenses.types import ( + NO_LICENSE_INFORMATION, + License_Policy_Evaluation_Result, +) class License(Model): @@ -27,11 +32,9 @@ class License(Model): is_deprecated = BooleanField(null=True) class Meta: - indexes = [ - Index(fields=["name"]), - ] + indexes = [Index(fields=["name"])] - def __str__(self): + def __str__(self) -> str: return self.spdx_id @@ -41,10 +44,7 @@ class License_Group(Model): is_public = BooleanField(default=False) licenses = 
ManyToManyField(License, related_name="license_groups") users: ManyToManyField = ManyToManyField( - User, - through="License_Group_Member", - related_name="license_groups", - blank=True, + User, through="License_Group_Member", related_name="license_groups", blank=True ) authorization_groups: ManyToManyField = ManyToManyField( Authorization_Group, @@ -53,47 +53,35 @@ class License_Group(Model): blank=True, ) - def __str__(self): + def __str__(self) -> str: return self.name class License_Group_Member(Model): - license_group = ForeignKey( - License_Group, related_name="license_group_members", on_delete=CASCADE - ) + license_group = ForeignKey(License_Group, related_name="license_group_members", on_delete=CASCADE) user = ForeignKey(User, related_name="license_group_members", on_delete=CASCADE) is_manager = BooleanField(default=False) class Meta: - unique_together = ( - "license_group", - "user", - ) + unique_together = ("license_group", "user") - def __str__(self): + def __str__(self) -> str: return f"{self.license_group} / {self.user}" class License_Group_Authorization_Group_Member(Model): license_group = ForeignKey( - License_Group, - related_name="license_group_authorization_group_members", - on_delete=CASCADE, + License_Group, related_name="license_group_authorization_group_members", on_delete=CASCADE ) authorization_group = ForeignKey( - Authorization_Group, - related_name="license_group_authorization_group_members", - on_delete=CASCADE, + Authorization_Group, related_name="license_group_authorization_group_members", on_delete=CASCADE ) is_manager = BooleanField(default=False) class Meta: - unique_together = ( - "license_group", - "authorization_group", - ) + unique_together = ("license_group", "authorization_group") - def __str__(self): + def __str__(self) -> str: return f"{self.license_group} / {self.authorization_group}" @@ -101,9 +89,7 @@ class License_Component(Model): identity_hash = CharField(max_length=64) product = ForeignKey(Product, related_name="license_components", on_delete=PROTECT) - branch = ForeignKey( - Branch, related_name="license_components", on_delete=CASCADE, null=True - ) + branch = ForeignKey(Branch, related_name="license_components", on_delete=CASCADE, null=True) upload_filename = CharField(max_length=255, blank=True) component_name = CharField(max_length=255) @@ -113,74 +99,102 @@ class License_Component(Model): component_purl_type = CharField(max_length=16, blank=True) component_cpe = CharField(max_length=255, blank=True) component_dependencies = TextField(max_length=32768, blank=True) + component_cyclonedx_bom_link = CharField(max_length=512, blank=True) - license_name = CharField(max_length=255, blank=True) - license = ForeignKey( - License, - related_name="license_components", - on_delete=CASCADE, - blank=True, - null=True, + imported_declared_license_name = CharField(max_length=255, blank=True, default=NO_LICENSE_INFORMATION) + imported_declared_spdx_license = ForeignKey( + License, related_name="imported_declared_license_components", on_delete=PROTECT, blank=True, null=True ) - license_expression = CharField(max_length=255, blank=True) - non_spdx_license = CharField(max_length=255, blank=True) - evaluation_result = CharField( - max_length=16, - choices=License_Policy_Evaluation_Result.RESULT_CHOICES, - blank=True, + imported_declared_license_expression = CharField(max_length=255, blank=True) + imported_declared_non_spdx_license = CharField(max_length=255, blank=True) + imported_declared_multiple_licenses = CharField(max_length=512, blank=True) + + 
imported_concluded_license_name = CharField(max_length=255, blank=True, default=NO_LICENSE_INFORMATION) + imported_concluded_spdx_license = ForeignKey( + License, related_name="imported_concluded_license_components", on_delete=PROTECT, blank=True, null=True + ) + imported_concluded_license_expression = CharField(max_length=255, blank=True) + imported_concluded_non_spdx_license = CharField(max_length=255, blank=True) + imported_concluded_multiple_licenses = CharField(max_length=512, blank=True) + + manual_concluded_license_name = CharField(max_length=255, blank=True, default=NO_LICENSE_INFORMATION) + manual_concluded_spdx_license = ForeignKey( + License, related_name="manual_concluded_license_components", on_delete=PROTECT, blank=True, null=True ) - numerical_evaluation_result = IntegerField( - validators=[MinValueValidator(1), MaxValueValidator(5)] + manual_concluded_license_expression = CharField(max_length=255, blank=True) + manual_concluded_non_spdx_license = CharField(max_length=255, blank=True) + manual_concluded_comment = CharField(max_length=255, blank=True) + + effective_license_name = CharField(max_length=255, blank=True, default=NO_LICENSE_INFORMATION) + effective_spdx_license = ForeignKey( + License, related_name="effective_license_components", on_delete=PROTECT, blank=True, null=True ) + effective_license_expression = CharField(max_length=255, blank=True) + effective_non_spdx_license = CharField(max_length=255, blank=True) + effective_multiple_licenses = CharField(max_length=512, blank=True) + + evaluation_result = CharField(max_length=16, choices=License_Policy_Evaluation_Result.RESULT_CHOICES, blank=True) + numerical_evaluation_result = IntegerField(validators=[MinValueValidator(1), MaxValueValidator(5)]) + + origin_service = ForeignKey(Service, on_delete=PROTECT, null=True) created = DateTimeField(auto_now_add=True) import_last_seen = DateTimeField(default=timezone.now) last_change = DateTimeField(default=timezone.now) - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.unsaved_license = "" - self.unsaved_evidences = [] + self.unsaved_declared_licenses: list[str] = [] + self.unsaved_concluded_licenses: list[str] = [] + self.unsaved_evidences: list[list[str]] = [] - def __str__(self): + def __str__(self) -> str: return self.component_name_version - def save(self, *args, **kwargs) -> None: - self.numerical_evaluation_result = ( - License_Policy_Evaluation_Result.NUMERICAL_RESULTS.get( - self.evaluation_result, 3 - ) - ) - return super().save(*args, **kwargs) + def __setattr__(self, attrname: str, val: Any) -> None: + super().__setattr__(attrname, val) + + if attrname == "evaluation_result": + self.numerical_evaluation_result = License_Policy_Evaluation_Result.NUMERICAL_RESULTS.get(val, 3) class License_Component_Evidence(Model): - license_component = ForeignKey( - License_Component, related_name="evidences", on_delete=CASCADE - ) + license_component = ForeignKey(License_Component, related_name="evidences", on_delete=CASCADE) name = CharField(max_length=255) evidence = TextField() class Meta: - indexes = [ - Index(fields=["name"]), - ] + indexes = [Index(fields=["name"])] -class License_Policy(Model): - parent = ForeignKey( - "self", on_delete=PROTECT, related_name="children", null=True, blank=True +class Concluded_License(Model): + product = ForeignKey(Product, related_name="concluded_licenses", on_delete=CASCADE) + component_purl_type = CharField(max_length=16, blank=True) + component_name = 
CharField(max_length=255) + component_version = CharField(max_length=255, blank=True) + + manual_concluded_spdx_license = ForeignKey( + License, related_name="manual_concluded_licenses", on_delete=CASCADE, blank=True, null=True ) + manual_concluded_license_expression = CharField(max_length=255, blank=True) + manual_concluded_non_spdx_license = CharField(max_length=255, blank=True) + + user = ForeignKey(User, related_name="concluded_licenses", on_delete=PROTECT) + last_updated = DateTimeField(auto_now=True) + + class Meta: + unique_together = ("product", "component_purl_type", "component_name", "component_version") + + +class License_Policy(Model): + parent = ForeignKey("self", on_delete=PROTECT, related_name="children", null=True, blank=True) name = CharField(max_length=255, unique=True) description = TextField(max_length=2048, blank=True) is_public = BooleanField(default=False) ignore_component_types = CharField(max_length=255, blank=True) users: ManyToManyField = ManyToManyField( - User, - through="License_Policy_Member", - related_name="license_policies", - blank=True, + User, through="License_Policy_Member", related_name="license_policies", blank=True ) authorization_groups: ManyToManyField = ManyToManyField( Authorization_Group, @@ -189,83 +203,53 @@ class License_Policy(Model): blank=True, ) - def __str__(self): + def __str__(self) -> str: return self.name class License_Policy_Item(Model): - license_policy = ForeignKey( - License_Policy, related_name="license_policy_items", on_delete=CASCADE - ) + license_policy = ForeignKey(License_Policy, related_name="license_policy_items", on_delete=CASCADE) license_group = ForeignKey( - License_Group, - related_name="license_policy_items", - on_delete=PROTECT, - blank=True, - null=True, - ) - license = ForeignKey( - License, - related_name="license_policy_items", - on_delete=PROTECT, - blank=True, - null=True, + License_Group, related_name="license_policy_items", on_delete=PROTECT, blank=True, null=True ) + license = ForeignKey(License, related_name="license_policy_items", on_delete=PROTECT, blank=True, null=True) license_expression = CharField(max_length=255, blank=True) non_spdx_license = CharField(max_length=255, blank=True) - evaluation_result = CharField( - max_length=16, choices=License_Policy_Evaluation_Result.RESULT_CHOICES - ) - numerical_evaluation_result = IntegerField( - validators=[MinValueValidator(1), MaxValueValidator(5)] - ) + evaluation_result = CharField(max_length=16, choices=License_Policy_Evaluation_Result.RESULT_CHOICES) + numerical_evaluation_result = IntegerField(validators=[MinValueValidator(1), MaxValueValidator(5)]) comment = CharField(max_length=255, blank=True) - def save(self, *args, **kwargs) -> None: - self.numerical_evaluation_result = ( - License_Policy_Evaluation_Result.NUMERICAL_RESULTS.get( - self.evaluation_result, License_Policy_Evaluation_Result.RESULT_UNKNOWN - ) + def save(self, *args: Any, **kwargs: Any) -> None: + self.numerical_evaluation_result = License_Policy_Evaluation_Result.NUMERICAL_RESULTS.get( + self.evaluation_result, License_Policy_Evaluation_Result.RESULT_UNKNOWN ) return super().save(*args, **kwargs) class License_Policy_Member(Model): - license_policy = ForeignKey( - License_Policy, related_name="license_policy_members", on_delete=CASCADE - ) + license_policy = ForeignKey(License_Policy, related_name="license_policy_members", on_delete=CASCADE) user = ForeignKey(User, related_name="license_policy_members", on_delete=CASCADE) is_manager = BooleanField(default=False) class Meta: - 
unique_together = ( - "license_policy", - "user", - ) + unique_together = ("license_policy", "user") - def __str__(self): + def __str__(self) -> str: return f"{self.license_policy} / {self.user}" class License_Policy_Authorization_Group_Member(Model): license_policy = ForeignKey( - License_Policy, - related_name="license_policy_authorization_group_members", - on_delete=CASCADE, + License_Policy, related_name="license_policy_authorization_group_members", on_delete=CASCADE ) authorization_group = ForeignKey( - Authorization_Group, - related_name="license_policy_authorization_group_members", - on_delete=CASCADE, + Authorization_Group, related_name="license_policy_authorization_group_members", on_delete=CASCADE ) is_manager = BooleanField(default=False) class Meta: - unique_together = ( - "license_policy", - "authorization_group", - ) + unique_together = ("license_policy", "authorization_group") - def __str__(self): + def __str__(self) -> str: return f"{self.license_policy} / {self.authorization_group}" diff --git a/backend/application/licenses/queries/concluded_license.py b/backend/application/licenses/queries/concluded_license.py new file mode 100644 index 000000000..15029b28f --- /dev/null +++ b/backend/application/licenses/queries/concluded_license.py @@ -0,0 +1,52 @@ +from typing import Optional + +from django.db.models import Exists, OuterRef, Q +from django.db.models.query import QuerySet + +from application.access_control.services.current_user import get_current_user +from application.core.models import Product_Authorization_Group_Member, Product_Member +from application.licenses.models import Concluded_License + + +def get_concluded_license(concluded_license_id: int) -> Optional[Concluded_License]: + try: + return Concluded_License.objects.get(id=concluded_license_id) + except Concluded_License.DoesNotExist: + return None + + +def get_concluded_licenses() -> QuerySet[Concluded_License]: + user = get_current_user() + + if user is None: + return Concluded_License.objects.none() + + components = Concluded_License.objects.all() + + if not user.is_superuser: + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) + + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), authorization_group__users=user + ) + + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), authorization_group__users=user + ) + + components = components.annotate( + product__member=Exists(product_members), + product__product_group__member=Exists(product_group_members), + product__authorization_group_member=Exists(product_authorization_group_members), + product__product_group_authorization_group_member=Exists(product_group_authorization_group_members), + ) + + components = components.filter( + Q(product__member=True) + | Q(product__product_group__member=True) + | Q(product__authorization_group_member=True) + | Q(product__product_group_authorization_group_member=True) + ) + + return components diff --git a/backend/application/licenses/queries/license_component.py b/backend/application/licenses/queries/license_component.py index 7dc8ea5c4..1f176ba44 100644 --- a/backend/application/licenses/queries/license_component.py +++ b/backend/application/licenses/queries/license_component.py @@ -3,7 +3,7 @@ from django.db.models 
import Count, Exists, OuterRef, Q from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import ( Branch, Product, @@ -13,6 +13,13 @@ from application.licenses.models import License_Component +def get_license_component(license_component_id: int) -> Optional[License_Component]: + try: + return License_Component.objects.get(id=license_component_id) + except License_Component.DoesNotExist: + return None + + def get_license_components() -> QuerySet[License_Component]: user = get_current_user() @@ -22,38 +29,22 @@ def get_license_components() -> QuerySet[License_Component]: components = License_Component.objects.all() if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("product_id"), - user=user, - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product__product_group"), - user=user, - ) + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), authorization_group__users=user ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), authorization_group__users=user ) components = components.annotate( product__member=Exists(product_members), product__product_group__member=Exists(product_group_members), - product__authorization_group_member=Exists( - product_authorization_group_members - ), - product__product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), + product__authorization_group_member=Exists(product_authorization_group_members), + product__product_group_authorization_group_member=Exists(product_group_authorization_group_members), ) components = components.filter( @@ -67,30 +58,23 @@ def get_license_components() -> QuerySet[License_Component]: def get_license_component_licenses( - product: Product, - branch: Optional[Branch], - order_by_1: str, - order_by2: str, - order_by_3: str, + product: Product, branch: Optional[Branch], order_by_1: str, order_by2: str, order_by_3: str ) -> QuerySet: - license_components = get_license_components().filter( - product=product, - ) + license_components = get_license_components().filter(product=product) if branch: license_components = license_components.filter(branch=branch) license_components_overview = license_components.values( "branch__name", - "license__spdx_id", - "license__name", - "license_expression", - "non_spdx_license", + "effective_spdx_license__spdx_id", + "effective_spdx_license__name", + "effective_license_expression", + "effective_non_spdx_license", + "effective_multiple_licenses", "evaluation_result", ).annotate(Count("id")) if order_by_1: return license_components_overview.order_by(order_by_1, order_by2, order_by_3) - return 
license_components_overview.order_by( - "numerical_evaluation_result", "license_name", "branch__name" - ) + return license_components_overview.order_by("numerical_evaluation_result", "effective_license_name", "branch__name") diff --git a/backend/application/licenses/queries/license_component_evidence.py b/backend/application/licenses/queries/license_component_evidence.py index b10fd1da0..e638aba6d 100644 --- a/backend/application/licenses/queries/license_component_evidence.py +++ b/backend/application/licenses/queries/license_component_evidence.py @@ -1,7 +1,7 @@ from django.db.models import Exists, OuterRef, Q from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import Product_Authorization_Group_Member, Product_Member from application.licenses.models import License_Component_Evidence @@ -15,37 +15,23 @@ def get_license_component_evidences() -> QuerySet[License_Component_Evidence]: components = License_Component_Evidence.objects.all() if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("license_component__product_id"), - user=user, - ) + product_members = Product_Member.objects.filter(product=OuterRef("license_component__product_id"), user=user) product_group_members = Product_Member.objects.filter( - product=OuterRef("license_component__product__product_group"), - user=user, + product=OuterRef("license_component__product__product_group"), user=user ) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("license_component__product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("license_component__product_id"), authorization_group__users=user ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("license_component__product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("license_component__product__product_group"), authorization_group__users=user ) components = components.annotate( license_component__product__member=Exists(product_members), - license_component__product__product_group__member=Exists( - product_group_members - ), - license_component__product__authorization_group_member=Exists( - product_authorization_group_members - ), + license_component__product__product_group__member=Exists(product_group_members), + license_component__product__authorization_group_member=Exists(product_authorization_group_members), license_component__product__product_group_authorization_group_member=Exists( product_group_authorization_group_members ), @@ -55,9 +41,7 @@ def get_license_component_evidences() -> QuerySet[License_Component_Evidence]: Q(license_component__product__member=True) | Q(license_component__product__product_group__member=True) | Q(license_component__product__authorization_group_member=True) - | Q( - license_component__product__product_group_authorization_group_member=True - ) + | Q(license_component__product__product_group_authorization_group_member=True) ) return components diff --git a/backend/application/licenses/queries/license_group.py b/backend/application/licenses/queries/license_group.py index 
b15db6e4c..49856c836 100644 --- a/backend/application/licenses/queries/license_group.py +++ b/backend/application/licenses/queries/license_group.py @@ -3,7 +3,7 @@ from django.db.models import Q from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.licenses.models import License_Group from application.licenses.queries.license_policy_item import get_license_policy_items diff --git a/backend/application/licenses/queries/license_group_authorization_group_member.py b/backend/application/licenses/queries/license_group_authorization_group_member.py index 9c4f7f0aa..fda264809 100644 --- a/backend/application/licenses/queries/license_group_authorization_group_member.py +++ b/backend/application/licenses/queries/license_group_authorization_group_member.py @@ -6,7 +6,7 @@ from application.access_control.queries.authorization_group import ( get_authorization_groups, ) -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.licenses.models import ( License_Group, License_Group_Authorization_Group_Member, @@ -25,17 +25,13 @@ def get_license_group_authorization_group_member( return None -def get_license_group_authorization_group_members() -> ( - QuerySet[License_Group_Authorization_Group_Member] -): +def get_license_group_authorization_group_members() -> QuerySet[License_Group_Authorization_Group_Member]: user = get_current_user() if user is None: return License_Group_Authorization_Group_Member.objects.none() - license_group_authorization_group_members = ( - License_Group_Authorization_Group_Member.objects.all().order_by("id") - ) + license_group_authorization_group_members = License_Group_Authorization_Group_Member.objects.all().order_by("id") if user.is_superuser: return license_group_authorization_group_members diff --git a/backend/application/licenses/queries/license_group_member.py b/backend/application/licenses/queries/license_group_member.py index 9ad7fc6ba..eb543c503 100644 --- a/backend/application/licenses/queries/license_group_member.py +++ b/backend/application/licenses/queries/license_group_member.py @@ -4,14 +4,12 @@ from application.access_control.models import User from application.access_control.queries.user import get_users -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.licenses.models import License_Group, License_Group_Member from application.licenses.queries.license_group import get_license_groups -def get_license_group_member( - license_group: License_Group, user: User -) -> Optional[License_Group_Member]: +def get_license_group_member(license_group: License_Group, user: User) -> Optional[License_Group_Member]: try: return License_Group_Member.objects.get(license_group=license_group, user=user) except License_Group_Member.DoesNotExist: @@ -32,6 +30,4 @@ def get_license_group_members() -> QuerySet[License_Group_Member]: license_groups = get_license_groups() users = get_users() - return license_group_members.filter( - license_group__in=license_groups, user__in=users - ) + return license_group_members.filter(license_group__in=license_groups, user__in=users) diff --git a/backend/application/licenses/queries/license_policy.py b/backend/application/licenses/queries/license_policy.py index 
4545f3260..78cf68351 100644 --- a/backend/application/licenses/queries/license_policy.py +++ b/backend/application/licenses/queries/license_policy.py @@ -3,7 +3,7 @@ from django.db.models import Q from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.queries.product import get_products from application.licenses.models import License_Policy @@ -29,8 +29,5 @@ def get_license_policies() -> QuerySet[License_Policy]: products = get_products(is_product_group=False) return license_policies.filter( - Q(users=user) - | Q(authorization_groups__users=user) - | Q(is_public=True) - | Q(product__in=products) + Q(users=user) | Q(authorization_groups__users=user) | Q(is_public=True) | Q(product__in=products) ).distinct() diff --git a/backend/application/licenses/queries/license_policy_authorization_group_member.py b/backend/application/licenses/queries/license_policy_authorization_group_member.py index dfdbc62d1..84ce6bb55 100644 --- a/backend/application/licenses/queries/license_policy_authorization_group_member.py +++ b/backend/application/licenses/queries/license_policy_authorization_group_member.py @@ -6,7 +6,7 @@ from application.access_control.queries.authorization_group import ( get_authorization_groups, ) -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.licenses.models import ( License_Policy, License_Policy_Authorization_Group_Member, @@ -25,17 +25,13 @@ def get_license_policy_authorization_group_member( return None -def get_license_policy_authorization_group_members() -> ( - QuerySet[License_Policy_Authorization_Group_Member] -): +def get_license_policy_authorization_group_members() -> QuerySet[License_Policy_Authorization_Group_Member]: user = get_current_user() if user is None: return License_Policy_Authorization_Group_Member.objects.none() - license_policy_authorization_group_members = ( - License_Policy_Authorization_Group_Member.objects.all().order_by("id") - ) + license_policy_authorization_group_members = License_Policy_Authorization_Group_Member.objects.all().order_by("id") if user.is_superuser: return license_policy_authorization_group_members @@ -43,6 +39,5 @@ def get_license_policy_authorization_group_members() -> ( authorization_groups = get_authorization_groups() license_policies = get_license_policies() return license_policy_authorization_group_members.filter( - authorization_group__in=authorization_groups, - license_policy__in=license_policies, + authorization_group__in=authorization_groups, license_policy__in=license_policies ) diff --git a/backend/application/licenses/queries/license_policy_item.py b/backend/application/licenses/queries/license_policy_item.py index 130b3b613..3f3a88a11 100644 --- a/backend/application/licenses/queries/license_policy_item.py +++ b/backend/application/licenses/queries/license_policy_item.py @@ -1,6 +1,6 @@ from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.licenses.models import License_Policy_Item from application.licenses.queries.license_policy import get_license_policies diff --git a/backend/application/licenses/queries/license_policy_member.py 
b/backend/application/licenses/queries/license_policy_member.py index a6114f005..f9e262679 100644 --- a/backend/application/licenses/queries/license_policy_member.py +++ b/backend/application/licenses/queries/license_policy_member.py @@ -4,18 +4,14 @@ from application.access_control.models import User from application.access_control.queries.user import get_users -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.licenses.models import License_Policy, License_Policy_Member from application.licenses.queries.license_policy import get_license_policies -def get_license_policy_member( - license_policy: License_Policy, user: User -) -> Optional[License_Policy_Member]: +def get_license_policy_member(license_policy: License_Policy, user: User) -> Optional[License_Policy_Member]: try: - return License_Policy_Member.objects.get( - license_policy=license_policy, user=user - ) + return License_Policy_Member.objects.get(license_policy=license_policy, user=user) except License_Policy_Member.DoesNotExist: return None @@ -33,6 +29,4 @@ def get_license_policy_members() -> QuerySet[License_Policy_Member]: license_policies = get_license_policies() users = get_users() - return license_policy_members.filter( - license_policy__in=license_policies, user__in=users - ) + return license_policy_members.filter(license_policy__in=license_policies, user__in=users) diff --git a/backend/application/licenses/services/concluded_license.py b/backend/application/licenses/services/concluded_license.py new file mode 100644 index 000000000..b39f8e122 --- /dev/null +++ b/backend/application/licenses/services/concluded_license.py @@ -0,0 +1,130 @@ +from application.access_control.services.current_user import get_current_user +from application.core.models import Product +from application.licenses.models import Concluded_License, License_Component +from application.licenses.types import NO_LICENSE_INFORMATION + + +class ConcludeLicenseApplicator: + def __init__(self, product: Product) -> None: + self.product_name_version: dict[str, Concluded_License] = {} + self.product_name: dict[str, Concluded_License] = {} + self.product_group_name_version: dict[str, Concluded_License] = {} + self.product_group_name: dict[str, Concluded_License] = {} + + concluded_licenses = Concluded_License.objects.filter(product=product).order_by( + "last_updated", "component_purl_type", "component_name", "component_version" + ) + for concluded_license in concluded_licenses: + self.product_name_version[ + f"{concluded_license.component_purl_type}|{concluded_license.component_name}|" + + f"{concluded_license.component_version}" + ] = concluded_license + self.product_name[f"{concluded_license.component_purl_type}|{concluded_license.component_name}"] = ( + concluded_license + ) + + product_group_products = [] + if product.product_group: + product_group_products = list( + Product.objects.filter(product_group=product.product_group).exclude(pk=product.pk) + ) + concluded_licenses = Concluded_License.objects.filter(product__in=product_group_products).order_by( + "last_updated", "component_purl_type", "component_name", "component_version" + ) + for concluded_license in concluded_licenses: + self.product_group_name_version[ + f"{concluded_license.component_purl_type}|{concluded_license.component_name}|" + + f"{concluded_license.component_version}" + ] = concluded_license + self.product_group_name[ + 
f"{concluded_license.component_purl_type}|{concluded_license.component_name}" + ] = concluded_license + + def apply_concluded_license(self, component: License_Component) -> None: + concluded_license = None + manual_concluded_comment = "" + + concluded_license = self.product_name_version.get( + f"{component.component_purl_type}|{component.component_name}|{component.component_version}" + ) + if concluded_license: + manual_concluded_comment = f"Set manually by {str(concluded_license.user)}" + + if not concluded_license: + concluded_license = self.product_group_name_version.get( + f"{component.component_purl_type}|{component.component_name}|{component.component_version}" + ) + if concluded_license: + manual_concluded_comment = ( + f"Copied from product {concluded_license.product}, set by {str(concluded_license.user)}" + ) + + if not concluded_license: + concluded_license = self.product_name.get(f"{component.component_purl_type}|{component.component_name}") + if concluded_license: + manual_concluded_comment = ( + f"Copied from version {concluded_license.component_version}, set by {str(concluded_license.user)}" + ) + + if not concluded_license: + concluded_license = self.product_group_name.get( + f"{component.component_purl_type}|{component.component_name}" + ) + if concluded_license: + manual_concluded_comment = ( + f"Copied from product {concluded_license.product} and " + + f"version {concluded_license.component_version}, set by {str(concluded_license.user)}" + ) + + if concluded_license: + if ( + concluded_license.manual_concluded_spdx_license + and component.effective_spdx_license != concluded_license.manual_concluded_spdx_license + ): + component.manual_concluded_spdx_license = concluded_license.manual_concluded_spdx_license + component.manual_concluded_license_name = concluded_license.manual_concluded_spdx_license.spdx_id + component.manual_concluded_comment = manual_concluded_comment + elif ( + concluded_license.manual_concluded_license_expression + and component.effective_license_expression != concluded_license.manual_concluded_license_expression + ): + component.manual_concluded_license_expression = concluded_license.manual_concluded_license_expression + component.manual_concluded_license_name = concluded_license.manual_concluded_license_expression + component.manual_concluded_comment = manual_concluded_comment + elif ( + concluded_license.manual_concluded_non_spdx_license + and component.effective_non_spdx_license != concluded_license.manual_concluded_non_spdx_license + ): + component.manual_concluded_non_spdx_license = concluded_license.manual_concluded_non_spdx_license + component.manual_concluded_license_name = concluded_license.manual_concluded_non_spdx_license + component.manual_concluded_comment = manual_concluded_comment + + +def update_concluded_license(component: License_Component) -> None: + if component.manual_concluded_license_name == NO_LICENSE_INFORMATION: + component.manual_concluded_comment = "" + try: + concluded_license = Concluded_License.objects.get( + product=component.product, + component_purl_type=component.component_purl_type, + component_name=component.component_name, + component_version=component.component_version, + ) + concluded_license.delete() + + except Concluded_License.DoesNotExist: + pass + else: + concluded_license, _ = Concluded_License.objects.update_or_create( + product=component.product, + component_purl_type=component.component_purl_type, + component_name=component.component_name, + component_version=component.component_version, + defaults={ + 
"manual_concluded_spdx_license": component.manual_concluded_spdx_license, + "manual_concluded_license_expression": component.manual_concluded_license_expression, + "manual_concluded_non_spdx_license": component.manual_concluded_non_spdx_license, + "user": get_current_user(), + }, + ) + component.manual_concluded_comment = f"Set manually by {str(concluded_license.user)}" diff --git a/backend/application/licenses/services/export_license_components.py b/backend/application/licenses/services/export_license_components.py index 3f8a961ad..761c62460 100644 --- a/backend/application/licenses/services/export_license_components.py +++ b/backend/application/licenses/services/export_license_components.py @@ -9,47 +9,37 @@ def export_license_components_excel(product: Product) -> Workbook: license_components = _get_license_components(product) - return export_excel( - license_components, "License Components", _get_excludes(), _get_foreign_keys() - ) + return export_excel(license_components, "License Components", _get_excludes(), _get_foreign_keys()) def export_license_components_csv(response: HttpResponse, product: Product) -> None: license_components = _get_license_components(product) - export_csv( - response, - license_components, - _get_excludes(), - _get_foreign_keys(), - ) + export_csv(response, license_components, _get_excludes(), _get_foreign_keys()) def _get_license_components(product: Product) -> QuerySet: if product.is_product_group: - license_components = License_Component.objects.filter( - product__product_group=product - ) + license_components = License_Component.objects.filter(product__product_group=product) else: license_components = License_Component.objects.filter(product=product) license_components = license_components.order_by( - "numerical_evaluation_result", - "license_name", - "component_name_version", + "numerical_evaluation_result", "effective_license_name", "component_name_version" ) return license_components -def _get_excludes(): +def _get_excludes() -> list[str]: return [ "identity_hash", "pk", "objects", - "unsaved_license", + "unsaved_declared_licenses", + "unsaved_concluded_licenses", "unsaved_evidences", ] -def _get_foreign_keys(): - return ["branch", "license", "product"] +def _get_foreign_keys() -> list[str]: + return ["branch", "declared_spdx_license", "concluded_spdx_license", "product", "origin_service"] diff --git a/backend/application/licenses/services/export_license_policy_sbom_utility.py b/backend/application/licenses/services/export_license_policy_sbom_utility.py new file mode 100644 index 000000000..2cdbdacd5 --- /dev/null +++ b/backend/application/licenses/services/export_license_policy_sbom_utility.py @@ -0,0 +1,118 @@ +import logging +from dataclasses import dataclass +from typing import Optional + +from application.commons.services.export import object_to_json +from application.licenses.models import License, License_Policy +from application.licenses.services.license_policy import ( + LicensePolicyEvaluationResult, + get_license_evaluation_results_for_license_policy, +) +from application.licenses.types import License_Policy_Evaluation_Result + +logger = logging.getLogger("secobserve.licenses") + + +class USAGE_POLICY: + POLICY_ALLOW = "allow" + POLICY_DENY = "deny" + POLICY_NEEDS_REVIEW = "needs-review" + + +@dataclass +class License_Policy_Export_Item: + id: str + name: str + family: str + reference: str + osi: bool + deprecated: bool + usagePolicy: str + annotationRefs: list[str] + notes: Optional[list[str]] + + +@dataclass +class License_Policy_Export: + 
policies: list[License_Policy_Export_Item] + + +def export_license_policy_sbom_utility(license_policy: License_Policy) -> str: + return object_to_json(_create_license_policy_export(license_policy)) + + +def _create_license_policy_export(license_policy: License_Policy) -> License_Policy_Export: + license_policy_export = License_Policy_Export(policies=[]) + + license_evaluation_results: dict[str, LicensePolicyEvaluationResult] = {} + + if license_policy.parent: + get_license_evaluation_results_for_license_policy(license_policy.parent, True, license_evaluation_results) + + get_license_evaluation_results_for_license_policy(license_policy, False, license_evaluation_results) + + license_ids = set() + for license_string in license_evaluation_results: + if license_string.startswith("spdx_"): + license_ids.add(license_string.replace("spdx_", "")) + + spdx_licenses = License.objects.filter(spdx_id__in=license_ids) + spdx_license_dict = {license.spdx_id: license for license in spdx_licenses} + + for license_string, evaluation_result in license_evaluation_results.items(): + license_id = license_string.replace("spdx_", "") if license_string.startswith("spdx_") else "" + if license_id: + spdx_license = spdx_license_dict.get(license_id) + if spdx_license: + license_name = spdx_license.name + reference = spdx_license.reference + osi = spdx_license.is_osi_approved if spdx_license.is_osi_approved is not None else False + deprecated = spdx_license.is_deprecated if spdx_license.is_deprecated is not None else False + else: + logger.warning("SPDX license %s not found in database.", license_id) + continue + else: + license_name = license_string.replace("expression_", "") if license_string.startswith("expression_") else "" + if not license_name: + license_name = license_string.replace("non_spdx_", "") if license_string.startswith("non_spdx_") else "" + reference = "" + osi = False + deprecated = False + + family = license_id if license_id else license_name + # replace everything that is not a letter, number or dash with a dash + family = "".join(char if char.isalnum() or char == "-" else "-" for char in family) + notes = [evaluation_result.comment] if evaluation_result.comment else None + usagePolicy = _get_usage_policy(evaluation_result) + annotationRefs = [evaluation_result.evaluation_result.upper()] + + license_policy_export_item = License_Policy_Export_Item( + id=license_id, + name=license_name, + family=family, + reference=reference, + osi=osi, + deprecated=deprecated, + usagePolicy=usagePolicy, + annotationRefs=annotationRefs, + notes=notes, + ) + license_policy_export.policies.append(license_policy_export_item) + + return license_policy_export + + +def _get_usage_policy(evaluation_result: LicensePolicyEvaluationResult) -> str: + if evaluation_result.evaluation_result == License_Policy_Evaluation_Result.RESULT_ALLOWED: + usagePolicy = USAGE_POLICY.POLICY_ALLOW + elif evaluation_result.evaluation_result == License_Policy_Evaluation_Result.RESULT_FORBIDDEN: + usagePolicy = USAGE_POLICY.POLICY_DENY + elif evaluation_result.evaluation_result == License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED: + usagePolicy = USAGE_POLICY.POLICY_NEEDS_REVIEW + elif evaluation_result.evaluation_result == License_Policy_Evaluation_Result.RESULT_UNKNOWN: + usagePolicy = USAGE_POLICY.POLICY_NEEDS_REVIEW + elif evaluation_result.evaluation_result == License_Policy_Evaluation_Result.RESULT_IGNORED: + usagePolicy = USAGE_POLICY.POLICY_ALLOW + else: + usagePolicy = USAGE_POLICY.POLICY_NEEDS_REVIEW + return usagePolicy diff --git 
a/backend/application/licenses/services/export_license_policy.py b/backend/application/licenses/services/export_license_policy_secobserve.py similarity index 67% rename from backend/application/licenses/services/export_license_policy.py rename to backend/application/licenses/services/export_license_policy_secobserve.py index c2a1b5041..6f36a5b8e 100644 --- a/backend/application/licenses/services/export_license_policy.py +++ b/backend/application/licenses/services/export_license_policy_secobserve.py @@ -5,10 +5,10 @@ import yaml from application.commons.services.export import object_to_json +from application.commons.services.functions import get_comma_separated_as_list from application.licenses.models import License_Policy from application.licenses.services.license_policy import ( LicensePolicyEvaluationResult, - get_ignore_component_type_list, get_license_evaluation_results_for_license_policy, ) @@ -24,38 +24,29 @@ class License_Policy_Export_Item: comment: Optional[str] = None -@dataclass -class License_Policy_Export_Ignore_Component_Type: - component_type: str - - @dataclass class License_Policy_Export: name: str description: str items: list[License_Policy_Export_Item] - ignore_component_types: list[License_Policy_Export_Ignore_Component_Type] + ignore_component_types: list[str] parent: Optional[str] = None -def export_license_policy_yaml(license_policy: License_Policy) -> str: - return yaml.dump(json.loads(export_license_policy_json(license_policy))) +def export_license_policy_secobserve_yaml(license_policy: License_Policy) -> str: + return yaml.dump(json.loads(export_license_policy_secobserve_json(license_policy))) -def export_license_policy_json(license_policy: License_Policy) -> str: +def export_license_policy_secobserve_json(license_policy: License_Policy) -> str: return object_to_json(_create_license_policy_export(license_policy)) -def _create_license_policy_export( - license_policy: License_Policy, -) -> License_Policy_Export: +def _create_license_policy_export(license_policy: License_Policy) -> License_Policy_Export: license_policy_export = License_Policy_Export( name=license_policy.name, description=license_policy.description, items=[], - ignore_component_types=get_ignore_component_type_list( - license_policy.ignore_component_types - ), + ignore_component_types=get_comma_separated_as_list(license_policy.ignore_component_types), ) if license_policy.parent: license_policy_export.parent = license_policy.parent.name @@ -63,13 +54,9 @@ def _create_license_policy_export( license_evaluation_results: dict[str, LicensePolicyEvaluationResult] = {} if license_policy.parent: - get_license_evaluation_results_for_license_policy( - license_policy.parent, True, license_evaluation_results - ) + get_license_evaluation_results_for_license_policy(license_policy.parent, True, license_evaluation_results) - get_license_evaluation_results_for_license_policy( - license_policy, False, license_evaluation_results - ) + get_license_evaluation_results_for_license_policy(license_policy, False, license_evaluation_results) for license_string, evaluation_result in license_evaluation_results.items(): license_policy_export_item = License_Policy_Export_Item( @@ -79,17 +66,11 @@ def _create_license_policy_export( comment=evaluation_result.comment, ) if license_string.startswith("spdx_"): - license_policy_export_item.spdx_license = license_string.replace( - "spdx_", "" - ) + license_policy_export_item.spdx_license = license_string.replace("spdx_", "") elif license_string.startswith("expression_"): - 
license_policy_export_item.license_expression = license_string.replace( - "expression_", "" - ) + license_policy_export_item.license_expression = license_string.replace("expression_", "") elif license_string.startswith("non_spdx_"): - license_policy_export_item.non_spdx_license = license_string.replace( - "non_spdx_", "" - ) + license_policy_export_item.non_spdx_license = license_string.replace("non_spdx_", "") else: continue diff --git a/backend/application/licenses/services/license.py b/backend/application/licenses/services/license.py index aaf9db7cb..2b84752e7 100644 --- a/backend/application/licenses/services/license.py +++ b/backend/application/licenses/services/license.py @@ -5,7 +5,11 @@ from application.licenses.models import License -def import_licenses() -> None: +def import_licenses() -> str: + + licenses_updated = 0 + licenses_created = 0 + response = requests.get( "https://raw.githubusercontent.com/spdx/license-list-data/refs/heads/main/json/licenses.json", timeout=60, @@ -29,15 +33,12 @@ def import_licenses() -> None: if secobserve_license.is_osi_approved != spdx_license.get("isOsiApproved"): secobserve_license.is_osi_approved = spdx_license.get("isOsiApproved") license_changed = True - if secobserve_license.is_deprecated != spdx_license.get( - "isDeprecatedLicenseId" - ): - secobserve_license.is_deprecated = spdx_license.get( - "isDeprecatedLicenseId" - ) + if secobserve_license.is_deprecated != spdx_license.get("isDeprecatedLicenseId"): + secobserve_license.is_deprecated = spdx_license.get("isDeprecatedLicenseId") license_changed = True if license_changed: secobserve_license.save() + licenses_updated += 1 except License.DoesNotExist: License.objects.create( spdx_id=spdx_id, @@ -46,3 +47,6 @@ def import_licenses() -> None: is_osi_approved=spdx_license.get("isOsiApproved"), is_deprecated=spdx_license.get("isDeprecatedLicenseId"), ) + licenses_created += 1 + + return f"Licenses updated: {licenses_updated}, licenses created: {licenses_created}" diff --git a/backend/application/licenses/services/license_component.py b/backend/application/licenses/services/license_component.py index dc9c386ac..c4709a470 100644 --- a/backend/application/licenses/services/license_component.py +++ b/backend/application/licenses/services/license_component.py @@ -1,157 +1,41 @@ import hashlib -from typing import Optional, Tuple from django.db.models.query import QuerySet -from django.utils import timezone from license_expression import get_spdx_licensing from packageurl import PackageURL from rest_framework.exceptions import ValidationError -from application.commons.services.functions import clip_fields +from application.commons.services.functions import get_comma_separated_as_list from application.core.models import Product -from application.import_observations.models import Vulnerability_Check -from application.licenses.models import License_Component, License_Component_Evidence -from application.licenses.queries.license import get_license_by_spdx_id +from application.licenses.models import License_Component +from application.licenses.services.concluded_license import update_concluded_license from application.licenses.services.license_policy import ( apply_license_policy_to_component, - get_ignore_component_type_list, get_license_evaluation_results_for_product, + get_license_policy, +) +from application.licenses.services.spdx_license_cache import SPDXLicenseCache +from application.licenses.types import ( + NO_LICENSE_INFORMATION, + License_Policy_Evaluation_Result, ) -def 
get_identity_hash(observation) -> str: - hash_string = _get_string_to_hash(observation) +def get_identity_hash(license_component: License_Component) -> str: + hash_string = _get_string_to_hash(license_component) return hashlib.sha256(hash_string.casefold().encode("utf-8").strip()).hexdigest() -def _get_string_to_hash( - license_component: License_Component, -): # pylint: disable=too-many-branches +def _get_string_to_hash(license_component: License_Component) -> str: # pylint: disable=too-many-branches hash_string = license_component.component_name_version - if license_component.component_purl: - hash_string += license_component.component_purl if license_component.component_dependencies: hash_string += license_component.component_dependencies - if license_component.license: - hash_string += license_component.license.spdx_id - if license_component.non_spdx_license: - hash_string += license_component.non_spdx_license - + if license_component.origin_service: + hash_string += license_component.origin_service.name return hash_string -def process_license_components( - license_components: list[License_Component], - vulnerability_check: Vulnerability_Check, -) -> Tuple[int, int, int]: - existing_components = License_Component.objects.filter( - product=vulnerability_check.product, - branch=vulnerability_check.branch, - upload_filename=vulnerability_check.filename, - ) - existing_component: Optional[License_Component] = None - existing_components_dict: dict[str, License_Component] = {} - for existing_component in existing_components: - existing_components_dict[existing_component.identity_hash] = existing_component - - license_evaluation_results = get_license_evaluation_results_for_product( - vulnerability_check.product - ) - - components_new = 0 - components_updated = 0 - - license_policy = vulnerability_check.product.license_policy - ignore_component_types = ( - get_ignore_component_type_list(license_policy.ignore_component_types) - if license_policy - else [] - ) - - for unsaved_component in license_components: - _prepare_component(unsaved_component) - existing_component = existing_components_dict.get( - unsaved_component.identity_hash - ) - if existing_component: - license_before = existing_component.license - non_spdx_license_before = existing_component.non_spdx_license - evaluation_result_before = existing_component.evaluation_result - existing_component.component_name = unsaved_component.component_name - existing_component.component_version = unsaved_component.component_version - existing_component.component_purl = unsaved_component.component_purl - existing_component.component_purl_type = ( - unsaved_component.component_purl_type - ) - existing_component.component_cpe = unsaved_component.component_cpe - existing_component.component_dependencies = ( - unsaved_component.component_dependencies - ) - existing_component.license_name = unsaved_component.license_name - existing_component.license = unsaved_component.license - existing_component.license_expression = unsaved_component.license_expression - existing_component.non_spdx_license = unsaved_component.non_spdx_license - apply_license_policy_to_component( - existing_component, - license_evaluation_results, - ignore_component_types, - ) - existing_component.import_last_seen = timezone.now() - if ( - license_before != existing_component.license - or non_spdx_license_before != existing_component.non_spdx_license - or evaluation_result_before != existing_component.evaluation_result - ): - existing_component.last_change = timezone.now() - 
clip_fields("licenses", "License_Component", existing_component) - existing_component.save() - - existing_component.evidences.all().delete() - _process_evidences(unsaved_component, existing_component) - - existing_components_dict.pop(unsaved_component.identity_hash) - components_updated += 1 - else: - unsaved_component.product = vulnerability_check.product - unsaved_component.branch = vulnerability_check.branch - unsaved_component.upload_filename = vulnerability_check.filename - apply_license_policy_to_component( - unsaved_component, - license_evaluation_results, - ignore_component_types, - ) - - unsaved_component.import_last_seen = timezone.now() - unsaved_component.last_change = timezone.now() - clip_fields("licenses", "License_Component", unsaved_component) - unsaved_component.save() - - _process_evidences(unsaved_component, unsaved_component) - - components_new += 1 - - components_deleted = len(existing_components_dict) - for existing_component in existing_components_dict.values(): - existing_component.delete() - - return components_new, components_updated, components_deleted - - -def _process_evidences( - source_component: License_Component, target_component: License_Component -) -> None: - if source_component.unsaved_evidences: - for evidence in source_component.unsaved_evidences: - evidence = License_Component_Evidence( - license_component=target_component, - name=evidence[0], - evidence=evidence[1], - ) - clip_fields("licenses", "License_Component_Evidence", evidence) - evidence.save() - - -def _prepare_component(component: License_Component) -> None: +def prepare_license_component(component: License_Component, spdx_cache: SPDXLicenseCache) -> None: _prepare_name_version(component) if component.component_name_version is None: @@ -178,7 +62,10 @@ def _prepare_component(component: License_Component) -> None: if component.component_purl_type is None: component.component_purl_type = "" - _prepare_license(component) + _prepare_license(component, spdx_cache) + + if component.evaluation_result in [None, ""]: + component.evaluation_result = License_Policy_Evaluation_Result.RESULT_UNKNOWN component.identity_hash = get_identity_hash(component) @@ -186,9 +73,7 @@ def _prepare_component(component: License_Component) -> None: def _prepare_name_version(component: License_Component) -> None: if not component.component_name_version: if component.component_name and component.component_version: - component.component_name_version = ( - component.component_name + ":" + component.component_version - ) + component.component_name_version = component.component_name + ":" + component.component_version elif component.component_name: component.component_name_version = component.component_name else: @@ -204,30 +89,93 @@ def _prepare_name_version(component: License_Component) -> None: component.component_version = "" -def _prepare_license(component: License_Component) -> None: - component.license_expression = "" - component.non_spdx_license = "" +def _prepare_license(component: License_Component, spdx_cache: SPDXLicenseCache) -> None: + _prepare_imported_declared_license(component, spdx_cache) + _prepare_imported_concluded_license(component, spdx_cache) + set_effective_license(component) + + +def _prepare_imported_declared_license(component: License_Component, spdx_cache: SPDXLicenseCache) -> None: + component.imported_declared_spdx_license = None + component.imported_declared_license_expression = "" + component.imported_declared_non_spdx_license = "" + component.imported_declared_multiple_licenses = "" + + if 
not component.unsaved_declared_licenses: + component.imported_declared_license_name = NO_LICENSE_INFORMATION + elif len(component.unsaved_declared_licenses) == 1: + component.imported_declared_spdx_license = spdx_cache.get(component.unsaved_declared_licenses[0]) + if component.imported_declared_spdx_license: + component.imported_declared_license_name = component.imported_declared_spdx_license.spdx_id + else: + licensing = get_spdx_licensing() + try: + expression_info = licensing.validate(component.unsaved_declared_licenses[0], strict=True) + if not expression_info.errors: + component.imported_declared_license_expression = expression_info.normalized_expression + component.imported_declared_license_name = component.imported_declared_license_expression + else: + component.imported_declared_non_spdx_license = component.unsaved_declared_licenses[0] + component.imported_declared_license_name = component.imported_declared_non_spdx_license + except Exception: + component.imported_declared_non_spdx_license = component.unsaved_declared_licenses[0] + component.imported_declared_license_name = component.imported_declared_non_spdx_license + + else: + component.imported_declared_multiple_licenses = ", ".join(component.unsaved_declared_licenses) + component.imported_declared_license_name = component.imported_declared_multiple_licenses - component.license_name = component.unsaved_license - if component.unsaved_license: - component.license = get_license_by_spdx_id(component.unsaved_license) - if not component.license: +def _prepare_imported_concluded_license(component: License_Component, spdx_cache: SPDXLicenseCache) -> None: + if not component.unsaved_concluded_licenses: + component.imported_concluded_license_name = NO_LICENSE_INFORMATION + elif len(component.unsaved_concluded_licenses) == 1: + component.imported_concluded_spdx_license = spdx_cache.get(component.unsaved_concluded_licenses[0]) + if component.imported_concluded_spdx_license: + component.imported_concluded_license_name = component.imported_concluded_spdx_license.spdx_id + else: licensing = get_spdx_licensing() try: - expression_info = licensing.validate( - component.unsaved_license, strict=True - ) + expression_info = licensing.validate(component.unsaved_concluded_licenses[0], strict=True) if not expression_info.errors: - component.license_expression = expression_info.normalized_expression - component.license_name = component.license_expression + component.imported_concluded_license_expression = expression_info.normalized_expression + component.imported_concluded_license_name = component.imported_concluded_license_expression else: - component.non_spdx_license = component.unsaved_license + component.imported_concluded_non_spdx_license = component.unsaved_concluded_licenses[0] + component.imported_concluded_license_name = component.imported_concluded_non_spdx_license except Exception: - component.non_spdx_license = component.unsaved_license + component.imported_concluded_non_spdx_license = component.unsaved_concluded_licenses[0] + component.imported_concluded_license_name = component.imported_concluded_non_spdx_license - if not component.license_name: - component.license_name = "No license information" + else: + component.imported_concluded_multiple_licenses = ", ".join(component.unsaved_concluded_licenses) + component.imported_concluded_license_name = component.imported_concluded_multiple_licenses + + +def set_effective_license(component: License_Component) -> None: + component.effective_license_name = NO_LICENSE_INFORMATION + 
component.effective_spdx_license = None + component.effective_license_expression = "" + component.effective_non_spdx_license = "" + component.effective_multiple_licenses = "" + + if component.manual_concluded_license_name != NO_LICENSE_INFORMATION: + component.effective_license_name = component.manual_concluded_license_name + component.effective_spdx_license = component.manual_concluded_spdx_license + component.effective_license_expression = component.manual_concluded_license_expression + component.effective_non_spdx_license = component.manual_concluded_non_spdx_license + elif component.imported_concluded_license_name != NO_LICENSE_INFORMATION: + component.effective_license_name = component.imported_concluded_license_name + component.effective_spdx_license = component.imported_concluded_spdx_license + component.effective_license_expression = component.imported_concluded_license_expression + component.effective_non_spdx_license = component.imported_concluded_non_spdx_license + component.effective_multiple_licenses = component.imported_concluded_multiple_licenses + elif component.imported_declared_license_name != NO_LICENSE_INFORMATION: + component.effective_license_name = component.imported_declared_license_name + component.effective_spdx_license = component.imported_declared_spdx_license + component.effective_license_expression = component.imported_declared_license_expression + component.effective_non_spdx_license = component.imported_declared_non_spdx_license + component.effective_multiple_licenses = component.imported_declared_multiple_licenses def license_components_bulk_delete(product: Product, component_ids: list[int]) -> None: @@ -235,17 +183,44 @@ def license_components_bulk_delete(product: Product, component_ids: list[int]) - components.delete() -def _check_components( - product: Product, component_ids: list[int] -) -> QuerySet[License_Component]: +def _check_components(product: Product, component_ids: list[int]) -> QuerySet[License_Component]: components = License_Component.objects.filter(id__in=component_ids) if len(components) != len(component_ids): raise ValidationError("Some components do not exist") for component in components: if component.product != product: - raise ValidationError( - f"Component {component.pk} does not belong to product {product.pk}" - ) + raise ValidationError(f"Component {component.pk} does not belong to product {product.pk}") return components + + +def save_concluded_license(component: License_Component) -> None: + component.manual_concluded_license_name = NO_LICENSE_INFORMATION + + if component.manual_concluded_spdx_license: + component.manual_concluded_license_name = component.manual_concluded_spdx_license.spdx_id + elif component.manual_concluded_license_expression: + licensing = get_spdx_licensing() + expression_info = licensing.validate(component.manual_concluded_license_expression, strict=True) + if not expression_info.errors: + component.manual_concluded_license_name = component.manual_concluded_license_expression + else: + raise ValidationError("Invalid concluded license expression") + elif component.manual_concluded_non_spdx_license: + component.manual_concluded_license_name = component.manual_concluded_non_spdx_license + + set_effective_license(component) + update_concluded_license(component) + + license_policy = get_license_policy(component.product) + if license_policy: + license_evaluation_results = get_license_evaluation_results_for_product(component.product) + apply_license_policy_to_component( + component, + license_evaluation_results, + 
get_comma_separated_as_list(license_policy.ignore_component_types), + SPDXLicenseCache(), + ) + + component.save() diff --git a/backend/application/licenses/services/license_group.py b/backend/application/licenses/services/license_group.py index b29c2378b..8c8edf13f 100644 --- a/backend/application/licenses/services/license_group.py +++ b/backend/application/licenses/services/license_group.py @@ -7,9 +7,7 @@ def copy_license_group(source_license_group: License_Group, name: str) -> License_Group: new_license_group = License_Group.objects.create( - name=name, - description=source_license_group.description, - is_public=source_license_group.is_public, + name=name, description=source_license_group.description, is_public=source_license_group.is_public ) for license_to_be_added in source_license_group.licenses.all(): @@ -18,9 +16,7 @@ def copy_license_group(source_license_group: License_Group, name: str) -> Licens members = License_Group_Member.objects.filter(license_group=source_license_group) for member in members: License_Group_Member.objects.update_or_create( - license_group=new_license_group, - user=member.user, - is_manager=member.is_manager, + license_group=new_license_group, user=member.user, is_manager=member.is_manager ) return new_license_group @@ -29,11 +25,7 @@ def copy_license_group(source_license_group: License_Group, name: str) -> Licens def import_scancode_licensedb() -> None: license_groups: dict[str, License_Group] = {} - response = requests.get( - "https://scancode-licensedb.aboutcode.org/index.json", - timeout=60, - stream=True, - ) + response = requests.get("https://scancode-licensedb.aboutcode.org/index.json", timeout=60, stream=True) response.raise_for_status() data = loads(response.content) @@ -48,17 +40,19 @@ def import_scancode_licensedb() -> None: _add_license_to_group(license_groups, category, other_spdx_license_key) -def _add_license_to_group(license_groups, category, spdx_license_key): +def _add_license_to_group(license_groups: dict[str, License_Group], category: str, spdx_license_key: str) -> None: try: spdx_license = License.objects.get(spdx_id=spdx_license_key) license_group = license_groups.get(category) if not license_group: license_group, _ = License_Group.objects.get_or_create( name=f"{category} (ScanCode LicenseDB)", - description="Do not edit! 
" - + "Imported from [ScanCode LicenseDB](https://scancode-licensedb.aboutcode.org/) " - + "under the CC-BY-4.0 license.", - is_public=True, + defaults={ + "description": "**Do not edit!** " + + "Imported from [ScanCode LicenseDB](https://scancode-licensedb.aboutcode.org/) " + + "under the CC-BY-4.0 license and updated every 24 hours.", + "is_public": True, + }, ) license_groups[category] = license_group license_group.licenses.clear() diff --git a/backend/application/licenses/services/license_policy.py b/backend/application/licenses/services/license_policy.py index 3611e735a..9784252df 100644 --- a/backend/application/licenses/services/license_policy.py +++ b/backend/application/licenses/services/license_policy.py @@ -1,6 +1,7 @@ from dataclasses import dataclass from typing import Optional +from django.core.paginator import Paginator from django.db.models import Q from django.utils import timezone from license_expression import ( @@ -11,14 +12,16 @@ get_spdx_licensing, ) -from application.core.models import Product +from application.commons.services.functions import get_comma_separated_as_list +from application.core.models import Branch, Product from application.licenses.models import ( License_Component, + License_Group, License_Policy, License_Policy_Item, License_Policy_Member, ) -from application.licenses.queries.license import get_license_by_spdx_id +from application.licenses.services.spdx_license_cache import SPDXLicenseCache from application.licenses.types import License_Policy_Evaluation_Result @@ -30,9 +33,28 @@ class LicensePolicyEvaluationResult: comment: Optional[str] = None -def copy_license_policy( - source_license_policy: License_Policy, name: str -) -> License_Policy: +def create_scancode_standard_policy() -> None: + try: + License_Policy.objects.get(name="Standard") + except License_Policy.DoesNotExist: + license_groups = License_Group.objects.filter(name__contains="(ScanCode LicenseDB)") + if license_groups: + license_policy = License_Policy( + name="Standard", description="Created automatically during initial startup", is_public=True + ) + license_policy.save() + for license_group in license_groups: + evaluation_result = License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED + if license_group.name.startswith("Permissive") or license_group.name.startswith("Public Domain"): + evaluation_result = License_Policy_Evaluation_Result.RESULT_ALLOWED + if license_group.name.startswith("Copyleft"): + evaluation_result = License_Policy_Evaluation_Result.RESULT_FORBIDDEN + License_Policy_Item( + license_policy=license_policy, license_group=license_group, evaluation_result=evaluation_result + ).save() + + +def copy_license_policy(source_license_policy: License_Policy, name: str) -> License_Policy: new_license_policy = License_Policy.objects.create( name=name, description=source_license_policy.description, @@ -53,29 +75,23 @@ def copy_license_policy( members = License_Policy_Member.objects.filter(license_policy=source_license_policy) for member in members: License_Policy_Member.objects.update_or_create( - license_policy=new_license_policy, - user=member.user, - is_manager=member.is_manager, + license_policy=new_license_policy, user=member.user, is_manager=member.is_manager ) return new_license_policy def get_license_evaluation_results_for_product(product: Product) -> dict: - license_policy = _get_license_policy(product) + license_policy = get_license_policy(product) if not license_policy: return {} license_evaluation_results: dict[str, LicensePolicyEvaluationResult] = {} if 
license_policy.parent: - get_license_evaluation_results_for_license_policy( - license_policy.parent, True, license_evaluation_results - ) + get_license_evaluation_results_for_license_policy(license_policy.parent, True, license_evaluation_results) - get_license_evaluation_results_for_license_policy( - license_policy, False, license_evaluation_results - ) + get_license_evaluation_results_for_license_policy(license_policy, False, license_evaluation_results) return license_evaluation_results @@ -91,114 +107,117 @@ def get_license_evaluation_results_for_license_policy( for item in items_license_groups: if item.license_group: for my_license in item.license_group.licenses.all(): - license_evaluation_results[f"spdx_{my_license.spdx_id}"] = ( - LicensePolicyEvaluationResult( - evaluation_result=item.evaluation_result, - from_parent=is_parent, - license_group_name=item.license_group.name, - comment=item.comment if item.comment else None, - ) - ) - - items_licenses = License_Policy_Item.objects.filter( - license_policy=license_policy, license__isnull=False - ) - for item in items_licenses: - if item.license: - license_evaluation_results[f"spdx_{item.license.spdx_id}"] = ( - LicensePolicyEvaluationResult( + license_evaluation_results[f"spdx_{my_license.spdx_id}"] = LicensePolicyEvaluationResult( evaluation_result=item.evaluation_result, from_parent=is_parent, + license_group_name=item.license_group.name, comment=item.comment if item.comment else None, ) - ) - items_license_expressions = License_Policy_Item.objects.filter( - license_policy=license_policy - ).exclude(license_expression="") - for item in items_license_expressions: - license_evaluation_results[f"expression_{item.license_expression}"] = ( - LicensePolicyEvaluationResult( + items_licenses = License_Policy_Item.objects.filter(license_policy=license_policy, license__isnull=False) + for item in items_licenses: + if item.license: + license_evaluation_results[f"spdx_{item.license.spdx_id}"] = LicensePolicyEvaluationResult( evaluation_result=item.evaluation_result, from_parent=is_parent, comment=item.comment if item.comment else None, ) + + items_license_expressions = License_Policy_Item.objects.filter(license_policy=license_policy).exclude( + license_expression="" + ) + for item in items_license_expressions: + license_evaluation_results[f"expression_{item.license_expression}"] = LicensePolicyEvaluationResult( + evaluation_result=item.evaluation_result, + from_parent=is_parent, + comment=item.comment if item.comment else None, ) - items_non_spdx_licenses = License_Policy_Item.objects.filter( - license_policy=license_policy - ).exclude(non_spdx_license="") + items_non_spdx_licenses = License_Policy_Item.objects.filter(license_policy=license_policy).exclude( + non_spdx_license="" + ) for item in items_non_spdx_licenses: - license_evaluation_results[f"non_spdx_{item.non_spdx_license}"] = ( - LicensePolicyEvaluationResult( - evaluation_result=item.evaluation_result, - from_parent=is_parent, - comment=item.comment if item.comment else None, - ) + license_evaluation_results[f"non_spdx_{item.non_spdx_license}"] = LicensePolicyEvaluationResult( + evaluation_result=item.evaluation_result, + from_parent=is_parent, + comment=item.comment if item.comment else None, ) def apply_license_policy(license_policy: License_Policy) -> None: products = Product.objects.filter( Q(license_policy=license_policy) - | ( - Q(product_group__license_policy=license_policy) - & Q(license_policy__isnull=True) - ) + | (Q(product_group__license_policy=license_policy) & 
Q(license_policy__isnull=True)) ) for product in products: - apply_license_policy_product(product) + apply_license_policy_product(SPDXLicenseCache(), product) + +def apply_license_policy_product( + spdx_cache: SPDXLicenseCache, product: Product, branch: Optional[Branch] = None +) -> None: + license_policy = get_license_policy(product) -def apply_license_policy_product(product: Product) -> None: license_evaluation_results = get_license_evaluation_results_for_product(product) - components = License_Component.objects.filter(product=product) - for component in components: - license_before = component.license - non_spdx_license_before = component.non_spdx_license - evaluation_result_before = component.evaluation_result - - license_policy = _get_license_policy(product) - if license_policy: - apply_license_policy_to_component( - component, - license_evaluation_results, - get_ignore_component_type_list(license_policy.ignore_component_types), - ) - else: - component.evaluation_result = ( - License_Policy_Evaluation_Result.RESULT_UNKNOWN - ) - if ( - license_before != component.license - or non_spdx_license_before != component.non_spdx_license - or evaluation_result_before != component.evaluation_result - ): - component.last_change = timezone.now() + components = ( + License_Component.objects.filter(product=product).order_by("pk").select_related("effective_spdx_license") + ) + if branch: + components = components.filter(branch=branch) + + paginator = Paginator(components, 1000) + for page_number in paginator.page_range: + page = paginator.page(page_number) + updates = [] + + for component in page.object_list: + evaluation_result_before = component.evaluation_result + + if license_policy: + apply_license_policy_to_component( + component, + license_evaluation_results, + get_comma_separated_as_list(license_policy.ignore_component_types), + spdx_cache, + ) + else: + component.evaluation_result = License_Policy_Evaluation_Result.RESULT_UNKNOWN - component.save() + if evaluation_result_before != component.evaluation_result: + component.last_change = timezone.now() + + updates.append(component) + + License_Component.objects.bulk_update( + updates, ["evaluation_result", "numerical_evaluation_result", "last_change"] + ) def apply_license_policy_to_component( component: License_Component, evaluation_results: dict[str, LicensePolicyEvaluationResult], ignore_component_types: list, + spdx_cache: SPDXLicenseCache, ) -> None: evaluation_result = None if component.component_purl_type in ignore_component_types: evaluation_result = License_Policy_Evaluation_Result.RESULT_IGNORED - elif component.license: + elif component.effective_spdx_license: evaluation_result = _get_license_evaluation_result( - f"spdx_{component.license.spdx_id}", evaluation_results + f"spdx_{component.effective_spdx_license.spdx_id}", evaluation_results ) - elif component.license_expression: + elif component.effective_license_expression: evaluation_result = _evaluate_license_expression( - component.license_expression, evaluation_results + component.effective_license_expression, evaluation_results, spdx_cache ) - elif component.non_spdx_license: + elif component.effective_non_spdx_license: evaluation_result = _get_license_evaluation_result( - f"non_spdx_{component.non_spdx_license}", evaluation_results + f"non_spdx_{component.effective_non_spdx_license}", evaluation_results + ) + elif component.effective_multiple_licenses: + evaluation_result = _get_multiple_licenses_evaluation_result( + component.effective_multiple_licenses, evaluation_results, 
spdx_cache ) if not evaluation_result: evaluation_result = License_Policy_Evaluation_Result.RESULT_UNKNOWN @@ -206,15 +225,7 @@ def apply_license_policy_to_component( component.evaluation_result = evaluation_result -def get_ignore_component_type_list(ignore_component_types: str) -> list: - ignore_component_types_list = ( - ignore_component_types.split(",") if ignore_component_types else [] - ) - ignore_component_types_list = [x.strip() for x in ignore_component_types_list] - return ignore_component_types_list - - -def _get_license_policy(product: Product) -> Optional[License_Policy]: +def get_license_policy(product: Product) -> Optional[License_Policy]: if product.license_policy: return product.license_policy @@ -234,24 +245,37 @@ def _get_license_evaluation_result( return License_Policy_Evaluation_Result.RESULT_UNKNOWN +def _get_multiple_licenses_evaluation_result( + multiple_licenses: str, evaluation_results: dict[str, LicensePolicyEvaluationResult], spdx_cache: SPDXLicenseCache +) -> str: + licenses = get_comma_separated_as_list(multiple_licenses) + spdx_licenses = [] + for my_license in licenses: + spdx_license = spdx_cache.get(my_license) + if spdx_license: + spdx_licenses.append(spdx_license.spdx_id) + + evaluation_result_set = set() + for license_string in licenses: + if license_string in spdx_licenses: + evaluation_result_set.add(_get_license_evaluation_result(f"spdx_{license_string}", evaluation_results)) + else: + evaluation_result_set.add(_get_license_evaluation_result(f"non_spdx_{license_string}", evaluation_results)) + + return _evaluate_and_expression(evaluation_result_set) + + def _evaluate_license_expression( - license_expression: str, - evaluation_results: dict[str, LicensePolicyEvaluationResult], + license_expression: str, evaluation_results: dict[str, LicensePolicyEvaluationResult], spdx_cache: SPDXLicenseCache ) -> Optional[str]: evaluation_result = License_Policy_Evaluation_Result.RESULT_UNKNOWN try: licensing = get_spdx_licensing() - parsed_expression = licensing.parse( - license_expression, validate=True, strict=True - ) - evaluation_result = _evaluate_parsed_license_expression( - parsed_expression, evaluation_results - ) + parsed_expression = licensing.parse(license_expression, validate=True, strict=True) + evaluation_result = _evaluate_parsed_license_expression(parsed_expression, evaluation_results, spdx_cache) if evaluation_result == License_Policy_Evaluation_Result.RESULT_UNKNOWN: - evaluation_result = _get_license_evaluation_result( - f"expression_{license_expression}", evaluation_results - ) + evaluation_result = _get_license_evaluation_result(f"expression_{license_expression}", evaluation_results) except Exception: # nosec B110 # a meaningful return value is set as a default in case on an exception pass @@ -262,6 +286,7 @@ def _evaluate_license_expression( def _evaluate_parsed_license_expression( parsed_expression: LicenseExpression, evaluation_results: dict[str, LicensePolicyEvaluationResult], + spdx_cache: SPDXLicenseCache, ) -> str: evaluation_result = License_Policy_Evaluation_Result.RESULT_UNKNOWN @@ -271,19 +296,15 @@ def _evaluate_parsed_license_expression( if parsed_expression_type == LicenseSymbol: license_symbol = str(parsed_expression) - spdx_license = get_license_by_spdx_id(license_symbol) + spdx_license = spdx_cache.get(license_symbol) if spdx_license: - return _get_license_evaluation_result( - f"spdx_{spdx_license.spdx_id}", evaluation_results - ) + return _get_license_evaluation_result(f"spdx_{spdx_license.spdx_id}", evaluation_results) return 
License_Policy_Evaluation_Result.RESULT_UNKNOWN if parsed_expression_type in [AND, OR]: evaluation_result_set = set() for arg in parsed_expression.args: - evaluation_result_set.add( - _evaluate_parsed_license_expression(arg, evaluation_results) - ) + evaluation_result_set.add(_evaluate_parsed_license_expression(arg, evaluation_results, spdx_cache)) if parsed_expression_type == AND: evaluation_result = _evaluate_and_expression(evaluation_result_set) if parsed_expression_type == OR: diff --git a/backend/application/licenses/services/licenselynx.py b/backend/application/licenses/services/licenselynx.py new file mode 100644 index 000000000..a5d243656 --- /dev/null +++ b/backend/application/licenses/services/licenselynx.py @@ -0,0 +1,62 @@ +from typing import Optional + +from licenselynx.licenselynx import LicenseLynx + +from application.commons.services.functions import get_comma_separated_as_list +from application.licenses.models import License, License_Component +from application.licenses.services.license_component import set_effective_license +from application.licenses.services.spdx_license_cache import SPDXLicenseCache +from application.licenses.types import NO_LICENSE_INFORMATION + +SET_BY_LICENSELYNX = "Set by LicenseLynx" + + +def apply_licenselynx(component: License_Component, spdx_cache: SPDXLicenseCache) -> None: + if ( + component.manual_concluded_license_name + and component.manual_concluded_license_name != NO_LICENSE_INFORMATION + and component.manual_concluded_comment != SET_BY_LICENSELYNX + ): + return + + if component.imported_declared_non_spdx_license: + mapped_license = _get_mapped_licence(component.imported_declared_non_spdx_license, spdx_cache) + if mapped_license: + component.manual_concluded_spdx_license = mapped_license + component.manual_concluded_comment = SET_BY_LICENSELYNX + component.manual_concluded_license_name = mapped_license.spdx_id + elif component.imported_declared_multiple_licenses: + licenses = get_comma_separated_as_list(component.imported_declared_multiple_licenses) + for i, license_string in enumerate(licenses): + mapped_license_string = _get_mapped_licence_string(license_string) + if mapped_license_string: + licenses[i] = mapped_license_string + component.imported_declared_multiple_licenses = ", ".join(licenses) + component.imported_declared_license_name = component.imported_declared_multiple_licenses + + if component.imported_concluded_non_spdx_license: + mapped_license = _get_mapped_licence(component.imported_concluded_non_spdx_license, spdx_cache) + if mapped_license: + component.manual_concluded_spdx_license = mapped_license + component.manual_concluded_comment = SET_BY_LICENSELYNX + component.manual_concluded_license_name = mapped_license.spdx_id + elif component.imported_concluded_multiple_licenses: + licenses = get_comma_separated_as_list(component.imported_concluded_multiple_licenses) + for i, license_string in enumerate(licenses): + mapped_license_string = _get_mapped_licence_string(license_string) + if mapped_license_string: + licenses[i] = mapped_license_string + component.imported_concluded_multiple_licenses = ", ".join(licenses) + component.imported_concluded_license_name = component.imported_concluded_multiple_licenses + + set_effective_license(component) + + +def _get_mapped_licence_string(license_string: str) -> Optional[str]: + license_object = LicenseLynx.map(license_string) + return license_object.id if license_object else None + + +def _get_mapped_licence(license_string: str, spdx_cache: SPDXLicenseCache) -> Optional[License]: + 
mapped_license_string = _get_mapped_licence_string(license_string) + return spdx_cache.get(mapped_license_string) if mapped_license_string else None diff --git a/backend/application/licenses/services/spdx_license_cache.py b/backend/application/licenses/services/spdx_license_cache.py new file mode 100644 index 000000000..103e2db8b --- /dev/null +++ b/backend/application/licenses/services/spdx_license_cache.py @@ -0,0 +1,31 @@ +import re +from typing import Optional + +from application.licenses.models import License +from application.licenses.queries.license import get_license_by_spdx_id + +SPDX_ID_REGEX = re.compile("^[A-Za-z0-9-.+:]+$") + + +class SPDXLicenseCache: + + NO_ENTRY = "no_entry" + + def __init__(self) -> None: + self.cache: dict[str, License | str] = {} + + def get(self, spdx_id: str) -> Optional[License]: + if not SPDX_ID_REGEX.match(spdx_id): + return None + + spdx_license = self.cache.get(spdx_id) + if spdx_license: + return spdx_license if isinstance(spdx_license, License) else None + + spdx_license = get_license_by_spdx_id(spdx_id) + if spdx_license: + self.cache[spdx_id] = spdx_license + return spdx_license + + self.cache[spdx_id] = SPDXLicenseCache.NO_ENTRY + return None diff --git a/backend/application/licenses/signals.py b/backend/application/licenses/signals.py index 4aad23578..c10033bb6 100644 --- a/backend/application/licenses/signals.py +++ b/backend/application/licenses/signals.py @@ -1,7 +1,9 @@ +from typing import Any + from django.db.models.signals import post_save from django.dispatch import receiver -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.licenses.models import ( License_Group, License_Group_Member, @@ -12,25 +14,21 @@ @receiver(post_save, sender=License_Group) def license_group_post_save( # pylint: disable=unused-argument - sender, instance: License_Group, created: bool, **kwargs + sender: Any, instance: License_Group, created: bool, **kwargs: Any ) -> None: # sender is needed according to Django documentation if created: user = get_current_user() if user and not user.is_superuser: - License_Group_Member.objects.update_or_create( - license_group=instance, user=user, is_manager=True - ) + License_Group_Member.objects.update_or_create(license_group=instance, user=user, is_manager=True) @receiver(post_save, sender=License_Policy) def license_policy_post_save( # pylint: disable=unused-argument - sender, instance: License_Policy, created: bool, **kwargs + sender: Any, instance: License_Policy, created: bool, **kwargs: Any ) -> None: # sender is needed according to Django documentation if created: user = get_current_user() if user and not user.is_superuser: - License_Policy_Member.objects.update_or_create( - license_policy=instance, user=user, is_manager=True - ) + License_Policy_Member.objects.update_or_create(license_policy=instance, user=user, is_manager=True) diff --git a/backend/application/licenses/tasks.py b/backend/application/licenses/tasks.py deleted file mode 100644 index 0150e5683..000000000 --- a/backend/application/licenses/tasks.py +++ /dev/null @@ -1,33 +0,0 @@ -import logging - -from huey import crontab -from huey.contrib.djhuey import db_periodic_task, lock_task - -from application.commons import settings_static -from application.commons.models import Settings -from application.commons.services.tasks import handle_task_exception -from application.licenses.services.license import import_licenses - -logger = 
logging.getLogger("secobserve.import_licenses") - - -@db_periodic_task( - crontab( - minute=settings_static.license_import_crontab_minute, - hour=settings_static.license_import_crontab_hour, - ) -) -@lock_task("license_import") -def task_api_import() -> None: - settings = Settings.load() - if not settings.feature_license_management: - return - - logger.info("--- License import - start ---") - - try: - import_licenses() - except Exception as e: - handle_task_exception(e) - - logger.info("--- License import - finished ---") diff --git a/backend/application/licenses/types.py b/backend/application/licenses/types.py index afeed7f92..7a03bc1d2 100644 --- a/backend/application/licenses/types.py +++ b/backend/application/licenses/types.py @@ -20,3 +20,6 @@ class License_Policy_Evaluation_Result: RESULT_ALLOWED: 4, RESULT_IGNORED: 5, } + + +NO_LICENSE_INFORMATION = "No license information" diff --git a/backend/application/metrics/api/views.py b/backend/application/metrics/api/views.py index 481ceb53b..756e8fd8e 100644 --- a/backend/application/metrics/api/views.py +++ b/backend/application/metrics/api/views.py @@ -1,15 +1,18 @@ import csv from tempfile import NamedTemporaryFile +from typing import Optional from django.http import HttpResponse from rest_framework.decorators import action from rest_framework.exceptions import ValidationError +from rest_framework.request import Request from rest_framework.response import Response from rest_framework.views import APIView -from application.access_control.services.authorization import user_has_permission_or_403 -from application.access_control.services.roles_permissions import Permissions +from application.authorization.services.authorization import user_has_permission_or_403 +from application.authorization.services.roles_permissions import Permissions from application.commons.models import Settings +from application.core.models import Product from application.core.queries.product import get_product_by_id from application.core.types import Severity from application.metrics.models import Product_Metrics_Status @@ -26,7 +29,7 @@ class ProductMetricsTimelineView(APIView): @action(detail=False, methods=["get"]) - def get(self, request): + def get(self, request: Request) -> Response: product = _get_and_check_product(request) age = request.query_params.get("age", "") return Response(get_product_metrics_timeline(product, age)) @@ -34,22 +37,20 @@ def get(self, request): class ProductMetricsCurrentView(APIView): @action(detail=False, methods=["get"]) - def get(self, request): + def get(self, request: Request) -> Response: product = _get_and_check_product(request) return Response(get_product_metrics_current(product)) class ProductMetricsExportExcelView(APIView): @action(detail=False, methods=["get"]) - def get(self, request): + def get(self, request: Request) -> HttpResponse: product = _get_and_check_product(request) workbook = export_product_metrics_excel(product) with NamedTemporaryFile() as tmp: - workbook.save( - tmp.name # nosemgrep: python.lang.correctness.tempfile.flush.tempfile-without-flush - ) + workbook.save(tmp.name) # nosemgrep: python.lang.correctness.tempfile.flush.tempfile-without-flush # export works fine without .flush() tmp.seek(0) stream = tmp.read() @@ -65,7 +66,7 @@ def get(self, request): class ProductMetricsExportCsvView(APIView): @action(detail=False, methods=["get"]) - def get(self, request): + def get(self, request: Request) -> HttpResponse: product = _get_and_check_product(request) response = HttpResponse(content_type="text/csv") @@ 
-78,15 +79,13 @@ def get(self, request): class ProductMetricsExportCodeChartaView(APIView): @action(detail=False, methods=["get"]) - def get(self, request): + def get(self, request: Request) -> HttpResponse: product = _get_and_check_product(request) if not product: raise ValidationError("Product not found") response = HttpResponse(content_type="text/csv") - response["Content-Disposition"] = ( - 'attachment; filename="secobserve_codecharta_metrics.csv"' - ) + response["Content-Disposition"] = 'attachment; filename="secobserve_codecharta_metrics.csv"' writer = csv.DictWriter( response, @@ -113,7 +112,7 @@ def get(self, request): class ProductMetricsStatusView(APIView): @action(detail=False, methods=["get"]) - def get(self, request): + def get(self, request: Request) -> Response: settings = Settings.load() status = Product_Metrics_Status.load() @@ -125,10 +124,12 @@ def get(self, request): ) -def _get_and_check_product(request): +def _get_and_check_product(request: Request) -> Optional[Product]: product_id = request.query_params.get("product_id") + if product_id and not product_id.isdigit(): + raise ValidationError("product_id must be a number") if product_id: - product = get_product_by_id(product_id) + product = get_product_by_id(int(product_id)) else: product = None if product: diff --git a/backend/application/metrics/migrations/0001_initial.py b/backend/application/metrics/migrations/0001_initial.py index dc78b8e13..2c46fc216 100644 --- a/backend/application/metrics/migrations/0001_initial.py +++ b/backend/application/metrics/migrations/0001_initial.py @@ -41,9 +41,7 @@ class Migration(migrations.Migration): ("risk_accepted", models.IntegerField(default=0)), ( "product", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="core.product" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.product"), ), ], options={ diff --git a/backend/application/metrics/migrations/0003_product_metrics_affected.py b/backend/application/metrics/migrations/0003_product_metrics_affected.py new file mode 100644 index 000000000..49b6aab34 --- /dev/null +++ b/backend/application/metrics/migrations/0003_product_metrics_affected.py @@ -0,0 +1,18 @@ +# Generated by Django 5.2.11 on 2026-02-15 17:10 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("metrics", "0002_product_metrics_status"), + ] + + operations = [ + migrations.AddField( + model_name="product_metrics", + name="affected", + field=models.IntegerField(default=0), + ), + ] diff --git a/backend/application/metrics/migrations/0004_rename_open_critical_product_metrics_active_critical_and_more.py b/backend/application/metrics/migrations/0004_rename_open_critical_product_metrics_active_critical_and_more.py new file mode 100644 index 000000000..8833e3a44 --- /dev/null +++ b/backend/application/metrics/migrations/0004_rename_open_critical_product_metrics_active_critical_and_more.py @@ -0,0 +1,43 @@ +# Generated by Django 5.2.11 on 2026-02-16 07:05 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("metrics", "0003_product_metrics_affected"), + ] + + operations = [ + migrations.RenameField( + model_name="product_metrics", + old_name="open_critical", + new_name="active_critical", + ), + migrations.RenameField( + model_name="product_metrics", + old_name="open_high", + new_name="active_high", + ), + migrations.RenameField( + model_name="product_metrics", + old_name="open_low", + new_name="active_low", + ), + 
migrations.RenameField( + model_name="product_metrics", + old_name="open_medium", + new_name="active_medium", + ), + migrations.RenameField( + model_name="product_metrics", + old_name="open_none", + new_name="active_none", + ), + migrations.RenameField( + model_name="product_metrics", + old_name="open_unknown", + new_name="active_unknown", + ), + ] diff --git a/backend/application/metrics/models.py b/backend/application/metrics/models.py index aede1b9a9..f1fca5e40 100644 --- a/backend/application/metrics/models.py +++ b/backend/application/metrics/models.py @@ -1,3 +1,5 @@ +from typing import Any + from django.db.models import ( CASCADE, DateField, @@ -15,14 +17,15 @@ class Product_Metrics(Model): product = ForeignKey(Product, on_delete=CASCADE) date = DateField() - open_critical = IntegerField(default=0) - open_high = IntegerField(default=0) - open_medium = IntegerField(default=0) - open_low = IntegerField(default=0) - open_none = IntegerField(default=0) - open_unknown = IntegerField(default=0) + active_critical = IntegerField(default=0) + active_high = IntegerField(default=0) + active_medium = IntegerField(default=0) + active_low = IntegerField(default=0) + active_none = IntegerField(default=0) + active_unknown = IntegerField(default=0) open = IntegerField(default=0) + affected = IntegerField(default=0) resolved = IntegerField(default=0) duplicate = IntegerField(default=0) false_positive = IntegerField(default=0) @@ -41,12 +44,12 @@ class Meta: class Product_Metrics_Status(Model): last_calculated = DateTimeField(default=timezone.now) - def save(self, *args, **kwargs): + def save(self, *args: Any, **kwargs: Any) -> None: self.pk = 1 super().save(*args, **kwargs) - def delete(self, *args, **kwargs): - pass + def delete(self, *args: Any, **kwargs: Any) -> tuple[int, dict[str, int]]: + return 0, {} @classmethod def load(cls) -> "Product_Metrics_Status": diff --git a/backend/application/metrics/queries/product_metrics.py b/backend/application/metrics/queries/product_metrics.py index 48aba50a3..1355becd1 100644 --- a/backend/application/metrics/queries/product_metrics.py +++ b/backend/application/metrics/queries/product_metrics.py @@ -2,7 +2,7 @@ from django.db.models.query import QuerySet from django.utils import timezone -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import Product_Authorization_Group_Member, Product_Member from application.metrics.models import Product_Metrics @@ -16,34 +16,24 @@ def get_product_metrics() -> QuerySet[Product_Metrics]: product_metrics = Product_Metrics.objects.all() if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("product_id"), user=user - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product__product_group"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( 
- product=OuterRef("product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, ) product_metrics = product_metrics.annotate( product__member=Exists(product_members), product__product_group__member=Exists(product_group_members), authorization_group_member=Exists(product_authorization_group_members), - product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), ) product_metrics = product_metrics.filter( diff --git a/backend/application/metrics/services/export_metrics.py b/backend/application/metrics/services/export_metrics.py index 80560564e..5dcc580cd 100644 --- a/backend/application/metrics/services/export_metrics.py +++ b/backend/application/metrics/services/export_metrics.py @@ -1,23 +1,21 @@ from typing import Optional +from django.db.models import QuerySet from django.http import HttpResponse from openpyxl import Workbook from application.commons.services.export import export_csv, export_excel from application.core.models import Product +from application.metrics.models import Product_Metrics from application.metrics.queries.product_metrics import get_product_metrics def export_product_metrics_excel(product: Optional[Product]) -> Workbook: product_metrics = _get_product_metrics(product) - return export_excel( - product_metrics, "Product Metrics", _get_excludes(), _get_foreign_keys() - ) + return export_excel(product_metrics, "Product Metrics", _get_excludes(), _get_foreign_keys()) -def export_product_metrics_csv( - response: HttpResponse, product: Optional[Product] -) -> None: +def export_product_metrics_csv(response: HttpResponse, product: Optional[Product]) -> None: product_metrics = _get_product_metrics(product) return export_csv( response, @@ -27,7 +25,7 @@ def export_product_metrics_csv( ) -def _get_product_metrics(product: Optional[Product]): +def _get_product_metrics(product: Optional[Product]) -> QuerySet[Product_Metrics]: product_metrics = get_product_metrics() if product: if product.is_product_group: @@ -38,7 +36,7 @@ def _get_product_metrics(product: Optional[Product]): return product_metrics -def _get_excludes(): +def _get_excludes() -> list[str]: return [ "id", "pk", @@ -46,5 +44,5 @@ def _get_excludes(): ] -def _get_foreign_keys(): +def _get_foreign_keys() -> list[str]: return ["product"] diff --git a/backend/application/metrics/services/metrics.py b/backend/application/metrics/services/metrics.py index 80c77bc54..2329aa1ec 100644 --- a/backend/application/metrics/services/metrics.py +++ b/backend/application/metrics/services/metrics.py @@ -13,20 +13,28 @@ from application.metrics.services.age import get_days -def calculate_product_metrics() -> None: +def calculate_product_metrics() -> str: + + num_products = 0 for product in Product.objects.filter(is_product_group=False): - calculate_metrics_for_product(product) + num_products += bool(calculate_metrics_for_product(product)) product_metrics_status = Product_Metrics_Status.load() product_metrics_status.last_calculated = timezone.now() product_metrics_status.save() + if num_products == 1: + return "Calculated metrics for 1 product." + + return f"Calculated metrics for {num_products} products." 
+ def calculate_metrics_for_product( # pylint: disable=too-many-branches product: Product, -) -> None: +) -> bool: # There are quite a lot of branches, but at least they are not nested too much + metrics_calculated = False today = timezone.localdate() latest_product_metrics = _get_latest_product_metrics(product) @@ -39,13 +47,14 @@ def calculate_metrics_for_product( # pylint: disable=too-many-branches Product_Metrics.objects.create( product=product, date=iteration_date, - open_critical=latest_product_metrics.open_critical, - open_high=latest_product_metrics.open_high, - open_medium=latest_product_metrics.open_medium, - open_low=latest_product_metrics.open_low, - open_none=latest_product_metrics.open_none, - open_unknown=latest_product_metrics.open_unknown, + active_critical=latest_product_metrics.active_critical, + active_high=latest_product_metrics.active_high, + active_medium=latest_product_metrics.active_medium, + active_low=latest_product_metrics.active_low, + active_none=latest_product_metrics.active_none, + active_unknown=latest_product_metrics.active_unknown, open=latest_product_metrics.open, + affected=latest_product_metrics.affected, resolved=latest_product_metrics.resolved, duplicate=latest_product_metrics.duplicate, false_positive=latest_product_metrics.false_positive, @@ -55,6 +64,7 @@ def calculate_metrics_for_product( # pylint: disable=too-many-branches risk_accepted=latest_product_metrics.risk_accepted, ) iteration_date += timedelta(days=1) + metrics_calculated = True else: # Either there are relevant changes of observations today or there are no metrics yet at all, # so we need to calculate the metrics for today. @@ -62,13 +72,14 @@ def calculate_metrics_for_product( # pylint: disable=too-many-branches product=product, date=today, defaults={ - "open_critical": 0, - "open_high": 0, - "open_medium": 0, - "open_low": 0, - "open_none": 0, - "open_unknown": 0, + "active_critical": 0, + "active_high": 0, + "active_medium": 0, + "active_low": 0, + "active_none": 0, + "active_unknown": 0, "open": 0, + "affected": 0, "resolved": 0, "duplicate": 0, "false_positive": 0, @@ -85,20 +96,23 @@ def calculate_metrics_for_product( # pylint: disable=too-many-branches ).values("current_severity", "current_status") for observation in observations: - if observation.get("current_status") == Status.STATUS_OPEN: - todays_product_metrics.open += 1 + if observation.get("current_status") in Status.STATUS_ACTIVE: if observation.get("current_severity") == Severity.SEVERITY_CRITICAL: - todays_product_metrics.open_critical += 1 + todays_product_metrics.active_critical += 1 elif observation.get("current_severity") == Severity.SEVERITY_HIGH: - todays_product_metrics.open_high += 1 + todays_product_metrics.active_high += 1 elif observation.get("current_severity") == Severity.SEVERITY_MEDIUM: - todays_product_metrics.open_medium += 1 + todays_product_metrics.active_medium += 1 elif observation.get("current_severity") == Severity.SEVERITY_LOW: - todays_product_metrics.open_low += 1 + todays_product_metrics.active_low += 1 elif observation.get("current_severity") == Severity.SEVERITY_NONE: - todays_product_metrics.open_none += 1 + todays_product_metrics.active_none += 1 elif observation.get("current_severity") == Severity.SEVERITY_UNKNOWN: - todays_product_metrics.open_unknown += 1 + todays_product_metrics.active_unknown += 1 + if observation.get("current_status") == Status.STATUS_OPEN: + todays_product_metrics.open += 1 + elif observation.get("current_status") == Status.STATUS_AFFECTED: + 
todays_product_metrics.affected += 1 elif observation.get("current_status") == Status.STATUS_RESOLVED: todays_product_metrics.resolved += 1 elif observation.get("current_status") == Status.STATUS_DUPLICATE: @@ -115,6 +129,9 @@ def calculate_metrics_for_product( # pylint: disable=too-many-branches todays_product_metrics.risk_accepted += 1 todays_product_metrics.save() + metrics_calculated = True + + return metrics_calculated def _get_latest_product_metrics(product: Product) -> Optional[Product_Metrics]: @@ -143,58 +160,34 @@ def get_product_metrics_timeline(product: Optional[Product], age: str) -> dict: for product_metric in product_metrics: if not product or product.is_product_group: response_metric = response_data.get(product_metric.date.isoformat(), {}) - response_metric["open_critical"] = ( - response_metric.get("open_critical", 0) + product_metric.open_critical - ) - response_metric["open_high"] = ( - response_metric.get("open_high", 0) + product_metric.open_high - ) - response_metric["open_medium"] = ( - response_metric.get("open_medium", 0) + product_metric.open_medium - ) - response_metric["open_low"] = ( - response_metric.get("open_low", 0) + product_metric.open_low - ) - response_metric["open_none"] = ( - response_metric.get("open_none", 0) + product_metric.open_none - ) - response_metric["open_unknown"] = ( - response_metric.get("open_unknown", 0) + product_metric.open_unknown - ) - response_metric["open"] = ( - response_metric.get("open", 0) + product_metric.open - ) - response_metric["resolved"] = ( - response_metric.get("resolved", 0) + product_metric.resolved - ) - response_metric["duplicate"] = ( - response_metric.get("duplicate", 0) + product_metric.duplicate - ) - response_metric["false_positive"] = ( - response_metric.get("false_positive", 0) + product_metric.false_positive - ) - response_metric["in_review"] = ( - response_metric.get("in_review", 0) + product_metric.in_review - ) - response_metric["not_affected"] = ( - response_metric.get("not_affected", 0) + product_metric.not_affected - ) - response_metric["not_security"] = ( - response_metric.get("not_security", 0) + product_metric.not_security - ) - response_metric["risk_accepted"] = ( - response_metric.get("risk_accepted", 0) + product_metric.risk_accepted + response_metric["active_critical"] = ( + response_metric.get("active_critical", 0) + product_metric.active_critical ) + response_metric["active_high"] = response_metric.get("active_high", 0) + product_metric.active_high + response_metric["active_medium"] = response_metric.get("active_medium", 0) + product_metric.active_medium + response_metric["active_low"] = response_metric.get("active_low", 0) + product_metric.active_low + response_metric["active_none"] = response_metric.get("active_none", 0) + product_metric.active_none + response_metric["active_unknown"] = response_metric.get("active_unknown", 0) + product_metric.active_unknown + response_metric["open"] = response_metric.get("open", 0) + product_metric.open + response_metric["affected"] = response_metric.get("affected", 0) + product_metric.affected + response_metric["resolved"] = response_metric.get("resolved", 0) + product_metric.resolved + response_metric["duplicate"] = response_metric.get("duplicate", 0) + product_metric.duplicate + response_metric["false_positive"] = response_metric.get("false_positive", 0) + product_metric.false_positive + response_metric["in_review"] = response_metric.get("in_review", 0) + product_metric.in_review + response_metric["not_affected"] = response_metric.get("not_affected", 
0) + product_metric.not_affected + response_metric["not_security"] = response_metric.get("not_security", 0) + product_metric.not_security + response_metric["risk_accepted"] = response_metric.get("risk_accepted", 0) + product_metric.risk_accepted response_data[product_metric.date.isoformat()] = response_metric else: response_metric = {} - response_metric["open_critical"] = product_metric.open_critical - response_metric["open_high"] = product_metric.open_high - response_metric["open_medium"] = product_metric.open_medium - response_metric["open_low"] = product_metric.open_low - response_metric["open_none"] = product_metric.open_none - response_metric["open_unknown"] = product_metric.open_unknown + response_metric["active_critical"] = product_metric.active_critical + response_metric["active_high"] = product_metric.active_high + response_metric["active_medium"] = product_metric.active_medium + response_metric["active_low"] = product_metric.active_low + response_metric["active_none"] = product_metric.active_none + response_metric["active_unknown"] = product_metric.active_unknown response_metric["open"] = product_metric.open + response_metric["affected"] = product_metric.affected response_metric["resolved"] = product_metric.resolved response_metric["duplicate"] = product_metric.duplicate response_metric["false_positive"] = product_metric.false_positive @@ -217,13 +210,14 @@ def get_product_metrics_current(product: Optional[Product]) -> dict: response_data: dict = _initialize_response_data() if len(product_metrics) > 0: for product_metric in product_metrics: - response_data["open_critical"] += product_metric.open_critical - response_data["open_high"] += product_metric.open_high - response_data["open_medium"] += product_metric.open_medium - response_data["open_low"] += product_metric.open_low - response_data["open_none"] += product_metric.open_none - response_data["open_unknown"] += product_metric.open_unknown + response_data["active_critical"] += product_metric.active_critical + response_data["active_high"] += product_metric.active_high + response_data["active_medium"] += product_metric.active_medium + response_data["active_low"] += product_metric.active_low + response_data["active_none"] += product_metric.active_none + response_data["active_unknown"] += product_metric.active_unknown response_data["open"] += product_metric.open + response_data["affected"] += product_metric.affected response_data["resolved"] += product_metric.resolved response_data["duplicate"] += product_metric.duplicate response_data["false_positive"] += product_metric.false_positive @@ -237,13 +231,14 @@ def get_product_metrics_current(product: Optional[Product]) -> dict: def _initialize_response_data() -> dict: response_data: dict = {} - response_data["open_critical"] = 0 - response_data["open_high"] = 0 - response_data["open_medium"] = 0 - response_data["open_low"] = 0 - response_data["open_none"] = 0 - response_data["open_unknown"] = 0 + response_data["active_critical"] = 0 + response_data["active_high"] = 0 + response_data["active_medium"] = 0 + response_data["active_low"] = 0 + response_data["active_none"] = 0 + response_data["active_unknown"] = 0 response_data["open"] = 0 + response_data["affected"] = 0 response_data["resolved"] = 0 response_data["duplicate"] = 0 response_data["false_positive"] = 0 @@ -259,78 +254,42 @@ def get_codecharta_metrics(product: Product) -> list[dict]: observations = Observation.objects.filter( product=product, branch=product.repository_default_branch, - current_status=Status.STATUS_OPEN, + 
current_status__in=Status.STATUS_ACTIVE, ) for observation in observations: if observation.origin_source_file: - file_severities_value = file_severities_dict.get( - observation.origin_source_file - ) + file_severities_value = file_severities_dict.get(observation.origin_source_file) if not file_severities_value: file_severities_value = {} file_severities_value["source_file"] = observation.origin_source_file file_severities_value["Vulnerabilities_Total".lower()] = 0 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_CRITICAL}".lower() - ] = 0 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_HIGH}".lower() - ] = 0 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_MEDIUM}".lower() - ] = 0 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_LOW}".lower() - ] = 0 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_NONE}".lower() - ] = 0 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_UNKNOWN}".lower() - ] = 0 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_HIGH}_and_above".lower() - ] = 0 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_MEDIUM}_and_above".lower() - ] = 0 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_LOW}_and_above".lower() - ] = 0 - file_severities_dict[observation.origin_source_file] = ( - file_severities_value - ) + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_CRITICAL}".lower()] = 0 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_HIGH}".lower()] = 0 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_MEDIUM}".lower()] = 0 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_LOW}".lower()] = 0 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_NONE}".lower()] = 0 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_UNKNOWN}".lower()] = 0 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_HIGH}_and_above".lower()] = 0 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_MEDIUM}_and_above".lower()] = 0 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_LOW}_and_above".lower()] = 0 + file_severities_dict[observation.origin_source_file] = file_severities_value file_severities_value["Vulnerabilities_Total".lower()] += 1 - file_severities_value[ - f"Vulnerabilities_{observation.current_severity}".lower() - ] += 1 + file_severities_value[f"Vulnerabilities_{observation.current_severity}".lower()] += 1 if observation.current_severity in ( Severity.SEVERITY_CRITICAL, Severity.SEVERITY_HIGH, ): - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_HIGH}_and_above".lower() - ] += 1 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_MEDIUM}_and_above".lower() - ] += 1 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_LOW}_and_above".lower() - ] += 1 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_HIGH}_and_above".lower()] += 1 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_MEDIUM}_and_above".lower()] += 1 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_LOW}_and_above".lower()] += 1 if observation.current_severity == Severity.SEVERITY_MEDIUM: - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_MEDIUM}_and_above".lower() - ] += 1 - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_LOW}_and_above".lower() - ] += 1 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_MEDIUM}_and_above".lower()] += 1 + 
file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_LOW}_and_above".lower()] += 1 if observation.current_severity == Severity.SEVERITY_LOW: - file_severities_value[ - f"Vulnerabilities_{Severity.SEVERITY_LOW}_and_above".lower() - ] += 1 + file_severities_value[f"Vulnerabilities_{Severity.SEVERITY_LOW}_and_above".lower()] += 1 return list(file_severities_dict.values()) diff --git a/backend/application/metrics/tasks.py b/backend/application/metrics/tasks.py deleted file mode 100644 index 73ccaa3e2..000000000 --- a/backend/application/metrics/tasks.py +++ /dev/null @@ -1,25 +0,0 @@ -import logging - -from huey import crontab -from huey.contrib.djhuey import db_periodic_task, lock_task - -from application.commons import settings_static -from application.commons.services.tasks import handle_task_exception -from application.metrics.services.metrics import calculate_product_metrics - -logger = logging.getLogger("secobserve.metrics") - - -@db_periodic_task( - crontab(minute=f"*/{settings_static.background_product_metrics_interval_minutes}") -) -@lock_task("calculate_product_metrics") -def task_calculate_product_metrics() -> None: - logger.info("--- Calculate_product_metrics - start ---") - - try: - calculate_product_metrics() - except Exception as e: - handle_task_exception(e) - - logger.info("--- Calculate_product_metrics - finished ---") diff --git a/backend/application/notifications/__init__.py b/backend/application/notifications/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/notifications/api/__init__.py b/backend/application/notifications/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/commons/api/exception_handler.py b/backend/application/notifications/api/exception_handler.py similarity index 75% rename from backend/application/commons/api/exception_handler.py rename to backend/application/notifications/api/exception_handler.py index 86472c2af..f54768fa7 100644 --- a/backend/application/commons/api/exception_handler.py +++ b/backend/application/notifications/api/exception_handler.py @@ -14,13 +14,16 @@ ) from rest_framework.views import exception_handler +from application.access_control.services.current_user import get_current_username from application.commons.services.log_message import format_log_message -from application.commons.services.send_notifications import send_exception_notification +from application.notifications.services.send_notifications import ( + send_exception_notification, +) logger = logging.getLogger("secobserve.exception_handler") -def custom_exception_handler(exc, context): +def custom_exception_handler(exc: Exception, context: dict) -> Response: response: Optional[Response] if isinstance(exc, ProtectedError): # An object cannot be deleted because it has dependent objects. 
@@ -41,13 +44,15 @@ def custom_exception_handler(exc, context): response.status_code = HTTP_500_INTERNAL_SERVER_ERROR response.data = {} response.data["message"] = "Internal server error, check logs for details" - logger.error(format_log_message(response=response, exception=exc)) + logger.error(format_log_message(response=response, exception=exc, username=get_current_username())) logger.error(traceback.format_exc()) send_exception_notification(exc) else: if response.status_code < 500: if response.status_code in (HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN): - logger.warning(format_log_message(response=response, exception=exc)) + logger.warning( + format_log_message(response=response, exception=exc, username=get_current_username()) + ) # HTTP status codes lower than 500 are no technical errors. # They need not to be logged and we provide the exception @@ -58,45 +63,27 @@ def custom_exception_handler(exc, context): # HTTP status code 500 or higher are technical errors. # They get logged but no details are given to the user, # to avoid leaking internal technical information. - logger.error(format_log_message(response=response, exception=exc)) + logger.error(format_log_message(response=response, exception=exc, username=get_current_username())) logger.error(traceback.format_exc()) send_exception_notification(exc) response.data = {} - response.data["message"] = ( - "Internal server error, check logs for details" - ) + response.data["message"] = "Internal server error, check logs for details" return response -def format_exception_message(exc): - if ( - hasattr(exc, "detail") - and exc.detail - and type(exc.detail).__name__ == "ReturnDict" - ): +def format_exception_message(exc: Exception) -> str: + if hasattr(exc, "detail") and exc.detail and type(exc.detail).__name__ == "ReturnDict": message_list = [] for key in exc.detail: for message in exc.detail.get(key): - message_list.append( - f'{key.replace("_", " ").capitalize()}: {str(message)}' - ) + message_list.append(f'{key.replace("_", " ").capitalize()}: {str(message)}') return "\n".join(message_list) - if ( - hasattr(exc, "detail") - and exc.detail - and isinstance(exc.detail, list) - and len(exc.detail) > 0 - ): + if hasattr(exc, "detail") and exc.detail and isinstance(exc.detail, list) and len(exc.detail) > 0: return " / ".join(exc.detail) - if ( - hasattr(exc, "args") - and exc.args - and isinstance(exc.args[0], str) - and "protected foreign keys" in exc.args[0] - ): + if hasattr(exc, "args") and exc.args and isinstance(exc.args[0], str) and "protected foreign keys" in exc.args[0]: return _format_protected_foreign_keys(exc.args[0]) return str(exc) diff --git a/backend/application/notifications/api/filters.py b/backend/application/notifications/api/filters.py new file mode 100644 index 000000000..ad5adac87 --- /dev/null +++ b/backend/application/notifications/api/filters.py @@ -0,0 +1,58 @@ +from typing import Any + +from django.db.models import QuerySet, Subquery +from django_filters import BooleanFilter, CharFilter, FilterSet, OrderingFilter + +from application.access_control.services.current_user import get_current_user +from application.notifications.models import Notification, Notification_Viewed + + +class NotificationFilter(FilterSet): + name = CharFilter(field_name="name", lookup_expr="icontains") + message = CharFilter(field_name="message", lookup_expr="icontains") + function = CharFilter(field_name="function", lookup_expr="icontains") + exclude_already_viewed = BooleanFilter(field_name="exclude_already_viewed", 
method="get_exclude_already_viewed") + + def get_exclude_already_viewed( + self, + queryset: QuerySet, + name: Any, # pylint: disable=unused-argument + value: Any, + ) -> QuerySet: + # field_name is used as a positional argument + user = get_current_user() + if not user: + return queryset + + if value: + return queryset.exclude( + id__in=Subquery(Notification_Viewed.objects.filter(user=user).values_list("notification_id", flat=True)) + ) + + return queryset + + ordering = OrderingFilter( + fields=( + ("name", "name"), + ("created", "created"), + ("message", "message"), + ("type", "type"), + ("function", "function"), + ("product__name", "product_name"), + ("observation__title", "observation_title"), + ("user__full_name", "user_full_name"), + ), + ) + + class Meta: + model = Notification + fields = [ + "name", + "created", + "message", + "type", + "function", + "product", + "observation", + "user", + ] diff --git a/backend/application/notifications/api/permissions.py b/backend/application/notifications/api/permissions.py new file mode 100644 index 000000000..8c3d8d83e --- /dev/null +++ b/backend/application/notifications/api/permissions.py @@ -0,0 +1,25 @@ +from typing import Any + +from rest_framework.permissions import BasePermission +from rest_framework.request import Request +from rest_framework.views import APIView + +from application.authorization.api.permissions_base import check_object_permission +from application.authorization.services.roles_permissions import Permissions + + +class UserHasNotificationPermission(BasePermission): + def has_object_permission(self, request: Request, view: APIView, obj: Any) -> bool: + if obj.product: + return check_object_permission( + request=request, + object_to_check=obj.product, + get_permission=Permissions.Product_View, + put_permission=None, + delete_permission=Permissions.Product_Delete, + ) + + if request.user and request.user.is_superuser: + return True + + return False diff --git a/backend/application/notifications/api/serializers.py b/backend/application/notifications/api/serializers.py new file mode 100644 index 000000000..bcbd3416c --- /dev/null +++ b/backend/application/notifications/api/serializers.py @@ -0,0 +1,66 @@ +from typing import Optional + +from rest_framework.serializers import ( + IntegerField, + ListField, + ModelSerializer, + Serializer, + SerializerMethodField, +) + +from application.access_control.services.current_user import get_current_user +from application.notifications.models import Notification, Notification_Viewed + + +class NotificationSerializer(ModelSerializer): + message = SerializerMethodField() + product_name = SerializerMethodField() + observation_title = SerializerMethodField() + user_full_name = SerializerMethodField() + new_viewed = SerializerMethodField() + + class Meta: + model = Notification + fields = "__all__" + + def get_message(self, obj: Notification) -> Optional[str]: + if not obj.message: + return obj.message + + user = get_current_user() + if user and user.is_superuser: + return obj.message + + return "..." 
+ + def get_product_name(self, obj: Notification) -> Optional[str]: + if obj.product: + return obj.product.name + + if obj.observation: + return obj.observation.product.name + + return None + + def get_observation_title(self, obj: Notification) -> Optional[str]: + if obj.observation: + return obj.observation.title + + return None + + def get_user_full_name(self, obj: Notification) -> Optional[str]: + if obj.user: + return obj.user.full_name + + return None + + def get_new_viewed(self, obj: Notification) -> str: + user = get_current_user() + if user: + if Notification_Viewed.objects.filter(notification=obj, user=user).exists(): + return "Viewed" + return "New" + + +class NotificationBulkSerializer(Serializer): + notifications = ListField(child=IntegerField(min_value=1), min_length=0, max_length=250, required=True) diff --git a/backend/application/notifications/api/views.py b/backend/application/notifications/api/views.py new file mode 100644 index 000000000..d9e502f7e --- /dev/null +++ b/backend/application/notifications/api/views.py @@ -0,0 +1,74 @@ +from django.db.models.query import QuerySet +from django_filters.rest_framework import DjangoFilterBackend +from drf_spectacular.utils import extend_schema +from rest_framework.decorators import action +from rest_framework.exceptions import ValidationError +from rest_framework.filters import SearchFilter +from rest_framework.mixins import DestroyModelMixin, ListModelMixin, RetrieveModelMixin +from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.status import HTTP_204_NO_CONTENT, HTTP_404_NOT_FOUND +from rest_framework.viewsets import GenericViewSet + +from application.notifications.api.filters import NotificationFilter +from application.notifications.api.permissions import UserHasNotificationPermission +from application.notifications.api.serializers import ( + NotificationBulkSerializer, + NotificationSerializer, +) +from application.notifications.models import Notification, Notification_Viewed +from application.notifications.queries.notification import ( + get_notification_by_id, + get_notifications, +) +from application.notifications.services.notification import bulk_mark_as_viewed + + +class NotificationViewSet(GenericViewSet, DestroyModelMixin, ListModelMixin, RetrieveModelMixin): + serializer_class = NotificationSerializer + filterset_class = NotificationFilter + permission_classes = (IsAuthenticated, UserHasNotificationPermission) + queryset = Notification.objects.all() + filter_backends = [SearchFilter, DjangoFilterBackend] + search_fields = ["name"] + + def get_queryset(self) -> QuerySet[Notification]: + return ( + get_notifications() + .select_related("product") + .select_related("observation") + .select_related("observation__product") + .select_related("user") + ) + + @extend_schema( + methods=["POST"], + request=NotificationBulkSerializer, + responses={HTTP_204_NO_CONTENT: None}, + ) + @action(detail=False, methods=["post"]) + def bulk_mark_as_viewed(self, request: Request) -> Response: + request_serializer = NotificationBulkSerializer(data=request.data) + if not request_serializer.is_valid(): + raise ValidationError(request_serializer.errors) + + bulk_mark_as_viewed(request_serializer.validated_data.get("notifications")) + + return Response(status=HTTP_204_NO_CONTENT) + + @extend_schema( + methods=["POST"], + request=None, + responses={HTTP_204_NO_CONTENT: None}, + ) + @action(detail=True, methods=["post"]) + def 
mark_as_viewed(self, request: Request, pk: int) -> Response: + if not get_notification_by_id(pk): + return Response(status=HTTP_404_NOT_FOUND) + + Notification_Viewed.objects.update_or_create( + notification_id=pk, + user=request.user, + ) + return Response(status=HTTP_204_NO_CONTENT) diff --git a/backend/application/notifications/apps.py b/backend/application/notifications/apps.py new file mode 100644 index 000000000..4a2df5ee0 --- /dev/null +++ b/backend/application/notifications/apps.py @@ -0,0 +1,14 @@ +from django.apps import AppConfig + + +class NotificationsConfig(AppConfig): + name = "application.notifications" + verbose_name = "Notifications" + + def ready(self) -> None: + try: + import application.notifications.signals # noqa F401 pylint: disable=import-outside-toplevel, unused-import + except ImportError: + pass + + import config.schema # noqa: F401 pylint: disable=import-outside-toplevel, unused-import diff --git a/backend/application/notifications/migrations/0001_initial.py b/backend/application/notifications/migrations/0001_initial.py new file mode 100644 index 000000000..39343a940 --- /dev/null +++ b/backend/application/notifications/migrations/0001_initial.py @@ -0,0 +1,100 @@ +# Generated by Django 5.1.8 on 2025-04-15 06:24 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ("core", "0062_alter_branch_osv_linux_distribution_and_more"), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + state_operations=[ + migrations.CreateModel( + name="Notification", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("name", models.CharField(max_length=255)), + ("created", models.DateTimeField(auto_now_add=True)), + ("message", models.TextField(max_length=4096)), + ( + "type", + models.CharField( + choices=[ + ("Exception", "Exception"), + ("Security gate", "Security gate"), + ("Task", "Task"), + ], + max_length=20, + ), + ), + ("function", models.CharField(blank=True, max_length=255)), + ("arguments", models.TextField(blank=True, max_length=4096)), + ( + "observation", + models.ForeignKey( + null=True, on_delete=django.db.models.deletion.CASCADE, to="core.observation" + ), + ), + ( + "product", + models.ForeignKey( + null=True, on_delete=django.db.models.deletion.CASCADE, to="core.product" + ), + ), + ( + "user", + models.ForeignKey( + null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), + ], + options={ + "db_table": "commons_notification", + }, + ), + ], + database_operations=[], + ), + migrations.SeparateDatabaseAndState( + state_operations=[ + migrations.CreateModel( + name="Notification_Viewed", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "notification", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="notifications.notification" + ), + ), + ( + "user", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), + ), + ], + options={ + "db_table": "commons_notification_viewed", + }, + ), + ], + database_operations=[], + ), + ] diff --git a/backend/application/notifications/migrations/__init__.py b/backend/application/notifications/migrations/__init__.py new file mode 100644 index 
000000000..e69de29bb diff --git a/backend/application/notifications/models.py b/backend/application/notifications/models.py new file mode 100644 index 000000000..72cf62eb0 --- /dev/null +++ b/backend/application/notifications/models.py @@ -0,0 +1,44 @@ +from django.db.models import ( + CASCADE, + CharField, + DateTimeField, + ForeignKey, + Model, + TextField, +) + +from application.access_control.models import User +from application.core.models import Observation, Product + + +class Notification(Model): + TYPE_EXCEPTION = "Exception" + TYPE_SECURITY_GATE = "Security gate" + TYPE_TASK = "Task" + + TYPE_CHOICES = [ + (TYPE_EXCEPTION, TYPE_EXCEPTION), + (TYPE_SECURITY_GATE, TYPE_SECURITY_GATE), + (TYPE_TASK, TYPE_TASK), + ] + + name = CharField(max_length=255) + created = DateTimeField(auto_now_add=True) + message = TextField(max_length=4096) + user = ForeignKey(User, on_delete=CASCADE, null=True) + product = ForeignKey(Product, on_delete=CASCADE, null=True) + observation = ForeignKey(Observation, on_delete=CASCADE, null=True) + type = CharField(max_length=20, choices=TYPE_CHOICES) + function = CharField(max_length=255, blank=True) + arguments = TextField(max_length=4096, blank=True) + + class Meta: + db_table = "commons_notification" + + +class Notification_Viewed(Model): + notification = ForeignKey(Notification, on_delete=CASCADE) + user = ForeignKey(User, on_delete=CASCADE) + + class Meta: + db_table = "commons_notification_viewed" diff --git a/backend/application/notifications/queries/__init__.py b/backend/application/notifications/queries/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/notifications/queries/notification.py b/backend/application/notifications/queries/notification.py new file mode 100644 index 000000000..edff380ba --- /dev/null +++ b/backend/application/notifications/queries/notification.py @@ -0,0 +1,57 @@ +from typing import Optional + +from django.db.models import Exists, OuterRef, Q +from django.db.models.query import QuerySet + +from application.access_control.services.current_user import get_current_user +from application.core.models import Product_Authorization_Group_Member, Product_Member +from application.notifications.models import Notification + + +def get_notification_by_id(notification_id: int) -> Optional[Notification]: + try: + return Notification.objects.get(id=notification_id) + except Notification.DoesNotExist: + return None + + +def get_notifications() -> QuerySet[Notification]: + user = get_current_user() + + if user is None: + return Notification.objects.none() + + notifications = Notification.objects.all().order_by("-created") + + if not user.is_superuser: + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) + + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), + authorization_group__users=user, + ) + + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, + ) + + notifications = notifications.annotate( + product__member=Exists(product_members), + product__product_group__member=Exists(product_group_members), + authorization_group_member=Exists(product_authorization_group_members), + 
product_group_authorization_group_member=Exists(product_group_authorization_group_members), + ) + + notifications = notifications.filter( + ( + Q(product__member=True) + | Q(product__product_group__member=True) + | Q(authorization_group_member=True) + | Q(product_group_authorization_group_member=True) + ) + & (Q(type=Notification.TYPE_SECURITY_GATE) | Q(type=Notification.TYPE_TASK)) + ) + + return notifications diff --git a/backend/application/notifications/services/__init__.py b/backend/application/notifications/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/application/notifications/services/notification.py b/backend/application/notifications/services/notification.py new file mode 100644 index 000000000..4946ce186 --- /dev/null +++ b/backend/application/notifications/services/notification.py @@ -0,0 +1,28 @@ +from django.db.models.query import QuerySet +from rest_framework.exceptions import ValidationError + +from application.access_control.services.current_user import get_current_user +from application.notifications.models import Notification, Notification_Viewed +from application.notifications.queries.notification import get_notifications + + +def bulk_mark_as_viewed(notification_ids: list[int]) -> None: + notifications = _check_notifications(notification_ids) + user = get_current_user() + + for notification in notifications: + Notification_Viewed.objects.update_or_create( + notification=notification, + user=user, + ) + + +def _check_notifications(notification_ids: list[int]) -> QuerySet[Notification]: + notifications = get_notifications().filter(id__in=notification_ids) + if len(notifications) != len(notification_ids): + raise ValidationError("Some notifications do not exist") + + if not get_current_user(): + raise ValidationError("No user in backend request") + + return notifications diff --git a/backend/application/commons/services/send_notifications.py b/backend/application/notifications/services/send_notifications.py similarity index 88% rename from backend/application/commons/services/send_notifications.py rename to backend/application/notifications/services/send_notifications.py index e5f4cf867..71afb04b3 100644 --- a/backend/application/commons/services/send_notifications.py +++ b/backend/application/notifications/services/send_notifications.py @@ -1,21 +1,25 @@ import logging import traceback from datetime import datetime, timedelta -from typing import Optional +from typing import Any, Optional +import environ import requests from django.core.mail import send_mail from django.template.loader import render_to_string +from huey.contrib.djhuey import db_task, task from application.access_control.models import User from application.access_control.queries.user import get_user_by_email -from application.commons.models import Notification, Settings +from application.access_control.services.current_user import get_current_user +from application.commons.models import Settings from application.commons.services.functions import get_base_url_frontend, get_classname -from application.commons.services.global_request import get_current_user from application.commons.services.log_message import format_log_message from application.core.models import Product +from application.notifications.models import Notification + +logger = logging.getLogger("secobserve.notifications") -logger = logging.getLogger("secobserve.commons") LAST_EXCEPTIONS: dict[str, datetime] = {} @@ -35,7 +39,7 @@ def send_product_security_gate_notification(product: Product) -> None: if 
email_to_addresses and settings.email_from: for email_to_address in email_to_addresses: first_name = _get_first_name(email_to_address) - _send_email_notification( + send_email_notification( email_to_address, f"Security gate for {product.name} has changed to {security_gate_status}", "email_product_security_gate.tpl", @@ -47,7 +51,7 @@ def send_product_security_gate_notification(product: Product) -> None: notification_ms_teams_webhook = _get_notification_ms_teams_webhook(product) if notification_ms_teams_webhook: - _send_msteams_notification( + send_msteams_notification( notification_ms_teams_webhook, "msteams_product_security_gate.tpl", product=product, @@ -57,7 +61,7 @@ def send_product_security_gate_notification(product: Product) -> None: notification_slack_webhook = _get_notification_slack_webhook(product) if notification_slack_webhook: - _send_slack_notification( + send_slack_notification( notification_slack_webhook, "slack_product_security_gate.tpl", product=product, @@ -81,7 +85,7 @@ def send_exception_notification(exception: Exception) -> None: if email_to_adresses and settings.email_from: for notification_email_to in email_to_adresses: first_name = _get_first_name(notification_email_to) - _send_email_notification( + send_email_notification( notification_email_to, f'Exception "{get_classname(exception)}" has occured', "email_exception.tpl", @@ -93,7 +97,7 @@ def send_exception_notification(exception: Exception) -> None: ) if settings.exception_ms_teams_webhook: - _send_msteams_notification( + send_msteams_notification( settings.exception_ms_teams_webhook, "msteams_exception.tpl", exception_class=get_classname(exception), @@ -103,7 +107,7 @@ def send_exception_notification(exception: Exception) -> None: ) if settings.exception_slack_webhook: - _send_slack_notification( + send_slack_notification( settings.exception_slack_webhook, "slack_exception.tpl", exception_class=get_classname(exception), @@ -125,6 +129,7 @@ def send_task_exception_notification( arguments: Optional[dict], user: Optional[User], exception: Exception, + product: Optional[Product] = None, ) -> None: settings = Settings.load() @@ -133,7 +138,7 @@ def send_task_exception_notification( if email_to_adresses and settings.email_from: for notification_email_to in email_to_adresses: first_name = _get_first_name(notification_email_to) - _send_email_notification( + send_email_notification( notification_email_to, f'Exception "{get_classname(exception)}" has occured in background task', "email_task_exception.tpl", @@ -148,7 +153,7 @@ def send_task_exception_notification( ) if settings.exception_ms_teams_webhook: - _send_msteams_notification( + send_msteams_notification( settings.exception_ms_teams_webhook, "msteams_task_exception.tpl", function=function, @@ -161,7 +166,7 @@ def send_task_exception_notification( ) if settings.exception_slack_webhook: - _send_slack_notification( + send_slack_notification( settings.exception_slack_webhook, "slack_task_exception.tpl", function=function, @@ -173,11 +178,14 @@ def send_task_exception_notification( date_time=datetime.now(), ) - product = None observation = None + if arguments: + if not product: + product = arguments.get("product") + observation = arguments.get("observation") - if observation: + if observation and not product: product = observation.product Notification.objects.create( @@ -192,100 +200,10 @@ def send_task_exception_notification( ) -def _send_email_notification( - notification_email_to: str, subject: str, template: str, **kwargs -) -> None: +def 
_ratelimit_exception(exception: Exception, function: str = None, arguments: dict = None) -> bool: settings = Settings.load() - notification_message = _create_notification_message(template, **kwargs) - if notification_message: - try: - send_mail( - subject=subject, - message=notification_message, - from_email=settings.email_from, - recipient_list=[notification_email_to], - fail_silently=False, - ) - except Exception as e: - logger.error( - format_log_message( - message=f"Error while sending email to {notification_email_to}", - exception=e, - ) - ) - - -def _send_msteams_notification(webhook: str, template: str, **kwargs) -> None: - notification_message = _create_notification_message(template, **kwargs) - if notification_message: - notification_message = notification_message.replace(""", '\\"') - try: - response = requests.request( - method="POST", - url=webhook, - data=notification_message, - timeout=60, - ) - response.raise_for_status() - except Exception as e: - logger.error( - format_log_message( - message=f"Error while calling MS Teams webhook {webhook}", - exception=e, - ) - ) - - -def _send_slack_notification(webhook: str, template: str, **kwargs) -> None: - notification_message = _create_notification_message(template, **kwargs) - if notification_message: - notification_message = notification_message.replace("'", "\\'") - notification_message = notification_message.replace(""", '\\"') - try: - response = requests.request( - method="POST", - url=webhook, - data=notification_message, - timeout=60, - ) - response.raise_for_status() - except Exception as e: - logger.error( - format_log_message( - message=f"Error while calling Slack webhook {webhook}", - exception=e, - ) - ) - - -def _create_notification_message(template: str, **kwargs) -> Optional[str]: - try: - return render_to_string(template, kwargs) - except Exception as e: - logger.error( - format_log_message( - message=f"Error while rendering template {template}", - exception=e, - ) - ) - return None - - -def _ratelimit_exception( - exception: Exception, function: str = None, arguments: dict = None -) -> bool: - settings = Settings.load() - - key = ( - get_classname(exception) - + "/" - + str(exception) - + "/" - + str(function) - + "/" - + _get_arguments_string(arguments) - ) + key = get_classname(exception) + "/" + str(exception) + "/" + str(function) + "/" + _get_arguments_string(arguments) now = datetime.now() if key in LAST_EXCEPTIONS: @@ -362,3 +280,84 @@ def _get_arguments_string(arguments: Optional[dict]) -> str: if arguments: return str(arguments) return "" + + +@db_task() +def send_email_notification(notification_email_to: str, subject: str, template: str, **kwargs: Any) -> None: + settings = Settings.load() + notification_message = _create_notification_message(template, **kwargs) + env = environ.Env() + if (env("EMAIL_HOST", default="") or env("EMAIL_PORT", default="")) and notification_message: + try: + send_mail( + subject=subject, + message=notification_message, + from_email=settings.email_from, + recipient_list=[notification_email_to], + fail_silently=False, + ) + except Exception as e: + logger.error( + format_log_message( + message=f"Error while sending email to {notification_email_to}", + exception=e, + ) + ) + + +@task() +def send_msteams_notification(webhook: str, template: str, **kwargs: Any) -> None: + notification_message = _create_notification_message(template, **kwargs) + if notification_message: + notification_message = notification_message.replace(""", '\\"') + try: + response = requests.request( + 
method="POST", + url=webhook, + data=notification_message, + timeout=60, + ) + response.raise_for_status() + except Exception as e: + logger.error( + format_log_message( + message=f"Error while calling MS Teams webhook {webhook}", + exception=e, + ) + ) + + +@task() +def send_slack_notification(webhook: str, template: str, **kwargs: Any) -> None: + notification_message = _create_notification_message(template, **kwargs) + if notification_message: + notification_message = notification_message.replace("'", "\\'") + notification_message = notification_message.replace(""", '\\"') + try: + response = requests.request( + method="POST", + url=webhook, + data=notification_message, + timeout=60, + ) + response.raise_for_status() + except Exception as e: + logger.error( + format_log_message( + message=f"Error while calling Slack webhook {webhook}", + exception=e, + ) + ) + + +def _create_notification_message(template: str, **kwargs: Any) -> Optional[str]: + try: + return render_to_string(template, kwargs) + except Exception as e: + logger.error( + format_log_message( + message=f"Error while rendering template {template}", + exception=e, + ) + ) + return None diff --git a/backend/application/commons/services/tasks.py b/backend/application/notifications/services/tasks.py similarity index 72% rename from backend/application/commons/services/tasks.py rename to backend/application/notifications/services/tasks.py index bbff929cb..8d2c96e94 100644 --- a/backend/application/commons/services/tasks.py +++ b/backend/application/notifications/services/tasks.py @@ -5,14 +5,15 @@ from application.access_control.models import User from application.commons.services.log_message import format_log_message -from application.commons.services.send_notifications import ( +from application.core.models import Product +from application.notifications.services.send_notifications import ( send_task_exception_notification, ) logger = logging.getLogger("secobserve.tasks") -def handle_task_exception(e: Exception, user: User = None) -> None: +def handle_task_exception(e: Exception, user: User = None, product: Product = None) -> None: data: dict[str, Any] = {} function = None arguments = None @@ -34,11 +35,9 @@ def handle_task_exception(e: Exception, user: User = None) -> None: message="Error while executing background task", data=data, exception=e, - user=user, + username=user.username if user else None, ) ) logger.error(traceback.format_exc()) - send_task_exception_notification( - function=function, arguments=arguments, user=user, exception=e - ) + send_task_exception_notification(function=function, arguments=arguments, user=user, exception=e, product=product) diff --git a/backend/application/rules/api/permissions.py b/backend/application/rules/api/permissions.py index 434d939dd..2293b00be 100644 --- a/backend/application/rules/api/permissions.py +++ b/backend/application/rules/api/permissions.py @@ -1,28 +1,40 @@ +from django.contrib.auth.models import AnonymousUser from rest_framework.permissions import BasePermission +from rest_framework.request import Request +from rest_framework.views import APIView -from application.access_control.api.permissions_base import ( +from application.authorization.api.permissions_base import ( check_object_permission, check_post_permission, ) -from application.access_control.services.roles_permissions import Permissions +from application.authorization.services.roles_permissions import Permissions from application.core.models import Product +from application.rules.models import Rule class 
UserHasGeneralRulePermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "GET": return True - return request.user and request.user.is_superuser + if not request.user: + return False + + if isinstance(request.user, AnonymousUser): + return False + + return request.user.is_superuser class UserHasProductRulePermission(BasePermission): - def has_permission(self, request, view): - return check_post_permission( - request, Product, "product", Permissions.Product_Rule_Create - ) + def has_permission(self, request: Request, view: APIView) -> bool: + if "/simulate" in request.path: + # Permission will be checked in the view + return True + + return check_post_permission(request, Product, "product", Permissions.Product_Rule_Create) - def has_object_permission(self, request, view, obj): + def has_object_permission(self, request: Request, view: APIView, obj: Rule) -> bool: return check_object_permission( request=request, object_to_check=obj, diff --git a/backend/application/rules/api/serializers.py b/backend/application/rules/api/serializers.py index aa523fb16..c698d1f9e 100644 --- a/backend/application/rules/api/serializers.py +++ b/backend/application/rules/api/serializers.py @@ -1,18 +1,23 @@ +import platform from typing import Optional from rest_framework.serializers import ( CharField, ChoiceField, DateTimeField, + IntegerField, + ListField, ModelSerializer, Serializer, SerializerMethodField, ValidationError, ) +from application.core.api.serializers_observation import ObservationListSerializer from application.core.api.serializers_product import NestedProductSerializer +from application.core.models import Product from application.rules.models import Rule -from application.rules.types import Rule_Status +from application.rules.types import Rule_Status, Rule_Type class GeneralRuleSerializer(ModelSerializer): @@ -40,10 +45,29 @@ def get_approval_user_full_name(self, obj: Rule) -> Optional[str]: return None - def validate(self, attrs): - if not attrs.get("parser") and not attrs.get("scanner_prefix"): + def validate_description(self, value: str) -> str: + if not value: + raise ValidationError("Must be set") + + return value + + def validate_type(self, value: str) -> str: + if value == Rule_Type.RULE_TYPE_REGO and platform.machine() not in ["x86_64", "AMD64"]: + raise ValidationError("Rego rules are only supported on 'x86_64' or 'AMD64' architectures") + + return value + + def validate(self, attrs: dict) -> dict: + if ( + attrs.get("type") == Rule_Type.RULE_TYPE_FIELDS + and not attrs.get("parser") + and not attrs.get("scanner_prefix") + ): raise ValidationError("Either Parser or Scanner Prefix must be set") + if attrs.get("type") == Rule_Type.RULE_TYPE_REGO and not attrs.get("rego_module"): + raise ValidationError("Rego module must be set") + return super().validate(attrs) def update(self, instance: Rule, validated_data: dict) -> Rule: @@ -77,17 +101,30 @@ def get_approval_user_full_name(self, obj: Rule) -> Optional[str]: return None - def validate_product(self, value): + def validate_description(self, value: str) -> str: + if not value: + raise ValidationError("Must be set") + + return value + + def validate_product(self, value: Product) -> Product: self.instance: Rule if self.instance and self.instance.product != value: raise ValidationError("Product cannot be changed") return value - def validate(self, attrs): - if not attrs.get("parser") and not attrs.get("scanner_prefix"): + def validate(self, 
attrs: dict) -> dict: + if ( + attrs.get("type") == Rule_Type.RULE_TYPE_FIELDS + and not attrs.get("parser") + and not attrs.get("scanner_prefix") + ): raise ValidationError("Either Parser or Scanner Prefix must be set") + if attrs.get("type") == Rule_Type.RULE_TYPE_REGO and not attrs.get("rego_module"): + raise ValidationError("Rego module must be set") + return super().validate(attrs) def update(self, instance: Rule, validated_data: dict) -> Rule: @@ -96,7 +133,10 @@ def update(self, instance: Rule, validated_data: dict) -> Rule: class RuleApprovalSerializer(Serializer): - approval_status = ChoiceField( - choices=Rule_Status.RULE_STATUS_CHOICES_APPROVAL, required=True - ) + approval_status = ChoiceField(choices=Rule_Status.RULE_STATUS_CHOICES_APPROVAL, required=True) approval_remark = CharField(max_length=255, required=True) + + +class SimulationResultSerializer(Serializer): + count = IntegerField() + results = ListField(child=ObservationListSerializer()) diff --git a/backend/application/rules/api/views.py b/backend/application/rules/api/views.py index 008f8fde3..9700c6d52 100644 --- a/backend/application/rules/api/views.py +++ b/backend/application/rules/api/views.py @@ -1,3 +1,6 @@ +from dataclasses import dataclass + +from django.db.models import QuerySet from django_filters.rest_framework import DjangoFilterBackend from drf_spectacular.utils import extend_schema from rest_framework import status @@ -5,11 +8,14 @@ from rest_framework.exceptions import NotFound, ValidationError from rest_framework.filters import SearchFilter from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request from rest_framework.response import Response +from rest_framework.status import HTTP_200_OK from rest_framework.viewsets import ModelViewSet -from application.access_control.services.authorization import user_has_permission_or_403 -from application.access_control.services.roles_permissions import Permissions +from application.authorization.services.authorization import user_has_permission_or_403 +from application.authorization.services.roles_permissions import Permissions +from application.core.models import Observation from application.rules.api.filters import GeneralRuleFilter, ProductRuleFilter from application.rules.api.permissions import ( UserHasGeneralRulePermission, @@ -19,6 +25,7 @@ GeneralRuleSerializer, ProductRuleSerializer, RuleApprovalSerializer, + SimulationResultSerializer, ) from application.rules.models import Rule from application.rules.queries.rule import ( @@ -28,6 +35,13 @@ get_product_rules, ) from application.rules.services.approval import rule_approval +from application.rules.services.simulator import simulate_rule + + +@dataclass +class SimulationResult: + count: int + results: list[Observation] class GeneralRuleViewSet(ModelViewSet): @@ -38,7 +52,7 @@ class GeneralRuleViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] - def get_queryset(self): + def get_queryset(self) -> QuerySet[Rule]: return get_general_rules() @extend_schema( @@ -47,7 +61,7 @@ def get_queryset(self): responses={status.HTTP_204_NO_CONTENT: None}, ) @action(detail=True, methods=["patch"]) - def approval(self, request, pk=None): + def approval(self, request: Request, pk: int) -> Response: request_serializer = RuleApprovalSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) @@ -64,6 +78,18 @@ def approval(self, request, pk=None): return 
Response(status=status.HTTP_204_NO_CONTENT) + @extend_schema( + methods=["POST"], + responses={200: SimulationResultSerializer}, + ) + @action(detail=True, methods=["post"]) + def simulate(self, request: Request, pk: int) -> Response: + rule = get_general_rule_by_id(pk) + if not rule: + raise NotFound() + + return _do_simulation(rule) + class ProductRuleViewSet(ModelViewSet): serializer_class = ProductRuleSerializer @@ -73,7 +99,7 @@ class ProductRuleViewSet(ModelViewSet): filter_backends = [SearchFilter, DjangoFilterBackend] search_fields = ["name"] - def get_queryset(self): + def get_queryset(self) -> QuerySet[Rule]: return get_product_rules() @extend_schema( @@ -82,7 +108,7 @@ def get_queryset(self): responses={status.HTTP_204_NO_CONTENT: None}, ) @action(detail=True, methods=["patch"]) - def approval(self, request, pk=None): + def approval(self, request: Request, pk: int) -> Response: request_serializer = RuleApprovalSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) @@ -100,3 +126,25 @@ def approval(self, request, pk=None): ) return Response(status=status.HTTP_204_NO_CONTENT) + + @extend_schema( + methods=["POST"], + responses={200: SimulationResultSerializer}, + ) + @action(detail=True, methods=["post"]) + def simulate(self, request: Request, pk: int) -> Response: + rule = get_product_rule_by_id(pk) + if not rule: + raise NotFound() + + user_has_permission_or_403(rule.product, Permissions.Observation_View) + + return _do_simulation(rule) + + +def _do_simulation(rule: Rule) -> Response: + num_observations, observations = simulate_rule(rule) + simulation_result = SimulationResult(count=num_observations, results=observations) + response_serializer = SimulationResultSerializer(simulation_result) + + return Response(status=HTTP_200_OK, data=response_serializer.data) diff --git a/backend/application/rules/apps.py b/backend/application/rules/apps.py index f67e7b5a7..578078818 100644 --- a/backend/application/rules/apps.py +++ b/backend/application/rules/apps.py @@ -6,7 +6,7 @@ class RulesConfig(AppConfig): name = "application.rules" verbose_name = _("Rules") - def ready(self): + def ready(self) -> None: try: import application.rules.signals # noqa F401 pylint: disable=import-outside-toplevel,unused-import except ImportError: diff --git a/backend/application/rules/migrations/0001_initial.py b/backend/application/rules/migrations/0001_initial.py index 9d754b892..24afcf03f 100644 --- a/backend/application/rules/migrations/0001_initial.py +++ b/backend/application/rules/migrations/0001_initial.py @@ -61,9 +61,7 @@ class Migration(migrations.Migration): ), ( "parser", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="core.parser" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.parser"), ), ( "product", diff --git a/backend/application/rules/migrations/0008_rule_description_observation_alter_rule_parser.py b/backend/application/rules/migrations/0008_rule_description_observation_alter_rule_parser.py index 67316f17f..425844e75 100644 --- a/backend/application/rules/migrations/0008_rule_description_observation_alter_rule_parser.py +++ b/backend/application/rules/migrations/0008_rule_description_observation_alter_rule_parser.py @@ -19,8 +19,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="rule", name="parser", - field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.CASCADE, to="core.parser" - ), + field=models.ForeignKey(null=True, 
on_delete=django.db.models.deletion.CASCADE, to="core.parser"), ), ] diff --git a/backend/application/rules/migrations/0016_alter_rule_new_vex_justification.py b/backend/application/rules/migrations/0016_alter_rule_new_vex_justification.py new file mode 100644 index 000000000..5f1660821 --- /dev/null +++ b/backend/application/rules/migrations/0016_alter_rule_new_vex_justification.py @@ -0,0 +1,40 @@ +# Generated by Django 5.2.5 on 2025-08-26 19:17 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("rules", "0015_convert_unknown_data"), + ] + + operations = [ + migrations.AlterField( + model_name="rule", + name="new_vex_justification", + field=models.CharField( + blank=True, + choices=[ + ("component_not_present", "component_not_present"), + ("vulnerable_code_not_present", "vulnerable_code_not_present"), + ( + "vulnerable_code_cannot_be_controlled_by_adversary", + "vulnerable_code_cannot_be_controlled_by_adversary", + ), + ("vulnerable_code_not_in_execute_path", "vulnerable_code_not_in_execute_path"), + ("inline_mitigations_already_exist", "inline_mitigations_already_exist"), + ("code_not_present", "code_not_present"), + ("code_not_reachable", "code_not_reachable"), + ("requires_configuration", "requires_configuration"), + ("requires_dependency", "requires_dependency"), + ("requires_environment", "requires_environment"), + ("protected_by_compiler", "protected_by_compiler"), + ("protected_at_runtime", "protected_at_runtime"), + ("protected_at_perimeter", "protected_at_perimeter"), + ("protected_by_mitigating_control", "protected_by_mitigating_control"), + ], + max_length=64, + ), + ), + ] diff --git a/backend/application/rules/migrations/0017_rule_origin_component_purl.py b/backend/application/rules/migrations/0017_rule_origin_component_purl.py new file mode 100644 index 000000000..c4f4707b1 --- /dev/null +++ b/backend/application/rules/migrations/0017_rule_origin_component_purl.py @@ -0,0 +1,18 @@ +# Generated by Django 5.2.10 on 2026-01-14 18:51 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("rules", "0016_alter_rule_new_vex_justification"), + ] + + operations = [ + migrations.AddField( + model_name="rule", + name="origin_component_purl", + field=models.CharField(blank=True, max_length=255), + ), + ] diff --git a/backend/application/rules/migrations/0018_rule_rego_module_rule_type.py b/backend/application/rules/migrations/0018_rule_rego_module_rule_type.py new file mode 100644 index 000000000..83f898598 --- /dev/null +++ b/backend/application/rules/migrations/0018_rule_rego_module_rule_type.py @@ -0,0 +1,23 @@ +# Generated by Django 5.2.11 on 2026-02-11 10:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("rules", "0017_rule_origin_component_purl"), + ] + + operations = [ + migrations.AddField( + model_name="rule", + name="rego_module", + field=models.TextField(blank=True), + ), + migrations.AddField( + model_name="rule", + name="type", + field=models.CharField(choices=[("Fields", "Fields"), ("Rego", "Rego")], default="Fields", max_length=8), + ), + ] diff --git a/backend/application/rules/migrations/0019_alter_rule_new_status.py b/backend/application/rules/migrations/0019_alter_rule_new_status.py new file mode 100644 index 000000000..9e96d46f1 --- /dev/null +++ b/backend/application/rules/migrations/0019_alter_rule_new_status.py @@ -0,0 +1,32 @@ +# Generated by Django 5.2.11 on 2026-02-15 16:50 + 
+from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("rules", "0018_rule_rego_module_rule_type"), + ] + + operations = [ + migrations.AlterField( + model_name="rule", + name="new_status", + field=models.CharField( + blank=True, + choices=[ + ("Open", "Open"), + ("Affected", "Affected"), + ("Resolved", "Resolved"), + ("Duplicate", "Duplicate"), + ("False positive", "False positive"), + ("In review", "In review"), + ("Not affected", "Not affected"), + ("Not security", "Not security"), + ("Risk accepted", "Risk accepted"), + ], + max_length=16, + ), + ), + ] diff --git a/backend/application/rules/models.py b/backend/application/rules/models.py index 79ef398dd..3ade1fa3f 100644 --- a/backend/application/rules/models.py +++ b/backend/application/rules/models.py @@ -1,3 +1,5 @@ +from typing import Any + from django.db.models import ( CASCADE, PROTECT, @@ -11,36 +13,37 @@ ) from application.access_control.models import User +from application.access_control.services.current_user import get_current_user from application.commons.models import Settings -from application.commons.services.global_request import get_current_user from application.core.models import Product -from application.core.types import Severity, Status, VexJustification -from application.import_observations.models import Parser -from application.rules.types import Rule_Status +from application.core.types import Severity, Status, VEX_Justification +from application.rules.types import Rule_Status, Rule_Type class Rule(Model): name = CharField(max_length=255) description = TextField(max_length=2048, blank=True) product = ForeignKey(Product, blank=True, null=True, on_delete=CASCADE) - parser = ForeignKey(Parser, null=True, on_delete=CASCADE) + type = CharField(max_length=8, choices=Rule_Type.RULE_TYPE_CHOICES, default=Rule_Type.RULE_TYPE_FIELDS) + + parser = ForeignKey("import_observations.Parser", null=True, on_delete=CASCADE) scanner_prefix = CharField(max_length=255, blank=True) title = CharField(max_length=255, blank=True) description_observation = CharField(max_length=255, blank=True) origin_component_name_version = CharField(max_length=513, blank=True) + origin_component_purl = CharField(max_length=255, blank=True) origin_docker_image_name_tag = CharField(max_length=513, blank=True) origin_endpoint_url = TextField(max_length=2048, blank=True) origin_service_name = CharField(max_length=255, blank=True) origin_source_file = CharField(max_length=255, blank=True) origin_cloud_qualified_resource = CharField(max_length=255, blank=True) origin_kubernetes_qualified_resource = CharField(max_length=255, blank=True) - new_severity = CharField( - max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True - ) + new_severity = CharField(max_length=12, choices=Severity.SEVERITY_CHOICES, blank=True) new_status = CharField(max_length=16, choices=Status.STATUS_CHOICES, blank=True) - new_vex_justification = CharField( - max_length=64, choices=VexJustification.VEX_JUSTIFICATION_CHOICES, blank=True - ) + new_vex_justification = CharField(max_length=64, choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, blank=True) + + rego_module = TextField(blank=True) + enabled = BooleanField(default=True) user = ForeignKey( User, @@ -67,7 +70,7 @@ class Meta: Index(fields=["name"]), ] - def save(self, *args, **kwargs) -> None: + def save(self, *args: Any, **kwargs: Any) -> None: if not self.approval_status: self.user = get_current_user() @@ -81,12 +84,9 @@ def save(self, *args, **kwargs) -> None: 
needs_approval = settings.feature_general_rules_need_approval else: if self.product.product_group: - product_group_product_rules_needs_approval = ( - self.product.product_group.product_rules_need_approval - ) + product_group_product_rules_needs_approval = self.product.product_group.product_rules_need_approval needs_approval = ( - self.product.product_rules_need_approval - or product_group_product_rules_needs_approval + self.product.product_rules_need_approval or product_group_product_rules_needs_approval ) else: needs_approval = self.product.product_rules_need_approval @@ -98,5 +98,5 @@ def save(self, *args, **kwargs) -> None: return super().save(*args, **kwargs) - def __str__(self): + def __str__(self) -> str: return self.name diff --git a/backend/application/rules/queries/rule.py b/backend/application/rules/queries/rule.py index 6842b5d29..1fc8fee11 100644 --- a/backend/application/rules/queries/rule.py +++ b/backend/application/rules/queries/rule.py @@ -3,7 +3,7 @@ from django.db.models import Exists, OuterRef, Q from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import Product_Authorization_Group_Member, Product_Member from application.rules.models import Rule @@ -28,34 +28,24 @@ def get_product_rules() -> QuerySet[Rule]: product_rules = Rule.objects.filter(product__isnull=False) if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("product_id"), user=user - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product__product_group"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, ) product_rules = product_rules.annotate( product__member=Exists(product_members), product__product_group__member=Exists(product_group_members), authorization_group_member=Exists(product_authorization_group_members), - product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), ) product_rules = product_rules.filter( diff --git a/backend/application/rules/services/approval.py b/backend/application/rules/services/approval.py index 146e86165..347774ca6 100644 --- a/backend/application/rules/services/approval.py +++ b/backend/application/rules/services/approval.py @@ -1,7 +1,7 @@ from django.utils import timezone from rest_framework.exceptions import ValidationError -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user 
import get_current_user from application.rules.models import Rule from application.rules.types import Rule_Status diff --git a/backend/application/rules/services/rego_interpreter.py b/backend/application/rules/services/rego_interpreter.py new file mode 100644 index 000000000..d296f385c --- /dev/null +++ b/backend/application/rules/services/rego_interpreter.py @@ -0,0 +1,39 @@ +from typing import Any, Optional + +from regopy import Input, Interpreter +from regopy.rego_shared import RegoError +from rest_framework.exceptions import ValidationError + + +class RegoException(ValidationError): + pass + + +class RegoInterpreter: + def __init__(self, rego_module: str) -> None: + self.policy = rego_module + try: + rego = Interpreter() + rego.log_level = 1 + rego.add_module("rule", rego_module) + self.rego_bundle = rego.build("data") + except RegoError as e: + raise RegoException(f"Error while building rego bundle: {str(e)}") from e + + def query(self, data: Optional[Any] = None) -> dict: + try: + rego_run = Interpreter() + rego_run.set_input(Input(data)) + output = rego_run.query_bundle(self.rego_bundle) + + node = output.results + if not node: + raise RegoException("Rego output has no results") + if not node[0].expressions: + raise RegoException("Rego results have no expressions") + result = node[0].expressions[0].get("rule") + if result is None: + raise RegoException("Rego expressions have no 'rule' element") + return result + except RegoError as e: + raise RegoException(f"Error while querying rego module: {str(e)}") from e diff --git a/backend/application/rules/services/rule_engine.py b/backend/application/rules/services/rule_engine.py index 70ef45596..c1f4f39e2 100644 --- a/backend/application/rules/services/rule_engine.py +++ b/backend/application/rules/services/rule_engine.py @@ -1,10 +1,14 @@ +import json import re -from datetime import date -from typing import Optional +from copy import copy +from typing import Any, Optional -from application.commons.services.global_request import get_current_user +import jsonpickle + +from application.access_control.services.current_user import get_current_user from application.core.models import Observation, Product from application.core.services.observation import ( + get_current_priority, get_current_severity, get_current_status, get_current_vex_justification, @@ -13,16 +17,20 @@ from application.core.services.risk_acceptance_expiry import ( calculate_risk_acceptance_expiry_date, ) +from application.core.services.security_gate import check_security_gate from application.core.types import Assessment_Status, Status from application.issue_tracker.services.issue_tracker import ( push_observation_to_issue_tracker, ) from application.rules.models import Rule -from application.rules.types import Rule_Status +from application.rules.services.rego_interpreter import RegoInterpreter +from application.rules.types import Rule_Status, Rule_Type class Rule_Engine: - def __init__(self, product: Product): + def __init__(self, product: Product) -> None: + self.product = product + product_parser_rules = Rule.objects.filter( product=product, enabled=True, @@ -51,127 +59,57 @@ def __init__(self, product: Product): ) self.rules += list(general_rules) - self.product = product + self.rego_interpreters: dict[Any, RegoInterpreter] = {} + for rule in self.rules: + if rule.type == Rule_Type.RULE_TYPE_REGO: + self.rego_interpreters[rule.pk] = RegoInterpreter(rule.rego_module) def apply_rules_for_observation(self, observation: Observation) -> None: - previous_product_rule = ( - 
observation.product_rule if observation.product_rule else None - ) - previous_general_rule = ( - observation.general_rule if observation.general_rule else None - ) + observation_before = copy(observation) - observation.product_rule = None + observation.rule_severity = "" + observation.rule_rego_severity = "" + observation.rule_status = "" + observation.rule_rego_status = "" + observation.rule_priority = None + observation.rule_rego_priority = None + observation.rule_vex_justification = "" + observation.rule_rego_vex_justification = "" observation.general_rule = None + observation.general_rule_rego = None + observation.product_rule = None + observation.product_rule_rego = None - rule_found = False + rule_fields_found = False for rule in self.rules: - if ( # pylint: disable=too-many-boolean-expressions - (not rule.parser or observation.parser == rule.parser) - and ( - not rule.scanner_prefix - or observation.scanner.lower().startswith( - rule.scanner_prefix.lower() - ) - ) - and self._check_regex(rule.title, observation.title) - and self._check_regex( - rule.description_observation, observation.description - ) - and self._check_regex( - rule.origin_component_name_version, - observation.origin_component_name_version, - ) - and self._check_regex( - rule.origin_docker_image_name_tag, - observation.origin_docker_image_name_tag, - ) - and self._check_regex( - rule.origin_endpoint_url, observation.origin_endpoint_url - ) - and self._check_regex( - rule.origin_service_name, observation.origin_service_name - ) - and self._check_regex( - rule.origin_source_file, observation.origin_source_file - ) - and self._check_regex( - rule.origin_cloud_qualified_resource, - observation.origin_cloud_qualified_resource, - ) - and self._check_regex( - rule.origin_kubernetes_qualified_resource, - observation.origin_kubernetes_qualified_resource, - ) - ): - previous_severity = observation.current_severity - previous_rule_severity = observation.rule_severity - if rule.new_severity: - observation.rule_severity = rule.new_severity - observation.current_severity = get_current_severity(observation) - - previous_status = observation.current_status - previous_rule_status = observation.rule_status - if rule.new_status: - observation.rule_status = rule.new_status - observation.current_status = get_current_status(observation) - - previous_vex_justification = observation.current_vex_justification - previous_rule_vex_justification = observation.rule_vex_justification - if rule.new_vex_justification: - observation.rule_vex_justification = rule.new_vex_justification - observation.current_vex_justification = ( - get_current_vex_justification(observation) - ) - - previous_risk_acceptance_expiry_date = ( - observation.risk_acceptance_expiry_date - ) - if observation.current_status == Status.STATUS_RISK_ACCEPTED: - if previous_status != Status.STATUS_RISK_ACCEPTED: - observation.risk_acceptance_expiry_date = ( - calculate_risk_acceptance_expiry_date(observation.product) - ) - else: - observation.risk_acceptance_expiry_date = None + if rule.type == Rule_Type.RULE_TYPE_FIELDS: + rule_fields_found = self.check_rule_for_observation(rule, observation, observation_before) + if rule_fields_found: + break - if rule.product: - observation.product_rule = rule - else: - observation.general_rule = rule - - # Write observation and observation and push to issue tracker log if status or severity has been changed - if ( # pylint: disable=too-many-boolean-expressions - previous_rule_status != observation.rule_status - or previous_rule_severity != 
observation.rule_severity - or previous_status != observation.current_status - or previous_severity != observation.current_severity - or previous_general_rule != observation.general_rule - or previous_product_rule != observation.product_rule - or previous_rule_vex_justification - != observation.rule_vex_justification - or previous_vex_justification - != observation.current_vex_justification - ): - self._write_observation_log( - observation=observation, - rule=rule, - previous_severity=previous_severity, - previous_status=previous_status, - previous_vex_justification=previous_vex_justification, - previous_risk_acceptance_expiry_date=previous_risk_acceptance_expiry_date, - ) - push_observation_to_issue_tracker(observation, get_current_user()) - rule_found = True - break + # Write observation and observation log if no rule was found but there was one before + if not rule_fields_found and ( + observation_before.general_rule != observation.general_rule + or observation_before.product_rule != observation.product_rule + ): + _write_observation_log_no_rule( + observation, observation_before.product_rule, observation_before.general_rule + ) + + rule_rego_found = False + for rule in self.rules: + if rule.type == Rule_Type.RULE_TYPE_REGO: + rule_rego_found = self.check_rule_for_observation(rule, observation, observation_before) + if rule_rego_found: + break # Write observation and observation log if no rule was found but there was one before - if not rule_found and ( - previous_general_rule != observation.general_rule - or previous_product_rule != observation.product_rule + if not rule_rego_found and ( + observation_before.general_rule_rego != observation.general_rule_rego + or observation_before.product_rule_rego != observation.product_rule_rego ): - self._write_observation_log_no_rule( - observation, previous_product_rule, previous_general_rule + _write_observation_log_no_rule( + observation, observation_before.product_rule_rego, observation_before.general_rule_rego ) def apply_all_rules_for_product(self) -> None: @@ -181,134 +119,300 @@ def apply_all_rules_for_product(self) -> None: else: observations = Observation.objects.filter(product=self.product) + observations = ( + observations.select_related("parser").select_related("general_rule").select_related("product_rule") + ) + for observation in observations: self.apply_rules_for_observation(observation) - def _check_regex(self, pattern: str, value: str) -> bool: - if not pattern: - return True - - if not value: - return False - - compiled_pattern = re.compile(pattern, re.IGNORECASE) - return compiled_pattern.match(value) is not None + if self.product.is_product_group: + for product in self.product.products.all(): + check_security_gate(product) + else: + check_security_gate(self.product) - def _write_observation_log( + def check_rule_for_observation( self, - *, - observation: Observation, rule: Rule, - previous_severity: str, - previous_status: str, - previous_vex_justification: str, - previous_risk_acceptance_expiry_date: Optional[date], - ) -> None: - if previous_status != observation.current_status: - status = observation.current_status - else: - status = "" - if previous_severity != observation.current_severity: - severity = observation.current_severity - else: - severity = "" - if previous_vex_justification != observation.current_vex_justification: - vex_justification = observation.current_vex_justification - else: - vex_justification = "" - if ( - previous_risk_acceptance_expiry_date - != observation.risk_acceptance_expiry_date + observation: 
Observation, + observation_before: Observation, + simulation: Optional[bool] = False, + ) -> bool: + fields_found = False + if rule.type == Rule_Type.RULE_TYPE_FIELDS: + fields_found = self._check_rule_fields(rule, observation, observation_before, simulation) + if simulation: + return fields_found + + # Write observation and observation and push to issue tracker log if status or severity has been changed + if fields_found and ( # pylint: disable=too-many-boolean-expressions + observation_before.rule_priority != observation.rule_priority + or observation_before.current_priority != observation.current_priority + or observation_before.rule_status != observation.rule_status + or observation_before.current_status != observation.current_status + or observation_before.rule_severity != observation.rule_severity + or observation_before.current_severity != observation.current_severity + or observation_before.rule_vex_justification != observation.rule_vex_justification + or observation_before.current_vex_justification != observation.current_vex_justification + or observation_before.general_rule != observation.general_rule + or observation_before.product_rule != observation.product_rule ): - risk_acceptance_expiry_date = observation.risk_acceptance_expiry_date - else: - risk_acceptance_expiry_date = None + _write_observation_log( + observation=observation, + observation_before=observation_before, + rule=rule, + ) + push_observation_to_issue_tracker(observation, get_current_user()) + + rego_found = False + if rule.type == Rule_Type.RULE_TYPE_REGO: + rego_found = self._check_rule_rego(rule, observation, observation_before, simulation) + if simulation: + return rego_found + + # Write observation and observation and push to issue tracker log if status or severity has been changed + if rego_found and ( # pylint: disable=too-many-boolean-expressions + observation_before.rule_rego_priority != observation.rule_rego_priority + or observation_before.current_priority != observation.current_priority + or observation_before.rule_rego_status != observation.rule_rego_status + or observation_before.current_status != observation.current_status + or observation_before.rule_rego_severity != observation.rule_rego_severity + or observation_before.current_severity != observation.current_severity + or observation_before.rule_rego_vex_justification != observation.rule_rego_vex_justification + or observation_before.current_vex_justification != observation.current_vex_justification + or observation_before.general_rule_rego != observation.general_rule_rego + or observation_before.product_rule_rego != observation.product_rule_rego + ): + _write_observation_log( + observation=observation, + observation_before=observation_before, + rule=rule, + ) + push_observation_to_issue_tracker(observation, get_current_user()) + + return fields_found or rego_found + + def _check_rule_fields( + self, rule: Rule, observation: Observation, observation_before: Observation, simulation: Optional[bool] = False + ) -> bool: + if ( # pylint: disable=too-many-boolean-expressions + (not rule.parser or observation.parser == rule.parser) + and (not rule.scanner_prefix or observation.scanner.lower().startswith(rule.scanner_prefix.lower())) + and _check_regex(rule.title, observation.title) + and _check_regex(rule.description_observation, observation.description) + and _check_regex(rule.origin_component_name_version, observation.origin_component_name_version) + and _check_regex(rule.origin_component_purl, observation.origin_component_purl) + and _check_regex( 
+ rule.origin_docker_image_name_tag, + observation.origin_docker_image_name_tag, + ) + and _check_regex(rule.origin_endpoint_url, observation.origin_endpoint_url) + and _check_regex(rule.origin_service_name, observation.origin_service_name) + and _check_regex(rule.origin_source_file, observation.origin_source_file) + and _check_regex( + rule.origin_cloud_qualified_resource, + observation.origin_cloud_qualified_resource, + ) + and _check_regex( + rule.origin_kubernetes_qualified_resource, + observation.origin_kubernetes_qualified_resource, + ) + ): + if simulation: + return True + + if rule.new_severity: + observation.rule_severity = rule.new_severity + observation.current_severity = get_current_severity(observation) + + if rule.new_status: + observation.rule_status = rule.new_status + observation.current_status = get_current_status(observation) + + if rule.new_vex_justification: + observation.rule_vex_justification = rule.new_vex_justification + observation.current_vex_justification = get_current_vex_justification(observation) + + if observation.current_status == Status.STATUS_RISK_ACCEPTED: + if observation_before.current_status != Status.STATUS_RISK_ACCEPTED: + observation.risk_acceptance_expiry_date = calculate_risk_acceptance_expiry_date(observation.product) + else: + observation.risk_acceptance_expiry_date = None - if rule.description: - comment = rule.description - else: if rule.product: - comment = f"Updated by product rule {rule.name}" + observation.product_rule = rule else: - comment = f"Updated by general rule {rule.name}" - - create_observation_log( - observation=observation, - severity=severity, - status=status, - comment=comment, - vex_justification=vex_justification, - assessment_status=Assessment_Status.ASSESSMENT_STATUS_AUTO_APPROVED, - risk_acceptance_expiry_date=risk_acceptance_expiry_date, - ) + observation.general_rule = rule - def _write_observation_log_no_rule( - self, - observation: Observation, - previous_product_rule: Optional[Rule], - previous_general_rule: Optional[Rule], - ) -> None: - observation.rule_severity = "" - previous_severity = observation.current_severity - observation.current_severity = get_current_severity(observation) + return True - observation.rule_status = "" - previous_status = observation.current_status - observation.current_status = get_current_status(observation) + return False - observation.rule_vex_justification = "" - previous_vex_justification = observation.current_vex_justification - observation.current_vex_justification = get_current_vex_justification( - observation - ) + def _check_rule_rego( + self, rule: Rule, observation: Observation, observation_before: Observation, simulation: Optional[bool] = False + ) -> bool: + jsonpickle.set_encoder_options("simplejson", use_decimal=True, sort_keys=True) + jsonpickle.set_preferred_backend("simplejson") - previous_risk_acceptance_expiry_date = observation.risk_acceptance_expiry_date - if observation.current_status == Status.STATUS_RISK_ACCEPTED: - if previous_status != Status.STATUS_RISK_ACCEPTED: - observation.risk_acceptance_expiry_date = ( - calculate_risk_acceptance_expiry_date(observation.product) - ) - else: - observation.risk_acceptance_expiry_date = None + observation_dict = json.loads(jsonpickle.dumps(observation, unpicklable=False, use_decimal=True)) + observation_dict = {k: v for k, v in observation_dict.items() if v is not None and v != ""} - log_status = ( - observation.current_status - if previous_status != observation.current_status - else "" - ) + rego_interpreter = 
self.rego_interpreters[rule.pk] + result = rego_interpreter.query(observation_dict) - log_severity = ( - observation.current_severity - if previous_severity != observation.current_severity - else "" - ) + new_priority = result.get("priority") + new_status = result.get("status") + new_severity = result.get("severity") + new_vex_justification = result.get("vex_justification") - log_vex_justification = ( - observation.current_vex_justification - if previous_vex_justification != observation.current_vex_justification - else "" - ) + if new_priority or new_severity or new_status or new_vex_justification: + if simulation: + return True - log_risk_acceptance_expiry_date = ( - observation.risk_acceptance_expiry_date - if previous_risk_acceptance_expiry_date - != observation.risk_acceptance_expiry_date - else None - ) + if new_priority: + observation.rule_rego_priority = new_priority + observation.current_priority = get_current_priority(observation) + if new_severity: + observation.rule_rego_severity = new_severity + observation.current_severity = get_current_severity(observation) + if new_status: + observation.rule_rego_status = new_status + observation.current_status = get_current_status(observation) + + if observation.current_status == Status.STATUS_RISK_ACCEPTED: + if observation_before.current_status != Status.STATUS_RISK_ACCEPTED: + observation.risk_acceptance_expiry_date = calculate_risk_acceptance_expiry_date( + observation.product + ) + else: + observation.risk_acceptance_expiry_date = None + + if new_vex_justification: + observation.rule_rego_vex_justification = new_vex_justification + observation.current_vex_justification = get_current_vex_justification(observation) - if previous_product_rule: - comment = f"Removed product rule {previous_product_rule.name}" - elif previous_general_rule: - comment = f"Removed general rule {previous_general_rule.name}" + if rule.product: + observation.product_rule_rego = rule + else: + observation.general_rule_rego = rule + + return True + + return False + + +def _check_regex(pattern: str, value: str) -> bool: + if not pattern: + return True + + if not value: + return False + + compiled_pattern = re.compile(pattern, re.IGNORECASE) + return compiled_pattern.match(value) is not None + + +def _write_observation_log( + *, + observation: Observation, + observation_before: Observation, + rule: Rule, +) -> None: + status = observation.current_status if observation_before.current_status != observation.current_status else "" + severity = ( + observation.current_severity if observation_before.current_severity != observation.current_severity else "" + ) + priority = ( + observation.current_priority if observation_before.current_priority != observation.current_priority else None + ) + vex_justification = ( + observation.current_vex_justification + if observation_before.current_vex_justification != observation.current_vex_justification + else "" + ) + risk_acceptance_expiry_date = ( + observation.risk_acceptance_expiry_date + if observation_before.risk_acceptance_expiry_date != observation.risk_acceptance_expiry_date + else None + ) + + if rule.description: + comment = rule.description + else: + if rule.product: + comment = f"Updated by product rule {rule.name}" else: - comment = "Removed unknown rule" - - create_observation_log( - observation=observation, - severity=log_severity, - status=log_status, - comment=comment, - vex_justification=log_vex_justification, - assessment_status=Assessment_Status.ASSESSMENT_STATUS_AUTO_APPROVED, - 
risk_acceptance_expiry_date=log_risk_acceptance_expiry_date, - ) + comment = f"Updated by general rule {rule.name}" + + create_observation_log( + observation=observation, + severity=severity, + status=status, + priority=priority, + comment=comment, + vex_justification=vex_justification, + assessment_status=Assessment_Status.ASSESSMENT_STATUS_AUTO_APPROVED, + risk_acceptance_expiry_date=risk_acceptance_expiry_date, + ) + + +def _write_observation_log_no_rule( + observation: Observation, + previous_product_rule: Optional[Rule], + previous_general_rule: Optional[Rule], +) -> None: + previous_severity = observation.current_severity + observation.current_severity = get_current_severity(observation) + + previous_status = observation.current_status + observation.current_status = get_current_status(observation) + + previous_priority = observation.current_priority + observation.current_priority = get_current_priority(observation) + + previous_vex_justification = observation.current_vex_justification + observation.current_vex_justification = get_current_vex_justification(observation) + + previous_risk_acceptance_expiry_date = observation.risk_acceptance_expiry_date + if observation.current_status == Status.STATUS_RISK_ACCEPTED: + if previous_status != Status.STATUS_RISK_ACCEPTED: + observation.risk_acceptance_expiry_date = calculate_risk_acceptance_expiry_date(observation.product) + else: + observation.risk_acceptance_expiry_date = None + + log_status = observation.current_status if previous_status != observation.current_status else "" + + log_severity = observation.current_severity if previous_severity != observation.current_severity else "" + + log_priority = observation.current_priority if previous_priority != observation.current_priority else None + + log_vex_justification = ( + observation.current_vex_justification + if previous_vex_justification != observation.current_vex_justification + else "" + ) + + log_risk_acceptance_expiry_date = ( + observation.risk_acceptance_expiry_date + if previous_risk_acceptance_expiry_date != observation.risk_acceptance_expiry_date + else None + ) + + if previous_product_rule: + comment = f"Removed product {previous_product_rule.type.lower()} rule {previous_product_rule.name}" + elif previous_general_rule: + comment = f"Removed general {previous_general_rule.type.lower()} rule {previous_general_rule.name}" + else: + comment = "Removed unknown rule" + + create_observation_log( + observation=observation, + severity=log_severity, + status=log_status, + priority=log_priority, + comment=comment, + vex_justification=log_vex_justification, + assessment_status=Assessment_Status.ASSESSMENT_STATUS_AUTO_APPROVED, + risk_acceptance_expiry_date=log_risk_acceptance_expiry_date, + ) diff --git a/backend/application/rules/services/simulator.py b/backend/application/rules/services/simulator.py new file mode 100644 index 000000000..fd003d946 --- /dev/null +++ b/backend/application/rules/services/simulator.py @@ -0,0 +1,73 @@ +from copy import copy +from typing import Tuple + +from application.core.models import Observation +from application.core.queries.product import get_products +from application.core.services.observation import normalize_observation_fields +from application.rules.models import Rule +from application.rules.services.rule_engine import Rule_Engine +from application.rules.types import Rule_Type + +MAX_OBSERVATIONS = 100 + + +def simulate_rule(rule: Rule) -> Tuple[int, list[Observation]]: + number_observations = 0 + simulation_results: list[Observation] = [] + + if 
rule.product:
+        if rule.product.is_product_group:
+            products = rule.product.products.all()
+            observations = Observation.objects.filter(product__in=products)
+        else:
+            observations = Observation.objects.filter(product=rule.product)
+    else:
+        observations = Observation.objects.filter(product__in=get_products(), product__apply_general_rules=True)
+
+    if rule.type == Rule_Type.RULE_TYPE_FIELDS:
+        if rule.parser:
+            observations = observations.filter(parser=rule.parser)
+        if rule.scanner_prefix:
+            observations = observations.filter(scanner__startswith=rule.scanner_prefix)
+
+    observations = (
+        observations.order_by("product__name", "title")
+        .select_related("product")
+        .select_related("product__product_group")
+        .select_related("branch")
+        .select_related("parser")
+        .select_related("general_rule")
+        .select_related("product_rule")
+    )
+
+    rule_engines: dict[int, Rule_Engine] = {}
+
+    for observation in observations:
+        rule_engine = rule_engines.get(observation.product.pk)
+        if not rule_engine:
+            rule_engine = Rule_Engine(observation.product)
+            rule_engines[observation.product.pk] = rule_engine
+
+        observation_before = copy(observation)
+
+        observation_before.rule_status = ""
+        observation_before.rule_rego_status = ""
+        observation_before.rule_severity = ""
+        observation_before.rule_rego_severity = ""
+        observation_before.rule_priority = None
+        observation_before.rule_rego_priority = None
+        observation_before.rule_vex_justification = ""
+        observation_before.rule_rego_vex_justification = ""
+        observation_before.general_rule = None
+        observation_before.general_rule_rego = None
+        observation_before.product_rule = None
+        observation_before.product_rule_rego = None
+
+        normalize_observation_fields(observation_before)
+
+        if rule_engine.check_rule_for_observation(rule, observation, observation_before, True):
+            number_observations += 1
+            if len(simulation_results) < MAX_OBSERVATIONS:
+                simulation_results.append(observation)
+
+    return number_observations, simulation_results
diff --git a/backend/application/rules/types.py b/backend/application/rules/types.py
index 96f70c97e..ad1fe52e9 100644
--- a/backend/application/rules/types.py
+++ b/backend/application/rules/types.py
@@ -15,3 +15,13 @@ class Rule_Status:
         (RULE_STATUS_APPROVED, RULE_STATUS_APPROVED),
         (RULE_STATUS_REJECTED, RULE_STATUS_REJECTED),
     ]
+
+
+class Rule_Type:
+    RULE_TYPE_FIELDS = "Fields"
+    RULE_TYPE_REGO = "Rego"
+
+    RULE_TYPE_CHOICES = [
+        (RULE_TYPE_FIELDS, RULE_TYPE_FIELDS),
+        (RULE_TYPE_REGO, RULE_TYPE_REGO),
+    ]
diff --git a/backend/application/vex/api/filters.py b/backend/application/vex/api/filters.py
index f60066ea8..aff318327 100644
--- a/backend/application/vex/api/filters.py
+++ b/backend/application/vex/api/filters.py
@@ -4,6 +4,9 @@
     CSAF,
     CSAF_Branch,
     CSAF_Vulnerability,
+    CycloneDX,
+    CycloneDX_Branch,
+    CycloneDX_Vulnerability,
     OpenVEX,
     OpenVEX_Branch,
     OpenVEX_Vulnerability,
@@ -22,7 +25,7 @@ class CSAFFilter(FilterSet):
         # tuple-mapping retains order
         fields=(
             ("user__full_name", "user_full_name"),
-            ("product__name", "product_name"),
+            ("product__name", "product_data.name"),
             ("document_id_prefix", "document_id_prefix"),
             ("document_base_id", "document_base_id"),
             ("version", "version"),
@@ -76,7 +79,7 @@ class OpenVEXFilter(FilterSet):
         # tuple-mapping retains order
         fields=(
             ("user__full_name", "user_full_name"),
-            ("product__name", "product_name"),
+            ("product__name", "product_data.name"),
             ("document_id_prefix", "document_id_prefix"),
             ("document_base_id", "document_base_id"),
             ("version", "version"),
@@ -116,11 +119,58 @@
class Meta: ] -class VEXCounterFilter(FilterSet): - document_id_prefix = CharFilter( - field_name="document_id_prefix", lookup_expr="icontains", distinct=True +class CycloneDXFilter(FilterSet): + vulnerability_names__name = CharFilter( + field_name="vulnerability_names__name", lookup_expr="icontains", distinct=True ) + ordering = OrderingFilter( + # tuple-mapping retains order + fields=( + ("user__full_name", "user_full_name"), + ("product__name", "product_data.name"), + ("document_base_id", "document_base_id"), + ("document_id_prefix", "document_id_prefix"), + ("version", "version"), + ("content_hash", "content_hash"), + ("author", "author"), + ("manufacturer", "manufacturer"), + ("first_issued", "first_issued"), + ("last_updated", "last_updated"), + ), + ) + + class Meta: + model = CycloneDX + fields = [ + "product", + "vulnerability_names__name", + "document_id_prefix", + "author", + ] + + +class CycloneDXVulnerabilityFilter(FilterSet): + class Meta: + model = CycloneDX_Vulnerability + fields = [ + "cyclonedx", + "name", + ] + + +class CycloneDXBranchFilter(FilterSet): + class Meta: + model = CycloneDX_Branch + fields = [ + "cyclonedx", + "branch__name", + ] + + +class VEXCounterFilter(FilterSet): + document_id_prefix = CharFilter(field_name="document_id_prefix", lookup_expr="icontains", distinct=True) + ordering = OrderingFilter( # tuple-mapping retains order fields=( @@ -138,9 +188,7 @@ class Meta: class VEXDocumentFilter(FilterSet): - document_id = CharFilter( - field_name="document_id", lookup_expr="icontains", distinct=True - ) + document_id = CharFilter(field_name="document_id", lookup_expr="icontains", distinct=True) author = CharFilter(field_name="author", lookup_expr="icontains", distinct=True) ordering = OrderingFilter( # tuple-mapping retains order @@ -161,9 +209,7 @@ class Meta: class VEXStatementFilter(FilterSet): - vulnerability_id = CharFilter( - field_name="vulnerability_id", lookup_expr="icontains", distinct=True - ) + vulnerability_id = CharFilter(field_name="vulnerability_id", lookup_expr="icontains", distinct=True) ordering = OrderingFilter( # tuple-mapping retains order @@ -173,6 +219,8 @@ class VEXStatementFilter(FilterSet): ("justification", "justification"), ("impact", "impact"), ("remediation", "remediation"), + ("product_purl", "product_purl"), + ("component_cyclonedx_bom_link", "component_cyclonedx_bom_link"), ) ) diff --git a/backend/application/vex/api/permissions.py b/backend/application/vex/api/permissions.py index e7eff3ca0..690459e22 100644 --- a/backend/application/vex/api/permissions.py +++ b/backend/application/vex/api/permissions.py @@ -1,11 +1,15 @@ +from django.contrib.auth.models import AnonymousUser from rest_framework.permissions import BasePermission +from rest_framework.request import Request +from rest_framework.views import APIView -from application.access_control.api.permissions_base import check_object_permission -from application.access_control.services.roles_permissions import Permissions +from application.authorization.api.permissions_base import check_object_permission +from application.authorization.services.roles_permissions import Permissions +from application.vex.models import CSAF, OpenVEX class UserHasVEXPermission(BasePermission): - def has_object_permission(self, request, view, obj): + def has_object_permission(self, request: Request, view: APIView, obj: CSAF | OpenVEX) -> bool: return check_object_permission( request=request, object_to_check=obj, @@ -16,8 +20,14 @@ def has_object_permission(self, request, view, obj): class 
UserHasVEXCounterPermission(BasePermission): - def has_permission(self, request, view): + def has_permission(self, request: Request, view: APIView) -> bool: if request.method == "GET": return True - return request.user and request.user.is_superuser + if not request.user: + return False + + if isinstance(request.user, AnonymousUser): + return False + + return request.user.is_superuser diff --git a/backend/application/vex/api/serializers.py b/backend/application/vex/api/serializers.py index 5647fc7bc..9390a1657 100644 --- a/backend/application/vex/api/serializers.py +++ b/backend/application/vex/api/serializers.py @@ -20,6 +20,9 @@ CSAF_Branch, CSAF_Revision, CSAF_Vulnerability, + CycloneDX, + CycloneDX_Branch, + CycloneDX_Vulnerability, OpenVEX, OpenVEX_Branch, OpenVEX_Vulnerability, @@ -36,42 +39,27 @@ class CSAFDocumentCreateSerializer(Serializer): product = IntegerField(validators=[MinValueValidator(0)], required=False) - vulnerability_names = ListField( - child=CharField(max_length=255), min_length=0, max_length=20, required=False - ) - branch_names = ListField( - child=CharField(max_length=255), min_length=0, max_length=20, required=False - ) + vulnerability_names = ListField(child=CharField(max_length=255), min_length=0, max_length=20, required=False) + branch_names = ListField(child=CharField(max_length=255), min_length=0, max_length=20, required=False) + branches = ListField(child=IntegerField(min_value=1), min_length=0, max_length=20, required=False) document_id_prefix = CharField(max_length=200, required=True) title = CharField(max_length=255, required=True) publisher_name = CharField(max_length=255, required=True) - publisher_category = ChoiceField( - choices=CSAF_Publisher_Category.CSAF_PUBLISHER_CATEGORY_CHOICES, required=True - ) + publisher_category = ChoiceField(choices=CSAF_Publisher_Category.CSAF_PUBLISHER_CATEGORY_CHOICES, required=True) publisher_namespace = CharField(max_length=255, required=True) - tracking_status = ChoiceField( - choices=CSAF_Tracking_Status.CSAF_TRACKING_STATUS_CHOICES, required=True - ) - tlp_label = ChoiceField( - choices=CSAF_TLP_Label.CSAF_TLP_LABEL_CHOICES, required=True - ) + tracking_status = ChoiceField(choices=CSAF_Tracking_Status.CSAF_TRACKING_STATUS_CHOICES, required=True) + tlp_label = ChoiceField(choices=CSAF_TLP_Label.CSAF_TLP_LABEL_CHOICES, required=True) def validate_publisher_namespace(self, publisher_namespace: str) -> str: return _validate_url(publisher_namespace) class CSAFDocumentUpdateSerializer(Serializer): - tlp_label = ChoiceField( - choices=CSAF_TLP_Label.CSAF_TLP_LABEL_CHOICES, required=True - ) + tlp_label = ChoiceField(choices=CSAF_TLP_Label.CSAF_TLP_LABEL_CHOICES, required=True) publisher_name = CharField(max_length=255, required=False) - publisher_category = ChoiceField( - choices=CSAF_Publisher_Category.CSAF_PUBLISHER_CATEGORY_CHOICES, required=False - ) + publisher_category = ChoiceField(choices=CSAF_Publisher_Category.CSAF_PUBLISHER_CATEGORY_CHOICES, required=False) publisher_namespace = CharField(max_length=255, required=False) - tracking_status = ChoiceField( - choices=CSAF_Tracking_Status.CSAF_TRACKING_STATUS_CHOICES, required=False - ) + tracking_status = ChoiceField(choices=CSAF_Tracking_Status.CSAF_TRACKING_STATUS_CHOICES, required=False) def validate_publisher_namespace(self, publisher_namespace: str) -> str: return _validate_url(publisher_namespace) @@ -132,12 +120,9 @@ def get_name(self, obj: CSAF_Branch) -> str: class OpenVEXDocumentCreateSerializer(Serializer): product = 
IntegerField(validators=[MinValueValidator(0)], required=False)
-    vulnerability_names = ListField(
-        child=CharField(max_length=255), min_length=0, max_length=20, required=False
-    )
-    branch_names = ListField(
-        child=CharField(max_length=255), min_length=0, max_length=20, required=False
-    )
+    vulnerability_names = ListField(child=CharField(max_length=255), min_length=0, max_length=20, required=False)
+    branch_names = ListField(child=CharField(max_length=255), min_length=0, max_length=20, required=False)
+    branches = ListField(child=IntegerField(min_value=1), min_length=0, max_length=20, required=False)
     id_namespace = CharField(max_length=255, required=True)
     document_id_prefix = CharField(max_length=255, required=True)
     author = CharField(max_length=255, required=True)
@@ -198,6 +183,82 @@ def get_name(self, obj: OpenVEX_Branch) -> str:
         return obj.branch.name


+class CycloneDXDocumentCreateSerializer(Serializer):
+    product = IntegerField(validators=[MinValueValidator(0)], required=False)
+    vulnerability_names = ListField(child=CharField(max_length=255), min_length=0, max_length=20, required=False)
+    branches = ListField(child=IntegerField(min_value=1), min_length=0, max_length=20, required=False)
+    document_id_prefix = CharField(max_length=255, required=False)
+    author = CharField(max_length=255, required=False, allow_blank=True)
+    manufacturer = CharField(max_length=255, required=False, allow_blank=True)
+
+    def validate(self, attrs: dict) -> dict:
+        if not attrs.get("author") and not attrs.get("manufacturer"):
+            raise ValidationError("Either author or manufacturer must be set")
+        if attrs.get("author") and attrs.get("manufacturer"):
+            raise ValidationError("Only one of author or manufacturer must be set")
+
+        return super().validate(attrs)
+
+
+class CycloneDXDocumentUpdateSerializer(Serializer):
+    author = CharField(max_length=255, required=False, allow_blank=True)
+    manufacturer = CharField(max_length=255, required=False, allow_blank=True)
+
+    def validate(self, attrs: dict) -> dict:
+        if not attrs.get("author") and not attrs.get("manufacturer"):
+            raise ValidationError("Either author or manufacturer must be set")
+        if attrs.get("author") and attrs.get("manufacturer"):
+            raise ValidationError("Only one of author or manufacturer must be set")
+
+        return super().validate(attrs)
+
+
+class CycloneDXSerializer(ModelSerializer):
+    product_data = NestedProductSerializer(source="product")
+    vulnerability_names = SerializerMethodField()
+    branch_names = SerializerMethodField()
+    user_full_name = SerializerMethodField()
+
+    class Meta:
+        model = CycloneDX
+        fields = "__all__"
+
+    def get_vulnerability_names(self, obj: CycloneDX) -> Optional[str]:
+        vulnerabilities = [v.name for v in obj.vulnerability_names.all()]
+        if vulnerabilities:
+            return ", ".join(vulnerabilities)
+        return None
+
+    def get_branch_names(self, obj: CycloneDX) -> Optional[str]:
+        branches = [b.branch.name for b in obj.branches.all()]
+        if branches:
+            return ", ".join(branches)
+        return None
+
+    def get_user_full_name(self, obj: CycloneDX) -> Optional[str]:
+        if obj.user:
+            return obj.user.full_name
+
+        return None
+
+
+class CycloneDXVulnerabilitySerializer(ModelSerializer):
+    class Meta:
+        model = CycloneDX_Vulnerability
+        fields = "__all__"
+
+
+class CycloneDXBranchSerializer(ModelSerializer):
+    name = SerializerMethodField(read_only=True)
+
+    class Meta:
+        model = CycloneDX_Branch
+        fields = "__all__"
+
+    def get_name(self, obj: CycloneDX_Branch) -> str:
+        return obj.branch.name
+
+
 class
VEXCounterSerializer(ModelSerializer): class Meta: model = VEX_Counter diff --git a/backend/application/vex/api/views.py b/backend/application/vex/api/views.py index e88aa0aba..1cb1fc3a0 100644 --- a/backend/application/vex/api/views.py +++ b/backend/application/vex/api/views.py @@ -1,9 +1,12 @@ import json import re -from typing import Any +from typing import Any, Optional from urllib.parse import urlparse import jsonpickle +from cyclonedx.model.bom import Bom +from cyclonedx.output.json import JsonV1Dot6 +from django.db.models import QuerySet from django.http import HttpResponse from django_filters.rest_framework import DjangoFilterBackend from drf_spectacular.utils import extend_schema @@ -12,16 +15,22 @@ from rest_framework.mixins import DestroyModelMixin, ListModelMixin, RetrieveModelMixin from rest_framework.parsers import MultiPartParser from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT from rest_framework.views import APIView from rest_framework.viewsets import GenericViewSet, ModelViewSet +from application.authorization.services.authorization import user_has_permission_or_403 +from application.authorization.services.roles_permissions import Permissions from application.commons.api.permissions import UserHasSuperuserPermission from application.vex.api.filters import ( CSAFBranchFilter, CSAFFilter, CSAFVulnerabilityFilter, + CycloneDXBranchFilter, + CycloneDXFilter, + CycloneDXVulnerabilityFilter, OpenVEXBranchFilter, OpenVEXFilter, OpenVEXVulnerabilityFilter, @@ -39,6 +48,11 @@ CSAFDocumentUpdateSerializer, CSAFSerializer, CSAFVulnerabilitySerializer, + CycloneDXBranchSerializer, + CycloneDXDocumentCreateSerializer, + CycloneDXDocumentUpdateSerializer, + CycloneDXSerializer, + CycloneDXVulnerabilitySerializer, OpenVEXBranchSerializer, OpenVEXDocumentCreateSerializer, OpenVEXDocumentUpdateSerializer, @@ -53,6 +67,9 @@ CSAF, CSAF_Branch, CSAF_Vulnerability, + CycloneDX, + CycloneDX_Branch, + CycloneDX_Vulnerability, OpenVEX, OpenVEX_Branch, OpenVEX_Vulnerability, @@ -65,6 +82,11 @@ get_csaf_vulnerabilities, get_csafs, ) +from application.vex.queries.cyclonedx import ( + get_cyclonedx_branches, + get_cyclonedx_s, + get_cyclonedx_vulnerabilities, +) from application.vex.queries.openvex import ( get_openvex_branches, get_openvex_s, @@ -77,16 +99,30 @@ create_csaf_document, update_csaf_document, ) +from application.vex.services.cyclonedx_generator import ( + CycloneDXCreateParameters, + CycloneDXUpdateParameters, + create_cyclonedx_document, + update_cyclonedx_document, +) from application.vex.services.openvex_generator import ( OpenVEXCreateParameters, OpenVEXUpdateParameters, create_openvex_document, update_openvex_document, ) +from application.vex.services.vex_base import ( + check_and_get_product, + check_branch_names, + check_branches, +) from application.vex.services.vex_import import import_vex VEX_TYPE_CSAF = "csaf" VEX_TYPE_OPENVEX = "openvex" +VEX_TYPE_CYCLONEDX_VEX = "cdx_vex" + +APPLICATION_JSON = "application/json" class CSAFDocumentCreateView(APIView): @@ -96,11 +132,15 @@ class CSAFDocumentCreateView(APIView): responses={HTTP_200_OK: bytes}, ) @action(detail=True, methods=["post"]) - def post(self, request): + def post(self, request: Request) -> HttpResponse: serializer = CSAFDocumentCreateSerializer(data=request.data) if not serializer.is_valid(): raise ValidationError(serializer.errors) + product = 
check_and_get_product(serializer.validated_data.get("product")) + if product: + user_has_permission_or_403(product, Permissions.VEX_Create) + unique_vulnerability_names = [] if serializer.validated_data.get("vulnerability_names"): unique_vulnerability_names = _remove_duplicates_keep_order( @@ -113,10 +153,21 @@ def post(self, request): else [] ) + unique_branches = ( + _remove_duplicates_keep_order(serializer.validated_data.get("branches")) + if serializer.validated_data.get("branches") + else [] + ) + + if unique_branches: + branches = check_branches(unique_branches, product) + else: + branches = check_branch_names(unique_branch_names, product) + csaf_create_parameters = CSAFCreateParameters( - product_id=serializer.validated_data.get("product"), + product=product, vulnerability_names=unique_vulnerability_names, - branch_names=unique_branch_names, + branches=branches, document_id_prefix=serializer.validated_data.get("document_id_prefix"), title=serializer.validated_data.get("title"), publisher_name=serializer.validated_data.get("publisher_name"), @@ -134,7 +185,7 @@ def post(self, request): response = HttpResponse( # pylint: disable=http-response-with-content-type-json # HTTPResponse gives more control about JSON serialization content=_object_to_json(csaf_document, VEX_TYPE_CSAF), - content_type="application/json", + content_type=APPLICATION_JSON, ) response["Content-Disposition"] = ( f"attachment; filename={_get_csaf_filename(csaf_document.document.tracking.id)}.json" @@ -148,7 +199,7 @@ class CSAFDocumentUpdateView(APIView): request=CSAFDocumentUpdateSerializer, responses={HTTP_200_OK: bytes}, ) - def post(self, request, document_id_prefix: str, document_base_id: str): + def post(self, request: Request, document_id_prefix: str, document_base_id: str) -> HttpResponse: serializer = CSAFDocumentUpdateSerializer(data=request.data) if not serializer.is_valid(): raise ValidationError(serializer.errors) @@ -171,7 +222,7 @@ def post(self, request, document_id_prefix: str, document_base_id: str): response = HttpResponse( # pylint: disable=http-response-with-content-type-json # HTTPResponse gives more control about JSON serialization content=_object_to_json(csaf_document, VEX_TYPE_CSAF), - content_type="application/json", + content_type=APPLICATION_JSON, ) response["Content-Disposition"] = ( f"attachment; filename={_get_csaf_filename(csaf_document.document.tracking.id)}.json" @@ -179,16 +230,14 @@ def post(self, request, document_id_prefix: str, document_base_id: str): return response -class CSAFViewSet( - GenericViewSet, DestroyModelMixin, ListModelMixin, RetrieveModelMixin -): +class CSAFViewSet(GenericViewSet, DestroyModelMixin, ListModelMixin, RetrieveModelMixin): serializer_class = CSAFSerializer queryset = CSAF.objects.none() filterset_class = CSAFFilter filter_backends = [DjangoFilterBackend] permission_classes = (IsAuthenticated, UserHasVEXPermission) - def get_queryset(self): + def get_queryset(self) -> QuerySet[CSAF]: return get_csafs() @@ -198,7 +247,7 @@ class CSAFVulnerabilityViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixi filterset_class = CSAFVulnerabilityFilter filter_backends = [DjangoFilterBackend] - def get_queryset(self): + def get_queryset(self) -> QuerySet[CSAF_Vulnerability]: return get_csaf_vulnerabilities() @@ -208,7 +257,7 @@ class CSAFBranchViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): filterset_class = CSAFBranchFilter filter_backends = [DjangoFilterBackend] - def get_queryset(self): + def get_queryset(self) -> QuerySet[CSAF_Branch]: return 
get_csaf_branches() @@ -219,15 +268,17 @@ class OpenVEXDocumentCreateView(APIView): responses={HTTP_200_OK: bytes}, ) @action(detail=True, methods=["post"]) - def post(self, request): + def post(self, request: Request) -> HttpResponse: serializer = OpenVEXDocumentCreateSerializer(data=request.data) if not serializer.is_valid(): raise ValidationError(serializer.errors) + product = check_and_get_product(serializer.validated_data.get("product")) + if product: + user_has_permission_or_403(product, Permissions.VEX_Create) + unique_vulnerability_names = ( - _remove_duplicates_keep_order( - serializer.validated_data.get("vulnerability_names") - ) + _remove_duplicates_keep_order(serializer.validated_data.get("vulnerability_names")) if serializer.validated_data.get("vulnerability_names") else [] ) @@ -238,10 +289,21 @@ def post(self, request): else [] ) + unique_branches = ( + _remove_duplicates_keep_order(serializer.validated_data.get("branches")) + if serializer.validated_data.get("branches") + else [] + ) + + if unique_branches: + branches = check_branches(unique_branches, product) + else: + branches = check_branch_names(unique_branch_names, product) + parameters = OpenVEXCreateParameters( - product_id=serializer.validated_data.get("product"), + product=product, vulnerability_names=unique_vulnerability_names, - branch_names=unique_branch_names, + branches=branches, id_namespace=serializer.validated_data.get("id_namespace"), document_id_prefix=serializer.validated_data.get("document_id_prefix"), author=serializer.validated_data.get("author"), @@ -256,11 +318,10 @@ def post(self, request): response = HttpResponse( # pylint: disable=http-response-with-content-type-json # HTTPResponse gives more control about JSON serialization content=_object_to_json(openvex_document, VEX_TYPE_OPENVEX), - content_type="application/json", + content_type=APPLICATION_JSON, ) - response["Content-Disposition"] = ( - "attachment; filename=" - + _get_openvex_filename(openvex_document.id, openvex_document.version) + response["Content-Disposition"] = "attachment; filename=" + _get_openvex_filename( + openvex_document.id, openvex_document.version ) return response @@ -272,7 +333,7 @@ class OpenVEXDocumentUpdateView(APIView): responses={HTTP_200_OK: bytes}, ) @action(detail=True, methods=["post"]) - def post(self, request, document_id_prefix: str, document_base_id: str): + def post(self, request: Request, document_id_prefix: str, document_base_id: str) -> HttpResponse: serializer = OpenVEXDocumentUpdateSerializer(data=request.data) if not serializer.is_valid(): raise ValidationError(serializer.errors) @@ -292,25 +353,22 @@ def post(self, request, document_id_prefix: str, document_base_id: str): response = HttpResponse( # pylint: disable=http-response-with-content-type-json # HTTPResponse gives more control about JSON serialization content=_object_to_json(openvex_document, VEX_TYPE_OPENVEX), - content_type="application/json", + content_type=APPLICATION_JSON, ) - response["Content-Disposition"] = ( - "attachment; filename=" - + _get_openvex_filename(openvex_document.id, openvex_document.version) + response["Content-Disposition"] = "attachment; filename=" + _get_openvex_filename( + openvex_document.id, openvex_document.version ) return response -class OpenVEXViewSet( - GenericViewSet, DestroyModelMixin, ListModelMixin, RetrieveModelMixin -): +class OpenVEXViewSet(GenericViewSet, DestroyModelMixin, ListModelMixin, RetrieveModelMixin): serializer_class = OpenVEXSerializer queryset = OpenVEX.objects.none() filterset_class = 
OpenVEXFilter filter_backends = [DjangoFilterBackend] permission_classes = (IsAuthenticated, UserHasVEXPermission) - def get_queryset(self): + def get_queryset(self) -> QuerySet[OpenVEX]: return get_openvex_s() @@ -320,7 +378,7 @@ class OpenVEXVulnerabilityViewSet(GenericViewSet, ListModelMixin, RetrieveModelM filterset_class = OpenVEXVulnerabilityFilter filter_backends = [DjangoFilterBackend] - def get_queryset(self): + def get_queryset(self) -> QuerySet[OpenVEX_Vulnerability]: return get_openvex_vulnerabilities() @@ -330,10 +388,133 @@ class OpenVEXBranchViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): filterset_class = OpenVEXBranchFilter filter_backends = [DjangoFilterBackend] - def get_queryset(self): + def get_queryset(self) -> QuerySet[OpenVEX_Branch]: return get_openvex_branches() +class CycloneDXDocumentCreateView(APIView): + @extend_schema( + methods=["POST"], + request=CycloneDXDocumentCreateSerializer, + responses={HTTP_200_OK: bytes}, + ) + @action(detail=True, methods=["post"]) + def post(self, request: Request) -> HttpResponse: + serializer = CycloneDXDocumentCreateSerializer(data=request.data) + if not serializer.is_valid(): + raise ValidationError(serializer.errors) + + product = check_and_get_product(serializer.validated_data.get("product")) + if product: + user_has_permission_or_403(product, Permissions.VEX_Create) + + unique_vulnerability_names = ( + _remove_duplicates_keep_order(serializer.validated_data.get("vulnerability_names")) + if serializer.validated_data.get("vulnerability_names") + else [] + ) + + unique_branches = ( + _remove_duplicates_keep_order(serializer.validated_data.get("branches")) + if serializer.validated_data.get("branches") + else [] + ) + + branches = check_branches(unique_branches, product) + + parameters = CycloneDXCreateParameters( + product=product, + vulnerability_names=unique_vulnerability_names, + branches=branches, + document_id_prefix=serializer.validated_data.get("document_id_prefix"), + author=serializer.validated_data.get("author"), + manufacturer=serializer.validated_data.get("manufacturer"), + ) + + cyclonedx_bom = create_cyclonedx_document(parameters) + + if not cyclonedx_bom: + return Response(status=HTTP_204_NO_CONTENT) + + json_outputter = JsonV1Dot6(cyclonedx_bom) + serialized_json = json_outputter.output_as_string(indent=2) + + response = HttpResponse( # pylint: disable=http-response-with-content-type-json + # HTTPResponse gives more control about JSON serialization + content=serialized_json, + content_type=APPLICATION_JSON, + ) + response["Content-Disposition"] = "attachment; filename=" + _get_cyclonedx_vex_filename(cyclonedx_bom) + return response + + +class CycloneDXDocumentUpdateView(APIView): + @extend_schema( + methods=["POST"], + request=CycloneDXDocumentUpdateSerializer, + responses={HTTP_200_OK: bytes}, + ) + @action(detail=True, methods=["post"]) + def post(self, request: Request, document_id_prefix: str, document_base_id: str) -> HttpResponse: + serializer = CycloneDXDocumentUpdateSerializer(data=request.data) + if not serializer.is_valid(): + raise ValidationError(serializer.errors) + + parameters = CycloneDXUpdateParameters( + document_id_prefix=document_id_prefix, + document_base_id=document_base_id, + author=serializer.validated_data.get("author"), + manufacturer=serializer.validated_data.get("manufacturer"), + ) + + cyclonedx_bom = update_cyclonedx_document(parameters) + + if not cyclonedx_bom: + return Response(status=HTTP_204_NO_CONTENT) + + json_outputter = JsonV1Dot6(cyclonedx_bom) + 
serialized_json = json_outputter.output_as_string(indent=2) + + response = HttpResponse( # pylint: disable=http-response-with-content-type-json + # HTTPResponse gives more control about JSON serialization + content=serialized_json, + content_type=APPLICATION_JSON, + ) + response["Content-Disposition"] = "attachment; filename=" + _get_cyclonedx_vex_filename(cyclonedx_bom) + return response + + +class CycloneDXViewSet(GenericViewSet, DestroyModelMixin, ListModelMixin, RetrieveModelMixin): + serializer_class = CycloneDXSerializer + queryset = CycloneDX.objects.none() + filterset_class = CycloneDXFilter + filter_backends = [DjangoFilterBackend] + permission_classes = (IsAuthenticated, UserHasVEXPermission) + + def get_queryset(self) -> QuerySet[CycloneDX]: + return get_cyclonedx_s() + + +class CycloneDXVulnerabilityViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): + serializer_class = CycloneDXVulnerabilitySerializer + queryset = CycloneDX_Vulnerability.objects.none() + filterset_class = CycloneDXVulnerabilityFilter + filter_backends = [DjangoFilterBackend] + + def get_queryset(self) -> QuerySet[CycloneDX_Vulnerability]: + return get_cyclonedx_vulnerabilities() + + +class CycloneDXBranchViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): + serializer_class = CycloneDXBranchSerializer + queryset = CycloneDX_Branch.objects.none() + filterset_class = CycloneDXBranchFilter + filter_backends = [DjangoFilterBackend] + + def get_queryset(self) -> QuerySet[CycloneDX_Branch]: + return get_cyclonedx_branches() + + class VEXCounterViewSet(ModelViewSet): serializer_class = VEXCounterSerializer queryset = VEX_Counter.objects.all() @@ -342,16 +523,14 @@ class VEXCounterViewSet(ModelViewSet): permission_classes = (IsAuthenticated, UserHasVEXCounterPermission) -class VEXDocumentViewSet( - GenericViewSet, ListModelMixin, RetrieveModelMixin, DestroyModelMixin -): +class VEXDocumentViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin, DestroyModelMixin): serializer_class = VEXDocumentSerializer queryset = VEX_Document.objects.none() filterset_class = VEXDocumentFilter filter_backends = [DjangoFilterBackend] permission_classes = (IsAuthenticated, UserHasSuperuserPermission) - def get_queryset(self): + def get_queryset(self) -> QuerySet[VEX_Document]: return get_vex_documents() @@ -362,7 +541,7 @@ class VEXStatementViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): filter_backends = [DjangoFilterBackend] permission_classes = (IsAuthenticated, UserHasSuperuserPermission) - def get_queryset(self): + def get_queryset(self) -> QuerySet[VEX_Statement]: return get_vex_statements() @@ -374,7 +553,7 @@ class VEXImportView(APIView): request=VEXImportSerializer, responses={HTTP_204_NO_CONTENT: None}, ) - def post(self, request): + def post(self, request: Request) -> Response: request_serializer = VEXImportSerializer(data=request.data) if not request_serializer.is_valid(): raise ValidationError(request_serializer.errors) @@ -397,10 +576,10 @@ def _object_to_json(object_to_encode: Any, vex_type: str) -> str: return json.dumps(json_dict, indent=4, sort_keys=True, ensure_ascii=False) -def _remove_empty_elements(d: dict) -> dict: +def _remove_empty_elements(d: dict | list) -> dict | list: """recursively remove empty lists, empty dicts, or None elements from a dictionary""" - def empty(x): + def empty(x: Optional[(dict | list)]) -> bool: return x is None or x == {} or x == [] if not isinstance(d, (dict, list)): @@ -408,29 +587,23 @@ def empty(x): if isinstance(d, list): return [v for v in 
(_remove_empty_elements(v) for v in d) if not empty(v)] - return { - k: v - for k, v in ((k, _remove_empty_elements(v)) for k, v in d.items()) - if not empty(v) - } + return {k: v for k, v in ((k, _remove_empty_elements(v)) for k, v in d.items()) if not empty(v)} # Change all keys with the value 'id' to '@id' and # all keys with the value 'context' to '@context' in a dictionary recursively -def _change_keys_context(d: dict) -> dict: +def _change_keys_context(d: dict | list) -> dict | list: if not isinstance(d, (dict, list)): return d if isinstance(d, list): return [_change_keys_context(v) for v in d] - return { - k.replace("context", "@context"): _change_keys_context(v) for k, v in d.items() - } + return {k.replace("context", "@context"): _change_keys_context(v) for k, v in d.items()} # Change all keys with the value 'id' to '@id' and # all keys with the value 'context' to '@context' in a dictionary recursively -def _change_keys_id(d: dict) -> dict: +def _change_keys_id(d: dict | list) -> dict | list: if not isinstance(d, (dict, list)): return d if isinstance(d, list): @@ -462,3 +635,16 @@ def _get_openvex_filename(document_id: str, version: int) -> str: path = path.split("/")[-1] return f"{path}_{version:04d}.json" + + +def _get_cyclonedx_vex_filename(bom: Bom) -> str: + prefix = None + for bom_property in bom.metadata.properties: + if bom_property.name == "prefix": + prefix = bom_property.value + break + + if prefix: + return f"{prefix}_{bom.serial_number}_{bom.version:04d}.json" + + return f"{bom.serial_number}_{bom.version:04d}.json" diff --git a/backend/application/vex/apps.py b/backend/application/vex/apps.py index 8d9c01b40..c54c85ada 100644 --- a/backend/application/vex/apps.py +++ b/backend/application/vex/apps.py @@ -5,7 +5,7 @@ class VEXConfig(AppConfig): name = "application.vex" verbose_name = "Vulnerability Exploitability eXchange" - def ready(self): + def ready(self) -> None: try: import application.vex.signals # noqa F401 pylint: disable=import-outside-toplevel, unused-import except ImportError: diff --git a/backend/application/vex/migrations/0006_vex_document_vex_statement.py b/backend/application/vex/migrations/0006_vex_document_vex_statement.py index 3d13f0155..38b532bd9 100644 --- a/backend/application/vex/migrations/0006_vex_document_vex_statement.py +++ b/backend/application/vex/migrations/0006_vex_document_vex_statement.py @@ -107,11 +107,7 @@ class Migration(migrations.Migration): ), ], options={ - "indexes": [ - models.Index( - fields=["product_purl"], name="vex_vex_sta_product_990b30_idx" - ) - ], + "indexes": [models.Index(fields=["product_purl"], name="vex_vex_sta_product_990b30_idx")], }, ), ] diff --git a/backend/application/vex/migrations/0008_alter_vex_document_type.py b/backend/application/vex/migrations/0008_alter_vex_document_type.py new file mode 100644 index 000000000..150e61985 --- /dev/null +++ b/backend/application/vex/migrations/0008_alter_vex_document_type.py @@ -0,0 +1,20 @@ +# Generated by Django 5.2.4 on 2025-07-30 12:57 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vex", "0007_alter_csaf_tracking_current_release_date_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="vex_document", + name="type", + field=models.CharField( + choices=[("CSAF", "CSAF"), ("OpenVEX", "OpenVEX"), ("CycloneDX", "CycloneDX")], max_length=16 + ), + ), + ] diff --git a/backend/application/vex/migrations/0009_vex_statement_component_cyclonedx_bom_link_and_more.py 
b/backend/application/vex/migrations/0009_vex_statement_component_cyclonedx_bom_link_and_more.py new file mode 100644 index 000000000..889e0997c --- /dev/null +++ b/backend/application/vex/migrations/0009_vex_statement_component_cyclonedx_bom_link_and_more.py @@ -0,0 +1,22 @@ +# Generated by Django 5.2.5 on 2025-08-21 06:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vex", "0008_alter_vex_document_type"), + ] + + operations = [ + migrations.AddField( + model_name="vex_statement", + name="component_cyclonedx_bom_link", + field=models.CharField(blank=True, max_length=512), + ), + migrations.AddIndex( + model_name="vex_statement", + index=models.Index(fields=["component_cyclonedx_bom_link"], name="vex_vex_sta_compone_83cdc6_idx"), + ), + ] diff --git a/backend/application/vex/migrations/0010_cyclonedx_cyclonedx_branch_cyclonedx_vulnerability.py b/backend/application/vex/migrations/0010_cyclonedx_cyclonedx_branch_cyclonedx_vulnerability.py new file mode 100644 index 000000000..b70e9b175 --- /dev/null +++ b/backend/application/vex/migrations/0010_cyclonedx_cyclonedx_branch_cyclonedx_vulnerability.py @@ -0,0 +1,161 @@ +# Generated by Django 5.2.5 on 2025-08-22 18:24 + +import django.core.validators +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0065_observation_origin_component_cyclonedx_bom_link_and_more"), + ("vex", "0009_vex_statement_component_cyclonedx_bom_link_and_more"), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name="CycloneDX", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("document_id_prefix", models.CharField(max_length=255)), + ("document_base_id", models.CharField(max_length=36)), + ( + "version", + models.IntegerField( + validators=[ + django.core.validators.MinValueValidator(0), + django.core.validators.MaxValueValidator(999999), + ] + ), + ), + ("content_hash", models.CharField(blank=True, max_length=256)), + ("author", models.CharField(blank=True, max_length=255)), + ("manufacturer", models.CharField(blank=True, max_length=255)), + ("first_issued", models.DateTimeField()), + ("last_updated", models.DateTimeField()), + ( + "product", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="core.product", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "abstract": False, + "unique_together": {("document_id_prefix", "document_base_id")}, + }, + ), + migrations.CreateModel( + name="CycloneDX_Branch", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "branch", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="cyclonedxes", + to="core.branch", + ), + ), + ( + "cyclonedx", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="branches", + to="vex.cyclonedx", + ), + ), + ], + ), + migrations.CreateModel( + name="CycloneDX_Vulnerability", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=255)), + ( + "cyclonedx", 
+ models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="vulnerability_names", + to="vex.cyclonedx", + ), + ), + ], + ), + migrations.AlterField( + model_name="vex_statement", + name="status", + field=models.CharField( + choices=[ + ("not_affected", "not_affected"), + ("affected", "affected"), + ("fixed", "fixed"), + ("under_investigation", "under_investigation"), + ("false_positive", "false_positive"), + ], + max_length=24, + ), + ), + migrations.AlterField( + model_name="vex_statement", + name="justification", + field=models.CharField( + blank=True, + choices=[ + ("component_not_present", "component_not_present"), + ("vulnerable_code_not_present", "vulnerable_code_not_present"), + ( + "vulnerable_code_cannot_be_controlled_by_adversary", + "vulnerable_code_cannot_be_controlled_by_adversary", + ), + ("vulnerable_code_not_in_execute_path", "vulnerable_code_not_in_execute_path"), + ("inline_mitigations_already_exist", "inline_mitigations_already_exist"), + ("code_not_present", "code_not_present"), + ("code_not_reachable", "code_not_reachable"), + ("requires_configuration", "requires_configuration"), + ("requires_dependency", "requires_dependency"), + ("requires_environment", "requires_environment"), + ("protected_by_compiler", "protected_by_compiler"), + ("protected_at_runtime", "protected_at_runtime"), + ("protected_at_perimeter", "protected_at_perimeter"), + ("protected_by_mitigating_control", "protected_by_mitigating_control"), + ], + max_length=64, + ), + ), + ] diff --git a/backend/application/vex/models.py b/backend/application/vex/models.py index 352a33b5a..120efe6a6 100644 --- a/backend/application/vex/models.py +++ b/backend/application/vex/models.py @@ -1,3 +1,5 @@ +from typing import Any + from django.core.validators import MaxValueValidator, MinValueValidator from django.db.models import ( CASCADE, @@ -13,12 +15,12 @@ from application.access_control.models import User from application.core.models import Branch, Product +from application.core.types import VEX_Justification from application.vex.types import ( CSAF_Publisher_Category, CSAF_TLP_Label, CSAF_Tracking_Status, VEX_Document_Type, - VEX_Justification, VEX_Status, ) @@ -62,7 +64,7 @@ class OpenVEX(VEX_Base): # Make sure that timestamp and last updated date are exactly the # same when creating a new CSAF record - def save(self, *args, **kwargs): + def save(self, *args: Any, **kwargs: Any) -> None: now = timezone.now() if not self.timestamp: self.timestamp = now @@ -85,18 +87,14 @@ class CSAF(VEX_Base): tlp_label = CharField(max_length=16, choices=CSAF_TLP_Label.CSAF_TLP_LABEL_CHOICES) tracking_initial_release_date = DateTimeField() tracking_current_release_date = DateTimeField() - tracking_status = CharField( - max_length=16, choices=CSAF_Tracking_Status.CSAF_TRACKING_STATUS_CHOICES - ) + tracking_status = CharField(max_length=16, choices=CSAF_Tracking_Status.CSAF_TRACKING_STATUS_CHOICES) publisher_name = CharField(max_length=255) - publisher_category = CharField( - max_length=16, choices=CSAF_Publisher_Category.CSAF_PUBLISHER_CATEGORY_CHOICES - ) + publisher_category = CharField(max_length=16, choices=CSAF_Publisher_Category.CSAF_PUBLISHER_CATEGORY_CHOICES) publisher_namespace = CharField(max_length=255) # Make sure that initial release date and current release date are exactly the # same when creating a new CSAF record - def save(self, *args, **kwargs): + def save(self, *args: Any, **kwargs: Any) -> None: now = timezone.now() if not self.tracking_initial_release_date: 
self.tracking_initial_release_date = now @@ -121,6 +119,23 @@ class CSAF_Revision(Model): summary = TextField(max_length=255) +class CycloneDX(VEX_Base): + author = CharField(max_length=255, blank=True) + manufacturer = CharField(max_length=255, blank=True) + first_issued = DateTimeField() + last_updated = DateTimeField() + + +class CycloneDX_Branch(Model): + cyclonedx = ForeignKey(CycloneDX, related_name="branches", on_delete=CASCADE) + branch = ForeignKey(Branch, related_name="cyclonedxes", on_delete=CASCADE) + + +class CycloneDX_Vulnerability(Model): + cyclonedx = ForeignKey(CycloneDX, related_name="vulnerability_names", on_delete=CASCADE) + name = CharField(max_length=255) + + class VEX_Document(Model): type = CharField(max_length=16, choices=VEX_Document_Type.VEX_DOCUMENT_TYPE_CHOICES) document_id = CharField(max_length=255) @@ -142,15 +157,15 @@ class VEX_Statement(Model): vulnerability_id = CharField(max_length=255) description = TextField(max_length=2048, blank=True) status = CharField(max_length=24, choices=VEX_Status.VEX_STATUS_CHOICES) - justification = CharField( - max_length=64, choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, blank=True - ) + justification = CharField(max_length=64, choices=VEX_Justification.VEX_JUSTIFICATION_CHOICES, blank=True) impact = CharField(max_length=255, blank=True) remediation = CharField(max_length=255, blank=True) product_purl = CharField(max_length=255, blank=True) component_purl = CharField(max_length=255, blank=True) + component_cyclonedx_bom_link = CharField(max_length=512, blank=True) class Meta: indexes = [ Index(fields=["product_purl"]), + Index(fields=["component_cyclonedx_bom_link"]), ] diff --git a/backend/application/vex/queries/csaf.py b/backend/application/vex/queries/csaf.py index 12608b5a1..f7c1ffc6f 100644 --- a/backend/application/vex/queries/csaf.py +++ b/backend/application/vex/queries/csaf.py @@ -3,7 +3,7 @@ from django.db.models import Exists, OuterRef, Q from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import Product_Authorization_Group_Member, Product_Member from application.vex.models import CSAF, CSAF_Branch, CSAF_Vulnerability @@ -17,34 +17,24 @@ def get_csafs() -> QuerySet[CSAF]: csafs = CSAF.objects.all() if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("product_id"), user=user - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product__product_group"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, ) csafs = 
csafs.annotate( product__member=Exists(product_members), product__product_group__member=Exists(product_group_members), authorization_group_member=Exists(product_authorization_group_members), - product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), ) csafs = csafs.filter( @@ -58,18 +48,14 @@ def get_csafs() -> QuerySet[CSAF]: return csafs -def get_csaf_by_document_id( - document_id_prefix: str, document_base_id: str -) -> Optional[CSAF]: +def get_csaf_by_document_id(document_id_prefix: str, document_base_id: str) -> Optional[CSAF]: user = get_current_user() if user is None: return None try: - csaf = CSAF.objects.get( - document_id_prefix=document_id_prefix, document_base_id=document_base_id - ) + csaf = CSAF.objects.get(document_id_prefix=document_id_prefix, document_base_id=document_base_id) if not user.is_superuser and csaf not in get_csafs(): return None return csaf diff --git a/backend/application/vex/queries/cyclonedx.py b/backend/application/vex/queries/cyclonedx.py new file mode 100644 index 000000000..84207d597 --- /dev/null +++ b/backend/application/vex/queries/cyclonedx.py @@ -0,0 +1,90 @@ +from typing import Optional + +from django.db.models import Exists, OuterRef, Q +from django.db.models.query import QuerySet + +from application.access_control.services.current_user import get_current_user +from application.core.models import Product_Authorization_Group_Member, Product_Member +from application.vex.models import CycloneDX, CycloneDX_Branch, CycloneDX_Vulnerability + + +def get_cyclonedx_s() -> QuerySet[CycloneDX]: + user = get_current_user() + + if user is None: + return CycloneDX.objects.none() + + cyclonedx_s = CycloneDX.objects.all() + + if not user.is_superuser: + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) + + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), + authorization_group__users=user, + ) + + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, + ) + + cyclonedx_s = cyclonedx_s.annotate( + product__member=Exists(product_members), + product__product_group__member=Exists(product_group_members), + authorization_group_member=Exists(product_authorization_group_members), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), + ) + + cyclonedx_s = cyclonedx_s.filter( + Q(product__member=True) + | Q(product__product_group__member=True) + | Q(authorization_group_member=True) + | Q(product_group_authorization_group_member=True) + | (Q(product__isnull=True) & Q(user=user)) + ) + + return cyclonedx_s + + +def get_cyclonedx_by_document_id(document_id_prefix: str, document_base_id: str) -> Optional[CycloneDX]: + user = get_current_user() + + if user is None: + return None + + try: + cyclonedx = CycloneDX.objects.get( + document_id_prefix=document_id_prefix, + document_base_id=document_base_id, + ) + if not user.is_superuser and cyclonedx not in get_cyclonedx_s(): + return None + return cyclonedx + except CycloneDX.DoesNotExist: + return None + + +def get_cyclonedx_vulnerabilities() -> QuerySet[CycloneDX_Vulnerability]: + user = get_current_user() + + if user 
is None: + return CycloneDX_Vulnerability.objects.none() + + if user.is_superuser: + return CycloneDX_Vulnerability.objects.all().order_by("name") + + return CycloneDX_Vulnerability.objects.filter(cyclonedx__in=get_cyclonedx_s()).order_by("name") + + +def get_cyclonedx_branches() -> QuerySet[CycloneDX_Branch]: + user = get_current_user() + + if user is None: + return CycloneDX_Branch.objects.none() + + if user.is_superuser: + return CycloneDX_Branch.objects.all().order_by("branch__name") + + return CycloneDX_Branch.objects.filter(cyclonedx__in=get_cyclonedx_s()).order_by("branch__name") diff --git a/backend/application/vex/queries/openvex.py b/backend/application/vex/queries/openvex.py index 5581276e7..db62fc7b6 100644 --- a/backend/application/vex/queries/openvex.py +++ b/backend/application/vex/queries/openvex.py @@ -3,7 +3,7 @@ from django.db.models import Exists, OuterRef, Q from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import Product_Authorization_Group_Member, Product_Member from application.vex.models import OpenVEX, OpenVEX_Branch, OpenVEX_Vulnerability @@ -17,34 +17,24 @@ def get_openvex_s() -> QuerySet[OpenVEX]: openvex_s = OpenVEX.objects.all() if not user.is_superuser: - product_members = Product_Member.objects.filter( - product=OuterRef("product_id"), user=user - ) - product_group_members = Product_Member.objects.filter( - product=OuterRef("product__product_group"), user=user - ) + product_members = Product_Member.objects.filter(product=OuterRef("product_id"), user=user) + product_group_members = Product_Member.objects.filter(product=OuterRef("product__product_group"), user=user) - product_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product_id"), - authorization_group__users=user, - ) + product_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product_id"), + authorization_group__users=user, ) - product_group_authorization_group_members = ( - Product_Authorization_Group_Member.objects.filter( - product=OuterRef("product__product_group"), - authorization_group__users=user, - ) + product_group_authorization_group_members = Product_Authorization_Group_Member.objects.filter( + product=OuterRef("product__product_group"), + authorization_group__users=user, ) openvex_s = openvex_s.annotate( product__member=Exists(product_members), product__product_group__member=Exists(product_group_members), authorization_group_member=Exists(product_authorization_group_members), - product_group_authorization_group_member=Exists( - product_group_authorization_group_members - ), + product_group_authorization_group_member=Exists(product_group_authorization_group_members), ) openvex_s = openvex_s.filter( @@ -58,9 +48,7 @@ def get_openvex_s() -> QuerySet[OpenVEX]: return openvex_s -def get_openvex_by_document_id( - document_id_prefix: str, document_base_id: str -) -> Optional[OpenVEX]: +def get_openvex_by_document_id(document_id_prefix: str, document_base_id: str) -> Optional[OpenVEX]: user = get_current_user() if user is None: @@ -87,9 +75,7 @@ def get_openvex_vulnerabilities() -> QuerySet[OpenVEX_Vulnerability]: if user.is_superuser: return OpenVEX_Vulnerability.objects.all().order_by("name") - return OpenVEX_Vulnerability.objects.filter(openvex__in=get_openvex_s()).order_by( - "name" - ) + return 
OpenVEX_Vulnerability.objects.filter(openvex__in=get_openvex_s()).order_by("name") def get_openvex_branches() -> QuerySet[OpenVEX_Branch]: @@ -101,6 +87,4 @@ def get_openvex_branches() -> QuerySet[OpenVEX_Branch]: if user.is_superuser: return OpenVEX_Branch.objects.all().order_by("branch__name") - return OpenVEX_Branch.objects.filter(openvex__in=get_openvex_s()).order_by( - "branch__name" - ) + return OpenVEX_Branch.objects.filter(openvex__in=get_openvex_s()).order_by("branch__name") diff --git a/backend/application/vex/queries/vex_document.py b/backend/application/vex/queries/vex_document.py index e3596962f..d41d8b574 100644 --- a/backend/application/vex/queries/vex_document.py +++ b/backend/application/vex/queries/vex_document.py @@ -1,6 +1,6 @@ from django.db.models.query import QuerySet -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.vex.models import VEX_Document, VEX_Statement diff --git a/backend/application/vex/services/csaf_generator.py b/backend/application/vex/services/csaf_generator.py index c88b21f13..fd9478d89 100644 --- a/backend/application/vex/services/csaf_generator.py +++ b/backend/application/vex/services/csaf_generator.py @@ -5,9 +5,9 @@ import jsonpickle from rest_framework.exceptions import NotFound -from application.access_control.services.authorization import user_has_permission_or_403 -from application.access_control.services.roles_permissions import Permissions -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user +from application.authorization.services.authorization import user_has_permission_or_403 +from application.authorization.services.roles_permissions import Permissions from application.core.models import Branch, Product from application.vex.models import CSAF, CSAF_Branch, CSAF_Revision, CSAF_Vulnerability from application.vex.queries.csaf import get_csaf_by_document_id @@ -27,8 +27,6 @@ set_vulnerability_description, ) from application.vex.services.vex_base import ( - check_and_get_product, - check_branch_names, check_product_or_vulnerabilities, check_vulnerability_names, create_document_base_id, @@ -40,9 +38,9 @@ @dataclass() class CSAFCreateParameters: - product_id: int + product: Optional[Product] vulnerability_names: list[str] - branch_names: list[str] + branches: list[Branch] document_id_prefix: str title: str publisher_name: str @@ -70,15 +68,8 @@ class CSAFContent: def create_csaf_document(parameters: CSAFCreateParameters) -> Optional[CSAFRoot]: - check_product_or_vulnerabilities( - parameters.product_id, parameters.vulnerability_names - ) - product = check_and_get_product(parameters.product_id) - if product: - user_has_permission_or_403(product, Permissions.VEX_Create) - + check_product_or_vulnerabilities(parameters.product, parameters.vulnerability_names) check_vulnerability_names(parameters.vulnerability_names) - branches = check_branch_names(parameters.branch_names, product) user = get_current_user() if not user: @@ -87,7 +78,7 @@ def create_csaf_document(parameters: CSAFCreateParameters) -> Optional[CSAFRoot] document_base_id = create_document_base_id(parameters.document_id_prefix) csaf = CSAF.objects.create( - product=product, + product=parameters.product, document_id_prefix=parameters.document_id_prefix, document_base_id=document_base_id, version=1, @@ -110,22 +101,19 @@ def create_csaf_document(parameters: 
CSAFCreateParameters) -> Optional[CSAFRoot] vulnerability_name = "" CSAF_Vulnerability.objects.create(csaf=csaf, name=vulnerability_name) - for branch in branches: + for branch in parameters.branches: CSAF_Branch.objects.create(csaf=csaf, branch=branch) csaf_root = create_csaf_root(csaf) vulnerabilities = [] - product_tree = CSAFProductTree(branches=[], relationships=[]) - if product: + if parameters.product: vulnerabilities, product_tree = _get_content_for_product( - product, parameters.vulnerability_names, branches + parameters.product, parameters.vulnerability_names, parameters.branches ) else: - vulnerabilities, product_tree = _get_content_for_vulnerabilities( - parameters.vulnerability_names - ) + vulnerabilities, product_tree = _get_content_for_vulnerabilities(parameters.vulnerability_names) if not vulnerabilities: csaf.delete() @@ -133,9 +121,7 @@ def create_csaf_document(parameters: CSAFCreateParameters) -> Optional[CSAFRoot] csaf_content = CSAFContent(vulnerabilities, product_tree) content_json = jsonpickle.encode(csaf_content, unpicklable=False) - content_hash = hashlib.sha256( - content_json.casefold().encode("utf-8").strip() - ).hexdigest() + content_hash = hashlib.sha256(content_json.casefold().encode("utf-8").strip()).hexdigest() csaf.content_hash = content_hash csaf.save() @@ -146,9 +132,7 @@ def create_csaf_document(parameters: CSAFCreateParameters) -> Optional[CSAFRoot] def update_csaf_document(parameters: CSAFUpdateParameters) -> Optional[CSAFRoot]: - csaf = get_csaf_by_document_id( - parameters.document_id_prefix, parameters.document_base_id - ) + csaf = get_csaf_by_document_id(parameters.document_id_prefix, parameters.document_base_id) if not csaf: raise NotFound( f"CSAF document with ids {parameters.document_id_prefix} and {parameters.document_base_id} does not exist" @@ -156,32 +140,21 @@ def update_csaf_document(parameters: CSAFUpdateParameters) -> Optional[CSAFRoot] user_has_permission_or_403(csaf, Permissions.VEX_Edit) - csaf_vulnerability_names = list( - CSAF_Vulnerability.objects.filter(csaf=csaf).values_list("name", flat=True) - ) + csaf_vulnerability_names = list(CSAF_Vulnerability.objects.filter(csaf=csaf).values_list("name", flat=True)) - csaf_branch_ids = CSAF_Branch.objects.filter(csaf=csaf).values_list( - "branch", flat=True - ) + csaf_branch_ids = CSAF_Branch.objects.filter(csaf=csaf).values_list("branch", flat=True) csaf_branches = list(Branch.objects.filter(id__in=csaf_branch_ids)) vulnerabilities = [] - product_tree = CSAFProductTree(branches=[], relationships=[]) if csaf.product: - vulnerabilities, product_tree = _get_content_for_product( - csaf.product, csaf_vulnerability_names, csaf_branches - ) + vulnerabilities, product_tree = _get_content_for_product(csaf.product, csaf_vulnerability_names, csaf_branches) else: - vulnerabilities, product_tree = _get_content_for_vulnerabilities( - csaf_vulnerability_names - ) + vulnerabilities, product_tree = _get_content_for_vulnerabilities(csaf_vulnerability_names) csaf_content = CSAFContent(vulnerabilities, product_tree) content_json = jsonpickle.encode(csaf_content, unpicklable=False) - content_hash = hashlib.sha256( - content_json.casefold().encode("utf-8").strip() - ).hexdigest() + content_hash = hashlib.sha256(content_json.casefold().encode("utf-8").strip()).hexdigest() if content_hash == csaf.content_hash: return None @@ -229,9 +202,7 @@ def _get_content_for_vulnerabilities(vulnerability_names: list[str]) -> tuple: current_vulnerability_description = set_vulnerability_description( vulnerability, 
observation, current_vulnerability_description ) - append_product_to_product_tree( - product_tree, observation.product, observation.branch - ) + append_product_to_product_tree(product_tree, observation.product, observation.branch) set_product_status(vulnerability, observation) remove_conflicting_product_status(vulnerability) @@ -241,9 +212,7 @@ def _get_content_for_vulnerabilities(vulnerability_names: list[str]) -> tuple: return vulnerabilities, product_tree -def _get_content_for_product( - product: Product, vulnerability_names: list[str], branches: list[Branch] -) -> tuple: +def _get_content_for_product(product: Product, vulnerability_names: list[str], branches: list[Branch]) -> tuple: vulnerabilities: dict[str, CSAFVulnerability] = {} product_tree = CSAFProductTree(branches=[], relationships=[]) diff --git a/backend/application/vex/services/csaf_generator_component.py b/backend/application/vex/services/csaf_generator_component.py index 0134a8e45..1dfbcaaae 100644 --- a/backend/application/vex/services/csaf_generator_component.py +++ b/backend/application/vex/services/csaf_generator_component.py @@ -18,10 +18,12 @@ ) -def get_component_id( - component_name_version: str, purl: Optional[str], cpe: Optional[str] -) -> str: - return purl if purl else cpe if cpe else component_name_version +def get_component_id(component_name_version: str, purl: Optional[str], cpe: Optional[str]) -> str: + if purl: + return purl + if cpe: + return cpe + return component_name_version def append_component_to_product_tree( @@ -56,9 +58,7 @@ def append_component_to_product_tree( _append_component_to_relationships(product_tree, observation) - product_branch_name = ( - purl.name if purl and purl.name else observation.origin_component_name - ) + product_branch_name = purl.name if purl and purl.name else observation.origin_component_name found = False vendor_branch.branches = vendor_branch.branches or [] @@ -76,14 +76,10 @@ def append_component_to_product_tree( product_branch.branches = product_branch.branches or [] for component_branch in product_branch.branches: - if ( - component_branch.product - and component_branch.product.product_id - == get_component_id( - observation.origin_component_name_version, - observation.origin_component_purl, - observation.origin_component_cpe, - ) + if component_branch.product and component_branch.product.product_id == get_component_id( + observation.origin_component_name_version, + observation.origin_component_purl, + observation.origin_component_cpe, ): return @@ -99,16 +95,12 @@ def append_component_to_product_tree( product_branch.branches.append(component_branch) -def _create_component( - component_name_version: str, purl: Optional[str], cpe: Optional[str] -) -> CSAFFullProductName: +def _create_component(component_name_version: str, purl: Optional[str], cpe: Optional[str]) -> CSAFFullProductName: product_identification_helper = None if purl or cpe: purl = purl if purl else None cpe = cpe if cpe else None - product_identification_helper = CSAFProductIdentificationHelper( - purl=purl, cpe=cpe - ) + product_identification_helper = CSAFProductIdentificationHelper(purl=purl, cpe=cpe) component_id = get_component_id(component_name_version, purl, cpe) full_product_name = CSAFFullProductName( @@ -120,9 +112,7 @@ def _create_component( return full_product_name -def _append_component_to_relationships( - product_tree: CSAFProductTree, observation: Observation -) -> None: +def _append_component_to_relationships(product_tree: CSAFProductTree, observation: Observation) -> None: if not 
observation.origin_component_name_version: return @@ -134,10 +124,7 @@ def _append_component_to_relationships( product_id = get_product_id(observation.product, observation.branch) for relationship in product_tree.relationships: - if ( - relationship.product_reference == component_id - and relationship.relates_to_product_reference == product_id - ): + if relationship.product_reference == component_id and relationship.relates_to_product_reference == product_id: return full_product_name = CSAFFullProductName( diff --git a/backend/application/vex/services/csaf_generator_document.py b/backend/application/vex/services/csaf_generator_document.py index 0968831b0..3da99e8a2 100644 --- a/backend/application/vex/services/csaf_generator_document.py +++ b/backend/application/vex/services/csaf_generator_document.py @@ -39,9 +39,7 @@ def create_csaf_root(csaf: CSAF) -> CSAFRoot: ) csaf_revision_history_list.append(csaf_revision_history) - tracking_id = _get_document_id( - csaf.document_id_prefix, csaf.document_base_id, csaf.version - ) + tracking_id = _get_document_id(csaf.document_id_prefix, csaf.document_base_id, csaf.version) csaf_tracking = CSAFTracking( id=tracking_id, @@ -74,7 +72,5 @@ def create_csaf_root(csaf: CSAF) -> CSAFRoot: return csaf_root -def _get_document_id( - document_id_prefix: str, document_base_id: str, document_version: int -) -> str: +def _get_document_id(document_id_prefix: str, document_base_id: str, document_version: int) -> str: return document_id_prefix + "_" + document_base_id + f"_{document_version:04d}" diff --git a/backend/application/vex/services/csaf_generator_helpers.py b/backend/application/vex/services/csaf_generator_helpers.py index 201db42d6..5b20a635f 100644 --- a/backend/application/vex/services/csaf_generator_helpers.py +++ b/backend/application/vex/services/csaf_generator_helpers.py @@ -63,9 +63,7 @@ def get_product_id(product: Product, branch: Optional[Branch]) -> str: def get_relationship_name(observation: Observation) -> str: relationship_name = f"{observation.origin_component_name_version}@" relationship_name += ( - f"{observation.product.name}:{observation.branch.name}" - if observation.branch - else observation.product.name + f"{observation.product.name}:{observation.branch.name}" if observation.branch else observation.product.name ) return relationship_name diff --git a/backend/application/vex/services/csaf_generator_product.py b/backend/application/vex/services/csaf_generator_product.py index 620567a81..2ce65e17f 100644 --- a/backend/application/vex/services/csaf_generator_product.py +++ b/backend/application/vex/services/csaf_generator_product.py @@ -66,16 +66,12 @@ def _create_product(product: Product, branch: Optional[Branch]) -> CSAFFullProdu if branch.purl or branch.cpe23: purl = branch.purl if branch.purl else None cpe = branch.cpe23 if branch.cpe23 else None - product_identification_helper = CSAFProductIdentificationHelper( - purl=purl, cpe=cpe - ) + product_identification_helper = CSAFProductIdentificationHelper(purl=purl, cpe=cpe) else: if product.purl or product.cpe23: purl = product.purl if product.purl else None cpe = product.cpe23 if product.cpe23 else None - product_identification_helper = CSAFProductIdentificationHelper( - purl=purl, cpe=cpe - ) + product_identification_helper = CSAFProductIdentificationHelper(purl=purl, cpe=cpe) product_name = f"{product.name}:{branch.name}" if branch else product.name diff --git a/backend/application/vex/services/csaf_generator_remediation.py 
b/backend/application/vex/services/csaf_generator_remediation.py index 61c68ecb9..475480b43 100644 --- a/backend/application/vex/services/csaf_generator_remediation.py +++ b/backend/application/vex/services/csaf_generator_remediation.py @@ -6,24 +6,16 @@ from application.vex.types import CSAF_Status, CSAFRemediation, CSAFVulnerability -def set_remediation(vulnerability: CSAFVulnerability, observation: Observation): +def set_remediation(vulnerability: CSAFVulnerability, observation: Observation) -> None: vex_status = map_status(observation.current_status) if vex_status == CSAF_Status.CSAF_STATUS_AFFECTED: product_or_relationship_id = get_product_or_relationship_id(observation) category = "mitigation" if observation.recommendation else "none_available" - details = ( - observation.recommendation - if observation.recommendation - else "No remediation available" - ) + details = observation.recommendation if observation.recommendation else "No remediation available" - found = _check_and_append_none_available( - vulnerability, product_or_relationship_id, category - ) + found = _check_and_append_none_available(vulnerability, product_or_relationship_id, category) - found = _check_and_append_mitigation( - found, vulnerability, product_or_relationship_id, category, details - ) + found = _check_and_append_mitigation(found, vulnerability, product_or_relationship_id, category, details) if not found: remediation = CSAFRemediation( @@ -36,10 +28,7 @@ def set_remediation(vulnerability: CSAFVulnerability, observation: Observation): # remove "none_available" remediation if mitigation is available if category == "mitigation": for remediation in vulnerability.remediations: - if ( - remediation.category == "none_available" - and product_or_relationship_id in remediation.product_ids - ): + if remediation.category == "none_available" and product_or_relationship_id in remediation.product_ids: remediation.product_ids.remove(product_or_relationship_id) # remove remediations without product_ids diff --git a/backend/application/vex/services/csaf_generator_vulnerability.py b/backend/application/vex/services/csaf_generator_vulnerability.py index 5c85d6df0..206d3b95c 100644 --- a/backend/application/vex/services/csaf_generator_vulnerability.py +++ b/backend/application/vex/services/csaf_generator_vulnerability.py @@ -7,7 +7,10 @@ get_vulnerability_ecosystem, map_status, ) -from application.vex.services.vex_base import get_vulnerability_url +from application.vex.services.vex_base import ( + get_vulnerability_url, + map_vex_justification_to_csaf_openvex_justification, +) from application.vex.types import ( CSAF_Status, CSAFFlag, @@ -20,10 +23,8 @@ ) -def create_vulnerability(vulnerability_name) -> CSAFVulnerability: - product_status = CSAFProductStatus( - fixed=[], known_affected=[], known_not_affected=[], under_investigation=[] - ) +def create_vulnerability(vulnerability_name: str) -> CSAFVulnerability: + product_status = CSAFProductStatus(fixed=[], known_affected=[], known_not_affected=[], under_investigation=[]) if vulnerability_name.startswith("CVE"): vulnerability = CSAFVulnerability( cve=vulnerability_name, @@ -52,9 +53,7 @@ def create_vulnerability(vulnerability_name) -> CSAFVulnerability: ) reference_url = get_vulnerability_url(vulnerability_name) if reference_url: - reference = CSAFReference( - category="external", url=reference_url, summary="Security Advisory" - ) + reference = CSAFReference(category="external", url=reference_url, summary="Security Advisory") vulnerability.references.append(reference) return 
vulnerability @@ -65,15 +64,8 @@ def set_vulnerability_description( observation: Observation, current_vulnerability_description: Optional[str], ) -> str: - if ( - not current_vulnerability_description - or current_vulnerability_description == "No description available" - ): - description = ( - observation.description - if observation.description - else "No description available" - ) + if not current_vulnerability_description or current_vulnerability_description == "No description available": + description = observation.description if observation.description else "No description available" current_vulnerability_description = description csaf_note = CSAFNote( category="description", @@ -84,39 +76,24 @@ def set_vulnerability_description( return current_vulnerability_description -def set_product_status(vulnerability: CSAFVulnerability, observation: Observation): +def set_product_status(vulnerability: CSAFVulnerability, observation: Observation) -> None: vex_status = map_status(observation.current_status) product_or_relationship_id = get_product_or_relationship_id(observation) if vex_status == CSAF_Status.CSAF_STATUS_NOT_AFFECTED: - if ( - product_or_relationship_id - not in vulnerability.product_status.known_not_affected - ): - vulnerability.product_status.known_not_affected.append( - product_or_relationship_id - ) + if product_or_relationship_id not in vulnerability.product_status.known_not_affected: + vulnerability.product_status.known_not_affected.append(product_or_relationship_id) elif vex_status == CSAF_Status.CSAF_STATUS_AFFECTED: - if ( - product_or_relationship_id - not in vulnerability.product_status.known_affected - ): - vulnerability.product_status.known_affected.append( - product_or_relationship_id - ) + if product_or_relationship_id not in vulnerability.product_status.known_affected: + vulnerability.product_status.known_affected.append(product_or_relationship_id) elif vex_status == CSAF_Status.CSAF_STATUS_FIXED: if product_or_relationship_id not in vulnerability.product_status.fixed: vulnerability.product_status.fixed.append(product_or_relationship_id) elif vex_status == CSAF_Status.CSAF_STATUS_UNDER_INVESTIGATION: - if ( - product_or_relationship_id - not in vulnerability.product_status.under_investigation - ): - vulnerability.product_status.under_investigation.append( - product_or_relationship_id - ) + if product_or_relationship_id not in vulnerability.product_status.under_investigation: + vulnerability.product_status.under_investigation.append(product_or_relationship_id) -def remove_conflicting_product_status(vulnerability: CSAFVulnerability): +def remove_conflicting_product_status(vulnerability: CSAFVulnerability) -> None: product_ids = [] for product_id in vulnerability.product_status.known_affected: @@ -144,7 +121,7 @@ def remove_conflicting_product_status(vulnerability: CSAFVulnerability): vulnerability.product_status.fixed = fixed_product_ids -def set_flag_or_threat(vulnerability: CSAFVulnerability, observation: Observation): +def set_flag_or_threat(vulnerability: CSAFVulnerability, observation: Observation) -> None: vex_status = map_status(observation.current_status) if vex_status == CSAF_Status.CSAF_STATUS_NOT_AFFECTED: product_or_relationship_id = get_product_or_relationship_id(observation) @@ -152,23 +129,21 @@ def set_flag_or_threat(vulnerability: CSAFVulnerability, observation: Observatio if observation_log and observation_log.vex_justification: found = False for flag in vulnerability.flags: - if flag.label == observation_log.vex_justification: + if flag.label == 
map_vex_justification_to_csaf_openvex_justification(observation_log.vex_justification): if product_or_relationship_id not in flag.product_ids: flag.product_ids.append(product_or_relationship_id) found = True break if not found: csaf_flag = CSAFFlag( - label=observation_log.vex_justification, + label=map_vex_justification_to_csaf_openvex_justification(observation_log.vex_justification), product_ids=[product_or_relationship_id], ) vulnerability.flags.append(csaf_flag) else: category = "impact" details = ( - observation_log.comment - if observation_log and observation_log.comment - else "No justification available" + observation_log.comment if observation_log and observation_log.comment else "No justification available" ) found = False for threat in vulnerability.threats: diff --git a/backend/application/vex/services/csaf_parser.py b/backend/application/vex/services/csaf_parser.py index 663f04c96..e4ffa8785 100644 --- a/backend/application/vex/services/csaf_parser.py +++ b/backend/application/vex/services/csaf_parser.py @@ -26,15 +26,11 @@ def parse_csaf_data(data: dict) -> None: products: dict[str, str] = {} product_tree: dict = data.get("product_tree", {}) _find_products_in_branches(product_tree.get("branches", []), products) - _find_products_in_full_product_names( - product_tree.get("full_product_names", []), products - ) + _find_products_in_full_product_names(product_tree.get("full_product_names", []), products) relationships: dict[str, Relationship] = _process_relationships(product_tree) - product_purls, vex_statements = _process_vulnerabilities( - data, csaf_document, products, relationships - ) + product_purls, vex_statements = _process_vulnerabilities(data, csaf_document, products, relationships) apply_vex_statements_after_import(product_purls, vex_statements) @@ -50,14 +46,10 @@ def _create_csaf_document(data: dict) -> VEX_Document: version = data.get("document", {}).get("tracking", {}).get("version") if not version: raise ValidationError("document/tracking/version is missing") - initial_release_date = ( - data.get("document", {}).get("tracking", {}).get("initial_release_date") - ) + initial_release_date = data.get("document", {}).get("tracking", {}).get("initial_release_date") if not initial_release_date: raise ValidationError("document/tracking/initial_release_date is missing") - current_release_date = ( - data.get("document", {}).get("tracking", {}).get("current_release_date") - ) + current_release_date = data.get("document", {}).get("tracking", {}).get("current_release_date") if not current_release_date: current_release_date = initial_release_date author = data.get("document", {}).get("publisher", {}).get("name") @@ -93,9 +85,7 @@ def _find_products_in_branches(branches: list, products: dict[str, str]) -> None _process_product(product, products) -def _find_products_in_full_product_names( - full_product_names: list, products: dict[str, str] -) -> None: +def _find_products_in_full_product_names(full_product_names: list, products: dict[str, str]) -> None: for product in full_product_names: _process_product(product, products) @@ -124,9 +114,7 @@ def _process_relationships(product_tree: dict) -> dict[str, Relationship]: product_id = relationship.get("relates_to_product_reference") component_id = relationship.get("product_reference") if relationship_id and component_id and product_id: - relationships[relationship_id] = Relationship( - component_id=component_id, product_id=product_id - ) + relationships[relationship_id] = Relationship(component_id=component_id, product_id=product_id) 
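As a sketch with made-up CSAF product ids and purls (none of these values come from the repository): a relationship entry collected here is what later lets _get_product_component resolve a product_id that names a relationship into the (product purl, component purl) pair the VEX statements need.

from dataclasses import dataclass
from typing import Optional

@dataclass(frozen=True)
class Relationship:
    component_id: str
    product_id: str

# Illustrative product tree: full product names keyed by CSAF product id.
products = {
    "CSAFPID-0001": "pkg:docker/example/backend@1.2.3",  # the product itself
    "CSAFPID-0002": "pkg:pypi/django@5.2.5",              # a component
}
# Illustrative relationship: component CSAFPID-0002 is installed in product CSAFPID-0001.
relationships = {
    "CSAFPID-0001-0002": Relationship(component_id="CSAFPID-0002", product_id="CSAFPID-0001"),
}

def resolve(product_id: str) -> Optional[tuple[str, str]]:
    # Resolve a relationship reference into (product purl, component purl),
    # mirroring the lookup _get_product_component performs for the product status lists.
    relationship = relationships.get(product_id)
    if relationship:
        product_purl = products.get(relationship.product_id)
        component_purl = products.get(relationship.component_id)
        if product_purl and component_purl:
            return product_purl, component_purl
    return None

print(resolve("CSAFPID-0001-0002"))
# ('pkg:docker/example/backend@1.2.3', 'pkg:pypi/django@5.2.5')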
return relationships @@ -162,9 +150,7 @@ def _process_vulnerabilities( known_affected = product_status.get(CSAF_Status.CSAF_STATUS_AFFECTED, []) for product_id in known_affected: - product_component = _get_product_component( - product_id, products, relationships - ) + product_component = _get_product_component(product_id, products, relationships) vex_statement = VEX_Statement( document=csaf_document, vulnerability_id=vulnerability_id, @@ -178,13 +164,9 @@ def _process_vulnerabilities( vex_statements.add(vex_statement) product_purls.add(product_component.product_purl) - known_not_affected = product_status.get( - CSAF_Status.CSAF_STATUS_NOT_AFFECTED, [] - ) + known_not_affected = product_status.get(CSAF_Status.CSAF_STATUS_NOT_AFFECTED, []) for product_id in known_not_affected: - product_component = _get_product_component( - product_id, products, relationships - ) + product_component = _get_product_component(product_id, products, relationships) vex_statement = VEX_Statement( document=csaf_document, vulnerability_id=vulnerability_id, @@ -199,13 +181,9 @@ def _process_vulnerabilities( vex_statements.add(vex_statement) product_purls.add(product_component.product_purl) - under_investigation = product_status.get( - CSAF_Status.CSAF_STATUS_UNDER_INVESTIGATION, [] - ) + under_investigation = product_status.get(CSAF_Status.CSAF_STATUS_UNDER_INVESTIGATION, []) for product_id in under_investigation: - product_component = _get_product_component( - product_id, products, relationships - ) + product_component = _get_product_component(product_id, products, relationships) vex_statement = VEX_Statement( document=csaf_document, vulnerability_id=vulnerability_id, @@ -220,9 +198,7 @@ def _process_vulnerabilities( fixed = product_status.get(CSAF_Status.CSAF_STATUS_FIXED, []) for product_id in fixed: - product_component = _get_product_component( - product_id, products, relationships - ) + product_component = _get_product_component(product_id, products, relationships) vex_statement = VEX_Statement( document=csaf_document, vulnerability_id=vulnerability_id, @@ -248,9 +224,7 @@ def _get_product_component( product_purl = products.get(relationship.product_id) component_purl = products.get(relationship.component_id) if product_purl and component_purl: - return Product_Component( - product_purl=product_purl, component_purl=component_purl - ) + return Product_Component(product_purl=product_purl, component_purl=component_purl) raise ValidationError(f"Product or relationship data not found for {product_id}") diff --git a/backend/application/vex/services/cyclonedx_generator.py b/backend/application/vex/services/cyclonedx_generator.py new file mode 100644 index 000000000..70f315c6a --- /dev/null +++ b/backend/application/vex/services/cyclonedx_generator.py @@ -0,0 +1,282 @@ +import hashlib +from dataclasses import dataclass +from datetime import datetime +from typing import Optional +from uuid import UUID + +from cyclonedx.model.bom import Bom, Property +from cyclonedx.model.bom_ref import BomRef +from cyclonedx.model.component import Component, ComponentType +from cyclonedx.model.contact import OrganizationalContact, OrganizationalEntity +from cyclonedx.model.impact_analysis import ( + ImpactAnalysisJustification, + ImpactAnalysisState, +) +from cyclonedx.model.vulnerability import ( + BomTarget, + Vulnerability, + VulnerabilityAnalysis, +) +from django.utils import timezone +from rest_framework.exceptions import NotFound, ValidationError + +from application.__init__ import __version__ +from 
application.access_control.services.current_user import get_current_user +from application.authorization.services.authorization import user_has_permission_or_403 +from application.authorization.services.roles_permissions import Permissions +from application.core.models import Branch, Product +from application.core.queries.observation import get_current_modifying_observation_log +from application.core.types import Status, VEX_Justification +from application.vex.models import ( + CycloneDX, + CycloneDX_Branch, + CycloneDX_Vulnerability, +) +from application.vex.queries.cyclonedx import get_cyclonedx_by_document_id +from application.vex.services.vex_base import ( + check_product_or_vulnerabilities, + check_vulnerability_names, + get_observations_for_product, + get_observations_for_vulnerabilities, +) + + +@dataclass() +class CycloneDXCreateParameters: + product: Optional[Product] + vulnerability_names: list[str] + branches: list[Branch] + document_id_prefix: str + author: str + manufacturer: str + + +@dataclass() +class CycloneDXUpdateParameters: + document_id_prefix: str + document_base_id: str + author: str + manufacturer: str + + +def create_cyclonedx_document( + parameters: CycloneDXCreateParameters, +) -> Optional[Bom]: + check_product_or_vulnerabilities(parameters.product, parameters.vulnerability_names) + check_vulnerability_names(parameters.vulnerability_names) + + user = get_current_user() + if not user: + raise ValueError("No user in request") + + bom = Bom() + bom.version = 1 + + now = timezone.now() + + cyclonedx = CycloneDX.objects.create( + product=parameters.product, + document_id_prefix=parameters.document_id_prefix, + document_base_id=str(bom.serial_number), + author=parameters.author, + manufacturer=parameters.manufacturer, + version=1, + user=user, + first_issued=now, + last_updated=now, + ) + for vulnerability_name in parameters.vulnerability_names: + if vulnerability_name is None: + vulnerability_name = "" + CycloneDX_Vulnerability.objects.create(cyclonedx=cyclonedx, name=vulnerability_name) + + for branch in parameters.branches: + CycloneDX_Branch.objects.create(cyclonedx=cyclonedx, branch=branch) + + _add_metadata(bom, now, parameters.document_id_prefix, parameters.author, parameters.manufacturer) + + vulnerabilities = _create_vulnerabilities( + parameters.product, parameters.branches, parameters.vulnerability_names, cyclonedx + ) + bom.vulnerabilities = set(vulnerabilities) + + if not vulnerabilities: + cyclonedx.delete() + return None + + vulnerabilities_string = _get_vulnerabilities_string(vulnerabilities) + cyclonedx.content_hash = hashlib.sha256(vulnerabilities_string.casefold().encode("utf-8").strip()).hexdigest() + cyclonedx.save() + + return bom + + +def update_cyclonedx_document( + parameters: CycloneDXUpdateParameters, +) -> Optional[Bom]: + cyclonedx = get_cyclonedx_by_document_id(parameters.document_id_prefix, parameters.document_base_id) + if not cyclonedx: + raise NotFound( + f"CycloneDX document with ids {parameters.document_id_prefix}" + + f" and {parameters.document_base_id} does not exist" + ) + + user_has_permission_or_403(cyclonedx, Permissions.VEX_Edit) + + cyclonedx_branch_ids = CycloneDX_Branch.objects.filter(cyclonedx=cyclonedx).values_list("branch", flat=True) + cyclonedx_branches = list(Branch.objects.filter(id__in=cyclonedx_branch_ids)) + + cyclonedx_vulnerability_names = list( + CycloneDX_Vulnerability.objects.filter(cyclonedx=cyclonedx).values_list("name", flat=True) + ) + + vulnerabilities = _create_vulnerabilities( + cyclonedx.product, 
cyclonedx_branches, cyclonedx_vulnerability_names, cyclonedx + ) + + vulnerabilities_string = _get_vulnerabilities_string(vulnerabilities) + vulnerabilities_hash = hashlib.sha256(vulnerabilities_string.casefold().encode("utf-8").strip()).hexdigest() + if vulnerabilities_hash == cyclonedx.content_hash: + return None + + now = timezone.now() + + bom = Bom() + bom.version = cyclonedx.version + 1 + bom.serial_number = UUID(cyclonedx.document_base_id) + _add_metadata(bom, now, cyclonedx.document_id_prefix, parameters.author, parameters.manufacturer) + bom.vulnerabilities = set(vulnerabilities) + + for vulnerability in bom.vulnerabilities: + if vulnerability.analysis: + vulnerability.analysis.last_updated = now + + cyclonedx.author = parameters.author if parameters.author else "" + cyclonedx.manufacturer = parameters.manufacturer if parameters.manufacturer else "" + cyclonedx.version += 1 + cyclonedx.content_hash = vulnerabilities_hash + cyclonedx.last_updated = now + cyclonedx.save() + + return bom + + +def _add_metadata(bom: Bom, timestamp: datetime, document_id_prefix: str, author: str, manufacturer: str) -> None: + bom.metadata.tools.components.add(Component(name=f"SecObserve / {__version__}", type=ComponentType.APPLICATION)) + bom.metadata.timestamp = timestamp + bom.metadata.properties.add(Property(name="prefix", value=document_id_prefix)) + if author: + bom.metadata.authors.add(OrganizationalContact(name=author)) + if manufacturer: + bom.metadata.manufacturer = OrganizationalEntity(name=manufacturer) + + +def _create_vulnerabilities( + product: Optional[Product], branches: list[Branch], vulnerability_names: list[str], cyclonedx: CycloneDX +) -> list[Vulnerability]: + vulnerabilities: dict[str, Vulnerability] = {} + if product: + observations = get_observations_for_product(product, vulnerability_names, branches) + else: + observations = get_observations_for_vulnerabilities(vulnerability_names=vulnerability_names) + for observation in observations: + if not observation.origin_component_cyclonedx_bom_link: + raise ValidationError(f"Observation {observation.title} doesn't have a BOM-link") + + state = _map_current_status_to_cyclonedx_state(observation.current_status) + if not state: + continue + + justification = None + detail = None + observation_log = get_current_modifying_observation_log(observation) + if observation_log: + justification = _map_vex_justification_to_cyclonedx_justification(observation_log.vex_justification) + detail = observation_log.comment + + analysis = VulnerabilityAnalysis( + state=state, + justification=justification, + detail=detail, + first_issued=cyclonedx.first_issued, + last_updated=cyclonedx.last_updated, + ) + + analysis_hash = hashlib.sha256( + f"{str(vars(analysis))}_{observation.description}_{observation.recommendation}".casefold() + .encode("utf-8") + .strip() + ).hexdigest() + + vulnerability = vulnerabilities.get(f"{observation.vulnerability_id}_{analysis_hash}") + if not vulnerability: + vulnerability = Vulnerability( + bom_ref=BomRef( + value=str( + hashlib.sha256( + f"{observation.pk}_{observation.title}".casefold().encode("utf-8").strip() + ).hexdigest() + ) + ), + id=observation.vulnerability_id, + description=observation.description if observation.description else None, + recommendation=( + observation.recommendation + if observation.recommendation + and analysis.state in [ImpactAnalysisState.EXPLOITABLE, ImpactAnalysisState.IN_TRIAGE] + else None + ), + analysis=analysis, + ) + vulnerabilities[f"{observation.vulnerability_id}_{analysis_hash}"] = 
vulnerability + + vulnerability.affects.add(BomTarget(ref=observation.origin_component_cyclonedx_bom_link)) + + return sorted(vulnerabilities.values()) + + +def _get_vulnerabilities_string(vulnerabilities: list[Vulnerability]) -> str: + vulnerability_string = "" + for vulnerability in vulnerabilities: + vulnerability_string += str(vulnerability.id) + vulnerability_string += vulnerability.description if vulnerability.description else "" + vulnerability_string += vulnerability.recommendation if vulnerability.recommendation else "" + vulnerability_string += str(vars(vulnerability.analysis)) + for affects in vulnerability.affects: + vulnerability_string += str(vars(affects)) + + return vulnerability_string + + +def _map_current_status_to_cyclonedx_state(current_status: str) -> Optional[ImpactAnalysisState]: + mapping = { + Status.STATUS_OPEN: ImpactAnalysisState.EXPLOITABLE, + Status.STATUS_RESOLVED: ImpactAnalysisState.RESOLVED, + Status.STATUS_DUPLICATE: None, + Status.STATUS_FALSE_POSITIVE: ImpactAnalysisState.FALSE_POSITIVE, + Status.STATUS_IN_REVIEW: ImpactAnalysisState.IN_TRIAGE, + Status.STATUS_NOT_AFFECTED: ImpactAnalysisState.NOT_AFFECTED, + Status.STATUS_NOT_SECURITY: ImpactAnalysisState.NOT_AFFECTED, + Status.STATUS_RISK_ACCEPTED: ImpactAnalysisState.EXPLOITABLE, + } + return mapping.get(current_status) + + +def _map_vex_justification_to_cyclonedx_justification(justification: str) -> Optional[ImpactAnalysisJustification]: + mapping = { + VEX_Justification.JUSTIFICATION_COMPONENT_NOT_PRESENT: ImpactAnalysisJustification.REQUIRES_DEPENDENCY, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_NOT_PRESENT: ImpactAnalysisJustification.CODE_NOT_PRESENT, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH: ImpactAnalysisJustification.CODE_NOT_REACHABLE, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY: ImpactAnalysisJustification.PROTECTED_BY_MITIGATING_CONTROL, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_INLINE_MITIGATIONS_ALREADY_EXIST: ImpactAnalysisJustification.PROTECTED_BY_MITIGATING_CONTROL, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_CODE_NOT_PRESENT: ImpactAnalysisJustification.CODE_NOT_PRESENT, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_CODE_NOT_REACHABLE: ImpactAnalysisJustification.CODE_NOT_REACHABLE, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_REQUIRES_CONFIGURATION: ImpactAnalysisJustification.REQUIRES_CONFIGURATION, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_REQUIRES_DEPENDENCY: ImpactAnalysisJustification.REQUIRES_DEPENDENCY, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_REQUIRES_ENVIRONMENT: ImpactAnalysisJustification.REQUIRES_ENVIRONMENT, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_PROTECTED_BY_COMPILER: ImpactAnalysisJustification.PROTECTED_BY_COMPILER, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_PROTECTED_AT_RUNTIME: ImpactAnalysisJustification.PROTECTED_AT_RUNTIME, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_PROTECTED_AT_PERIMETER: ImpactAnalysisJustification.PROTECTED_AT_PERIMITER, # noqa: E501 pylint: 
disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_PROTECTED_BY_MITIGATING_CONTROL: ImpactAnalysisJustification.PROTECTED_BY_MITIGATING_CONTROL, # noqa: E501 pylint: disable=line-too-long + } + return mapping.get(justification) diff --git a/backend/application/vex/services/cyclonedx_parser.py b/backend/application/vex/services/cyclonedx_parser.py new file mode 100644 index 000000000..b86a96235 --- /dev/null +++ b/backend/application/vex/services/cyclonedx_parser.py @@ -0,0 +1,332 @@ +from dataclasses import dataclass +from typing import Optional + +from django.utils import timezone +from rest_framework.exceptions import ValidationError + +from application.core.api.serializers_helpers import validate_purl +from application.vex.models import VEX_Document, VEX_Statement +from application.vex.services.vex_engine import apply_vex_statements_after_import +from application.vex.types import ( + CycloneDX_Analysis_State, + VEX_Document_Type, + VEX_Status, +) + + +@dataclass +class CycloneDX_Analysis: + state: str = "" + justification: str = "" + response: Optional[list[str]] = None + detail: str = "" + first_issued: str = "" + last_updated: str = "" + + +@dataclass +class VexStatementData: + vulnerability_id: str + description: str + status: str + justification: str + impact: str + remediation: str + product_purl: str + component_purl: str = "" + component_cyclonedx_bom_link: str = "" + + +def parse_cyclonedx_data(data: dict) -> None: + cyclonedx_document = _create_cyclonedx_document(data) + + product_purls, vex_statements = _process_vex_statements(data, cyclonedx_document) + + apply_vex_statements_after_import(product_purls, vex_statements) + + +def _create_cyclonedx_document(data: dict) -> VEX_Document: + document_id = data.get("serialNumber") + if not document_id: + raise ValidationError("serialNumber is missing") + + version = str(data.get("version", 1)) + + metadata = data.get("metadata", {}) + + timestamp = metadata.get("timestamp") + if not timestamp: + timestamp = timezone.now() + + author = None + # Prefer authors list if available + authors = metadata.get("authors") + if authors and isinstance(authors, list) and len(authors) > 0: + # Find the first author with a name set + author = next( + (item.get("name") for item in authors if isinstance(item, dict) and item.get("name")), + None, + ) + + # Fall back to manufacturer or supplier if no authors + if not author: + author = metadata.get("manufacturer", {}).get("name") or metadata.get("supplier", {}).get("name") + + if not author: + author = "Unknown" + + try: + cyclonedx_document = VEX_Document.objects.get(document_id=document_id, author=author) + cyclonedx_document.delete() + except VEX_Document.DoesNotExist: + pass + + cyclonedx_document = VEX_Document.objects.create( + type=VEX_Document_Type.VEX_DOCUMENT_TYPE_CYCLONEDX, + document_id=document_id, + version=version, + initial_release_date=timestamp, + current_release_date=timestamp, + author=author, + role="", + ) + + return cyclonedx_document + + +def _process_vex_statements(data: dict, cyclonedx_document: VEX_Document) -> tuple[set[str], set[VEX_Statement]]: + vulnerabilities = data.get("vulnerabilities", []) + if not vulnerabilities: + raise ValidationError("CycloneDX document doesn't contain any vulnerabilities") + if not isinstance(vulnerabilities, list): + raise ValidationError("vulnerabilities is not a list") + + components_map = _build_components_map(data) + + product_purl = data.get("metadata", {}).get("component", {}).get("purl", "") + if product_purl: + 
validate_purl(product_purl) + + product_purls: set[str] = set() + vex_statements: set[VEX_Statement] = set() + + vulnerability_counter = 0 + for vulnerability in vulnerabilities: + if not isinstance(vulnerability, dict): + raise ValidationError(f"vulnerability[{vulnerability_counter}] is not a dictionary") + + vulnerability_id = vulnerability.get("id") + if not vulnerability_id: + raise ValidationError(f"vulnerability[{vulnerability_counter}]/id is missing") + + analysis = vulnerability.get("analysis", {}) + if not analysis: + # Skip vulnerabilities without analysis + vulnerability_counter += 1 + continue + + cyclonedx_analysis = _parse_analysis(analysis, vulnerability_counter) + + vex_status = _map_cyclonedx_state_to_vex_status(cyclonedx_analysis.state) + if not vex_status: + raise ValidationError( + f"vulnerability[{vulnerability_counter}]/analysis/state is not valid: {cyclonedx_analysis.state}" + ) + + description = vulnerability.get("description", "") + detail = vulnerability.get("detail", "") + if detail: + description += f"\n\n{detail}" + + remediation = _build_remediation_text(cyclonedx_analysis.response, vulnerability.get("recommendation", "")) + + affects = vulnerability.get("affects", []) + if not affects: + # General statement for the product + _create_vex_statement( + cyclonedx_document, + product_purls, + vex_statements, + data=VexStatementData( + vulnerability_id=vulnerability_id, + description=description, + status=vex_status, + justification=cyclonedx_analysis.justification, + impact=cyclonedx_analysis.detail, + remediation=remediation, + product_purl=product_purl, + ), + ) + elif not isinstance(affects, list): + raise ValidationError(f"affects[{vulnerability_counter}] is not a list") + else: + _process_affected_components( + document=cyclonedx_document, + product_purls=product_purls, + vex_statements=vex_statements, + vulnerability_counter=vulnerability_counter, + vex_data=VexStatementData( + vulnerability_id=vulnerability_id, + description=description, + status=vex_status, + justification=cyclonedx_analysis.justification, + impact=cyclonedx_analysis.detail, + remediation=remediation, + product_purl=product_purl, + ), + affects=affects, + components_map=components_map, + ) + + vulnerability_counter += 1 + + return product_purls, vex_statements + + +def _build_components_map(data: dict) -> dict[str, dict]: + components_map = {} + + # Add root component from metadata + metadata_component = data.get("metadata", {}).get("component") + if metadata_component and metadata_component.get("bom-ref"): + components_map[metadata_component["bom-ref"]] = metadata_component + + # Add all components + for component in data.get("components", []): + if component.get("bom-ref"): + components_map[component["bom-ref"]] = component + + return components_map + + +def _parse_analysis(analysis: dict, vulnerability_counter: int) -> CycloneDX_Analysis: + state = analysis.get("state", "") + if not state: + raise ValidationError(f"vulnerability[{vulnerability_counter}]/analysis/state is missing") + + justification = analysis.get("justification", "") + response = analysis.get("response", []) + if not isinstance(response, list): + response = [] + + detail = analysis.get("detail", "") + first_issued = analysis.get("firstIssued", "") + last_updated = analysis.get("lastUpdated", "") + + return CycloneDX_Analysis( + state=state, + justification=justification, + response=response, + detail=detail, + first_issued=first_issued, + last_updated=last_updated, + ) + + +def _map_cyclonedx_state_to_vex_status(state: 
str) -> Optional[str]: + mapping = { + CycloneDX_Analysis_State.CYCLONEDX_STATE_RESOLVED: VEX_Status.VEX_STATUS_FIXED, + CycloneDX_Analysis_State.CYCLONEDX_STATE_RESOLVED_WITH_PEDIGREE: VEX_Status.VEX_STATUS_FIXED, + CycloneDX_Analysis_State.CYCLONEDX_STATE_EXPLOITABLE: VEX_Status.VEX_STATUS_AFFECTED, + CycloneDX_Analysis_State.CYCLONEDX_STATE_IN_TRIAGE: VEX_Status.VEX_STATUS_UNDER_INVESTIGATION, + CycloneDX_Analysis_State.CYCLONEDX_STATE_FALSE_POSITIVE: VEX_Status.VEX_STATUS_FALSE_POSITIVE, + CycloneDX_Analysis_State.CYCLONEDX_STATE_NOT_AFFECTED: VEX_Status.VEX_STATUS_NOT_AFFECTED, + } + return mapping.get(state) + + +def _build_remediation_text(response: Optional[list[str]], recommendation: str) -> str: + remediation_parts = [] + + if response: + response_text = ", ".join(response) + remediation_parts.append(f"Response: {response_text}") + + if recommendation: + remediation_parts.append(recommendation) + + return "; ".join(remediation_parts) + + +def _process_affected_components( + *, + document: VEX_Document, + product_purls: set[str], + vex_statements: set[VEX_Statement], + vulnerability_counter: int, + vex_data: VexStatementData, + affects: list, + components_map: dict, +) -> None: + affected_counter = 0 + for affected in affects: + if not isinstance(affected, dict): + raise ValidationError(f"affects[{vulnerability_counter}][{affected_counter}] is not a dictionary") + + ref = affected.get("ref") + if not ref: + raise ValidationError(f"affects[{vulnerability_counter}][{affected_counter}]/ref is missing") + + if ref.startswith("urn:cdx:"): + component_purl = "" + vex_data.product_purl = "" + component_cyclonedx_bom_link = ref + else: + component_cyclonedx_bom_link = "" + component = components_map.get(ref) + if not component: + raise ValidationError( + f"affects[{vulnerability_counter}][{affected_counter}]/ref '{ref}' not found in components" + ) + + component_purl = component.get("purl", "") + if not component_purl: + raise ValidationError( + f"affects[{vulnerability_counter}][{affected_counter}]/ref '{ref}' component is missing PURL" + ) + validate_purl(component_purl) + + if not vex_data.product_purl: + raise ValidationError("metadata/component/purl is missing for VEX data inside an SBOM") + + _create_vex_statement( + document, + product_purls, + vex_statements, + data=VexStatementData( + vulnerability_id=vex_data.vulnerability_id, + description=vex_data.description, + status=vex_data.status, + justification=vex_data.justification, + impact=vex_data.impact, + remediation=vex_data.remediation, + product_purl=vex_data.product_purl, + component_purl=component_purl, + component_cyclonedx_bom_link=component_cyclonedx_bom_link, + ), + ) + + affected_counter += 1 + + +def _create_vex_statement( + document: VEX_Document, + product_purls: set[str], + vex_statements: set[VEX_Statement], + data: VexStatementData, +) -> None: + vex_statement = VEX_Statement( + document=document, + vulnerability_id=data.vulnerability_id, + description=data.description, + status=data.status, + justification=data.justification, + impact=data.impact, + remediation=data.remediation, + product_purl=data.product_purl, + component_purl=data.component_purl, + component_cyclonedx_bom_link=data.component_cyclonedx_bom_link, + ) + vex_statement.save() + vex_statements.add(vex_statement) + product_purls.add(data.product_purl) diff --git a/backend/application/vex/services/openvex_generator.py b/backend/application/vex/services/openvex_generator.py index 66a8915d1..a17817942 100644 --- 
a/backend/application/vex/services/openvex_generator.py +++ b/backend/application/vex/services/openvex_generator.py @@ -6,9 +6,9 @@ from rest_framework.exceptions import NotFound from application.__init__ import __version__ -from application.access_control.services.authorization import user_has_permission_or_403 -from application.access_control.services.roles_permissions import Permissions -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user +from application.authorization.services.authorization import user_has_permission_or_403 +from application.authorization.services.roles_permissions import Permissions from application.core.models import Branch, Observation, Product from application.core.queries.observation import get_current_modifying_observation_log from application.core.types import Status @@ -16,8 +16,6 @@ from application.vex.queries.openvex import get_openvex_by_document_id from application.vex.services.openvex_generator_helpers import OpenVEXVulnerabilityCache from application.vex.services.vex_base import ( - check_and_get_product, - check_branch_names, check_product_or_vulnerabilities, check_vulnerability_names, create_document_base_id, @@ -26,6 +24,7 @@ get_observations_for_vulnerability, get_product_id, get_vulnerability_url, + map_vex_justification_to_csaf_openvex_justification, ) from application.vex.types import ( OpenVEX_Status, @@ -40,9 +39,9 @@ @dataclass() class OpenVEXCreateParameters: - product_id: int + product: Optional[Product] vulnerability_names: list[str] - branch_names: list[str] + branches: list[Branch] id_namespace: str document_id_prefix: str author: str @@ -60,15 +59,8 @@ class OpenVEXUpdateParameters: def create_openvex_document( parameters: OpenVEXCreateParameters, ) -> Optional[OpenVEXDocument]: - check_product_or_vulnerabilities( - parameters.product_id, parameters.vulnerability_names - ) - product = check_and_get_product(parameters.product_id) - if product: - user_has_permission_or_403(product, Permissions.VEX_Create) - + check_product_or_vulnerabilities(parameters.product, parameters.vulnerability_names) check_vulnerability_names(parameters.vulnerability_names) - branches = check_branch_names(parameters.branch_names, product) user = get_current_user() if not user: @@ -77,7 +69,7 @@ def create_openvex_document( document_base_id = create_document_base_id(parameters.document_id_prefix) openvex = OpenVEX.objects.create( - product=product, + product=parameters.product, id_namespace=parameters.id_namespace, document_id_prefix=parameters.document_id_prefix, document_base_id=document_base_id, @@ -91,12 +83,10 @@ def create_openvex_document( vulnerability_name = "" OpenVEX_Vulnerability.objects.create(openvex=openvex, name=vulnerability_name) - for branch in branches: + for branch in parameters.branches: OpenVEX_Branch.objects.create(openvex=openvex, branch=branch) - document_id = _get_document_id( - parameters.id_namespace, parameters.document_id_prefix, document_base_id - ) + document_id = _get_document_id(parameters.id_namespace, parameters.document_id_prefix, document_base_id) openvex_document = OpenVEXDocument( context="https://openvex.dev/ns/v0.2.0", id=document_id, @@ -110,9 +100,9 @@ def create_openvex_document( ) statements = [] - if product: + if parameters.product: statements = _get_statements_for_product( - product, parameters.vulnerability_names, branches + parameters.product, parameters.vulnerability_names, parameters.branches ) else: statements = 
_get_statements_for_vulnerabilities(parameters.vulnerability_names) @@ -122,9 +112,7 @@ def create_openvex_document( return None statements_json = jsonpickle.encode(statements, unpicklable=False) - statements_hash = hashlib.sha256( - statements_json.casefold().encode("utf-8").strip() - ).hexdigest() + statements_hash = hashlib.sha256(statements_json.casefold().encode("utf-8").strip()).hexdigest() openvex.content_hash = statements_hash openvex.save() @@ -137,9 +125,7 @@ def create_openvex_document( def update_openvex_document( parameters: OpenVEXUpdateParameters, ) -> Optional[OpenVEXDocument]: - openvex = get_openvex_by_document_id( - parameters.document_id_prefix, parameters.document_base_id - ) + openvex = get_openvex_by_document_id(parameters.document_id_prefix, parameters.document_base_id) if not openvex: raise NotFound( f"OpenVEX document with ids {parameters.document_id_prefix}" @@ -149,28 +135,20 @@ def update_openvex_document( user_has_permission_or_403(openvex, Permissions.VEX_Edit) openvex_vulnerability_names = list( - OpenVEX_Vulnerability.objects.filter(openvex=openvex).values_list( - "name", flat=True - ) + OpenVEX_Vulnerability.objects.filter(openvex=openvex).values_list("name", flat=True) ) - openvex_branch_ids = OpenVEX_Branch.objects.filter(openvex=openvex).values_list( - "branch", flat=True - ) + openvex_branch_ids = OpenVEX_Branch.objects.filter(openvex=openvex).values_list("branch", flat=True) openvex_branches = list(Branch.objects.filter(id__in=openvex_branch_ids)) statements = [] if openvex.product: - statements = _get_statements_for_product( - openvex.product, openvex_vulnerability_names, openvex_branches - ) + statements = _get_statements_for_product(openvex.product, openvex_vulnerability_names, openvex_branches) else: statements = _get_statements_for_vulnerabilities(openvex_vulnerability_names) statements_json = jsonpickle.encode(statements, unpicklable=False) - statements_hash = hashlib.sha256( - statements_json.casefold().encode("utf-8").strip() - ).hexdigest() + statements_hash = hashlib.sha256(statements_json.casefold().encode("utf-8").strip()).hexdigest() if statements_hash == openvex.content_hash: return None @@ -183,9 +161,7 @@ def update_openvex_document( openvex.content_hash = statements_hash openvex.save() - document_id = _get_document_id( - openvex.id_namespace, openvex.document_id_prefix, parameters.document_base_id - ) + document_id = _get_document_id(openvex.id_namespace, openvex.document_id_prefix, parameters.document_base_id) openvex_document = OpenVEXDocument( context="https://openvex.dev/ns/v0.2.0", id=document_id, @@ -204,9 +180,7 @@ def update_openvex_document( return openvex_document -def _get_document_id( - id_namespace: str, document_id_prefix: str, document_base_id: str -) -> str: +def _get_document_id(id_namespace: str, document_id_prefix: str, document_base_id: str) -> str: if not id_namespace.endswith("/"): id_namespace += "/" @@ -237,15 +211,11 @@ def _get_statements_for_vulnerabilities( + str(prepared_statement.action_statement) + str(prepared_statement.impact_statement) ) - hashed_string = hashlib.sha256( - string_to_hash.casefold().encode("utf-8").strip() - ).hexdigest() + hashed_string = hashlib.sha256(string_to_hash.casefold().encode("utf-8").strip()).hexdigest() existing_statement = statements.get(hashed_string) if existing_statement: - existing_product = get_openvex_product_by_id( - existing_statement.products, get_product_id(observation) - ) + existing_product = get_openvex_product_by_id(existing_statement.products, 
get_product_id(observation)) if existing_product: _add_subcomponent(observation, existing_product) else: @@ -275,9 +245,7 @@ def _get_statements_for_product( if not prepared_statement: continue - openvex_vulnerability = vulnerability_cache.get_vulnerability( - observation.vulnerability_id - ) + openvex_vulnerability = vulnerability_cache.get_vulnerability(observation.vulnerability_id) if not openvex_vulnerability: openvex_vulnerability = OpenVEXVulnerability( name=observation.vulnerability_id, @@ -294,21 +262,15 @@ def _get_statements_for_product( + str(prepared_statement.action_statement) + str(prepared_statement.impact_statement) ) - hashed_string = hashlib.sha256( - string_to_hash.casefold().encode("utf-8").strip() - ).hexdigest() + hashed_string = hashlib.sha256(string_to_hash.casefold().encode("utf-8").strip()).hexdigest() existing_statement = statements.get(hashed_string) if existing_statement: - existing_product = get_openvex_product_by_id( - existing_statement.products, get_product_id(observation) - ) + existing_product = get_openvex_product_by_id(existing_statement.products, get_product_id(observation)) if existing_product: _add_subcomponent(observation, existing_product) else: - raise ValueError( - f"Product {product.name} not found in existing statement" - ) + raise ValueError(f"Product {product.name} not found in existing statement") else: prepared_statement.vulnerability = openvex_vulnerability openvex_product = _create_product(observation) @@ -332,15 +294,15 @@ def _prepare_statement(observation: Observation) -> Optional[OpenVEXStatement]: if observation.recommendation: openvex_action_statement = observation.recommendation else: - openvex_action_statement = ( - "No recommendation for remediation or mitigation available" - ) + openvex_action_statement = "No recommendation for remediation or mitigation available" else: observation_log = get_current_modifying_observation_log(observation) if openvex_status == OpenVEX_Status.OPENVEX_STATUS_NOT_AFFECTED: if observation_log: if observation_log.vex_justification: - openvex_justification = observation_log.vex_justification + openvex_justification = map_vex_justification_to_csaf_openvex_justification( + observation_log.vex_justification + ) openvex_impact_statement = observation_log.comment else: openvex_impact_statement = "No impact statement available" @@ -382,22 +344,18 @@ def _map_status(secobserve_status: str) -> Optional[str]: raise ValueError(f"Invalid status {secobserve_status}") -def get_openvex_product_by_id( - openvex_products: list[OpenVEXProduct], product_id: str -) -> Optional[OpenVEXProduct]: +def get_openvex_product_by_id(openvex_products: list[OpenVEXProduct], product_id: str) -> Optional[OpenVEXProduct]: for openvex_product in openvex_products: if openvex_product.id == product_id: return openvex_product return None -def _add_subcomponent(observation: Observation, existing_product: OpenVEXProduct): +def _add_subcomponent(observation: Observation, existing_product: OpenVEXProduct) -> None: if get_component_id(observation): openvex_subcomponent = OpenVEXSubcomponent(id=get_component_id(observation)) if openvex_subcomponent not in existing_product.subcomponents: - existing_product.subcomponents.append( - OpenVEXSubcomponent(id=get_component_id(observation)) - ) + existing_product.subcomponents.append(OpenVEXSubcomponent(id=get_component_id(observation))) def _create_product(observation: Observation) -> OpenVEXProduct: @@ -406,16 +364,12 @@ def _create_product(observation: Observation) -> OpenVEXProduct: if 
observation.branch.purl or observation.branch.cpe23: purl = observation.branch.purl if observation.branch.purl else None cpe23 = observation.branch.cpe23 if observation.branch.cpe23 else None - openvex_product_identifiers = OpenVEXProductIdentifiers( - cpe23=cpe23, purl=purl - ) + openvex_product_identifiers = OpenVEXProductIdentifiers(cpe23=cpe23, purl=purl) else: if observation.product.purl or observation.product.cpe23: purl = observation.product.purl if observation.product.purl else None cpe23 = observation.product.cpe23 if observation.product.cpe23 else None - openvex_product_identifiers = OpenVEXProductIdentifiers( - cpe23=cpe23, purl=purl - ) + openvex_product_identifiers = OpenVEXProductIdentifiers(cpe23=cpe23, purl=purl) openvex_product = OpenVEXProduct( id=get_product_id(observation), diff --git a/backend/application/vex/services/openvex_parser.py b/backend/application/vex/services/openvex_parser.py index 1f2a29014..a254a9cda 100644 --- a/backend/application/vex/services/openvex_parser.py +++ b/backend/application/vex/services/openvex_parser.py @@ -45,9 +45,7 @@ def _create_openvex_document(data: dict) -> VEX_Document: role = data.get("role", "") try: - openvex_document = VEX_Document.objects.get( - document_id=document_id, author=author - ) + openvex_document = VEX_Document.objects.get(document_id=document_id, author=author) openvex_document.delete() except VEX_Document.DoesNotExist: pass @@ -64,9 +62,7 @@ def _create_openvex_document(data: dict) -> VEX_Document: return openvex_document -def _process_vex_statements( - data: dict, openvex_document: VEX_Document -) -> tuple[set[str], set[VEX_Statement]]: +def _process_vex_statements(data: dict, openvex_document: VEX_Document) -> tuple[set[str], set[VEX_Statement]]: statements = data.get("statements", []) if not statements: raise ValidationError("OpenVEX document doesn't contain any statements") @@ -83,14 +79,10 @@ def _process_vex_statements( openvex_statement = OpenVEX_Statement() - openvex_statement.vulnerability_id = statement.get("vulnerability", {}).get( - "name" - ) + openvex_statement.vulnerability_id = statement.get("vulnerability", {}).get("name") if not openvex_statement.vulnerability_id: raise ValidationError(f"vulnerability[{statement_counter}]/name is missing") - openvex_statement.description = statement.get("vulnerability", {}).get( - "description" - ) + openvex_statement.description = statement.get("vulnerability", {}).get("description") openvex_statement.status = statement.get("status", "") if not openvex_statement.status: raise ValidationError(f"status[{statement_counter}] is missing") @@ -98,26 +90,18 @@ def _process_vex_statements( openvex_statement.status, openvex_statement.status, ) not in VEX_Status.VEX_STATUS_CHOICES: - raise ValidationError( - f"status[{statement_counter}] is not valid: {openvex_statement.status}" - ) + raise ValidationError(f"status[{statement_counter}] is not valid: {openvex_statement.status}") openvex_statement.justification = statement.get("justification", "") openvex_statement.impact = statement.get("impact_statement", "") - if ( - openvex_statement.status == OpenVEX_Status.OPENVEX_STATUS_NOT_AFFECTED - and not (openvex_statement.justification or openvex_statement.impact) + if openvex_statement.status == OpenVEX_Status.OPENVEX_STATUS_NOT_AFFECTED and not ( + openvex_statement.justification or openvex_statement.impact ): raise ValidationError( f"justification or impact_statement is required for status[{statement_counter}] not affected" ) openvex_statement.remediation = 
statement.get("action_statement", "") - if ( - openvex_statement.status == OpenVEX_Status.OPENVEX_STATUS_AFFECTED - and not openvex_statement.remediation - ): - raise ValidationError( - f"action_statement is required for status[{statement_counter}] affected" - ) + if openvex_statement.status == OpenVEX_Status.OPENVEX_STATUS_AFFECTED and not openvex_statement.remediation: + raise ValidationError(f"action_statement is required for status[{statement_counter}] affected") _process_products( openvex_document=openvex_document, @@ -141,21 +125,17 @@ def _process_products( statement_counter: int, statement: dict, openvex_statement: OpenVEX_Statement, -): +) -> None: products = statement.get("products", []) if not products: - raise ValidationError( - f"statement[{statement_counter}] doesn't contain any products" - ) + raise ValidationError(f"statement[{statement_counter}] doesn't contain any products") if not isinstance(products, list): raise ValidationError(f"products[{statement_counter}] is not a list") product_counter = 0 for product in products: if not isinstance(product, dict): - raise ValidationError( - f"product[{statement_counter}][{product_counter}] is not a dictionary" - ) + raise ValidationError(f"product[{statement_counter}][{product_counter}] is not a dictionary") product_purl = product.get("identifiers", {}).get("purl", "") if product_purl: validate_purl(product_purl) @@ -179,9 +159,7 @@ def _process_products( vex_statements.add(vex_statement) product_purls.add(product_purl) elif not isinstance(components, list): - raise ValidationError( - f"subcomponents[{statement_counter}][{product_counter}] is not a list" - ) + raise ValidationError(f"subcomponents[{statement_counter}][{product_counter}] is not a list") component_counter = 0 for component in components: diff --git a/backend/application/vex/services/vex_base.py b/backend/application/vex/services/vex_base.py index bb8cad84a..e004aa5b0 100644 --- a/backend/application/vex/services/vex_base.py +++ b/backend/application/vex/services/vex_base.py @@ -6,21 +6,20 @@ from application.core.models import Branch, Observation, Product from application.core.queries.observation import get_observations from application.core.queries.product import get_product_by_id +from application.core.types import VEX_Justification from application.vex.models import VEX_Counter def create_document_base_id(document_id_prefix: str) -> str: year = timezone.now().year - counter = VEX_Counter.objects.get_or_create( - document_id_prefix=document_id_prefix, year=year - )[0] + counter = VEX_Counter.objects.get_or_create(document_id_prefix=document_id_prefix, year=year)[0] counter.counter += 1 counter.save() return f"{counter.year}_{counter.counter:04d}" -def check_product_or_vulnerabilities(product_id, vulnerability_names): - if not product_id and not vulnerability_names: +def check_product_or_vulnerabilities(product: Optional[Product], vulnerability_names: list[str]) -> None: + if not product and not vulnerability_names: raise ValidationError("Either product or vulnerabilities or both must be set") @@ -41,14 +40,24 @@ def check_vulnerability_names(vulnerability_names: list[str]) -> None: for vulnerability_name in vulnerability_names: if not Observation.objects.filter(vulnerability_id=vulnerability_name).exists(): - raise ValidationError( - f"Vulnerability with name {vulnerability_name} does not exist" - ) + raise ValidationError(f"Vulnerability with name {vulnerability_name} does not exist") -def check_branch_names( - branch_names: list[str], product: Optional[Product] 
-) -> list[Branch]: +def check_branches(branches: list[int], product: Optional[Product]) -> list[Branch]: + if not branches: + return [] + + if not product: + raise ValidationError("Product must be set when using branch_names") + + product_branches = Branch.objects.filter(id__in=branches, product=product) + if len(product_branches) != len(branches): + raise ValidationError(f"Some of the branches do not exist for product {product.name}") + + return list(product_branches) + + +def check_branch_names(branch_names: list[str], product: Optional[Product]) -> list[Branch]: if not branch_names: return [] @@ -65,20 +74,19 @@ def check_branch_names( def get_observations_for_vulnerability( vulnerability_name: str, ) -> list[Observation]: - return list( - get_observations().filter(vulnerability_id=vulnerability_name).order_by("id") - ) + return list(get_observations().filter(vulnerability_id=vulnerability_name).order_by("id")) + + +def get_observations_for_vulnerabilities( + vulnerability_names: list[str], +) -> list[Observation]: + return list(get_observations().filter(vulnerability_id__in=vulnerability_names).order_by("id")) def get_observations_for_product( product: Product, vulnerability_names: list[str], branches: list[Branch] ) -> list[Observation]: - observations = ( - get_observations() - .filter(product_id=product.pk) - .exclude(vulnerability_id="") - .order_by("id") - ) + observations = get_observations().filter(product_id=product.pk).exclude(vulnerability_id="").order_by("id") if vulnerability_names: observations = observations.filter(vulnerability_id__in=vulnerability_names) @@ -129,3 +137,23 @@ def get_vulnerability_url(vulnerability_name: str) -> Optional[str]: if vulnerability_name.startswith(key): return value + vulnerability_name return None + + +def map_vex_justification_to_csaf_openvex_justification(justification: str) -> str: + mapping = { + VEX_Justification.JUSTIFICATION_COMPONENT_NOT_PRESENT: VEX_Justification.JUSTIFICATION_COMPONENT_NOT_PRESENT, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_NOT_PRESENT: VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_NOT_PRESENT, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH: VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY: VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_INLINE_MITIGATIONS_ALREADY_EXIST: VEX_Justification.JUSTIFICATION_INLINE_MITIGATIONS_ALREADY_EXIST, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_CODE_NOT_PRESENT: VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_NOT_PRESENT, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_CODE_NOT_REACHABLE: VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_REQUIRES_CONFIGURATION: VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_REQUIRES_DEPENDENCY: VEX_Justification.JUSTIFICATION_COMPONENT_NOT_PRESENT, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_REQUIRES_ENVIRONMENT: 
VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_PROTECTED_BY_COMPILER: VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_PROTECTED_AT_RUNTIME: VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_PROTECTED_AT_PERIMETER: VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, # noqa: E501 pylint: disable=line-too-long + VEX_Justification.JUSTIFICATION_CYCLONEDX_PROTECTED_BY_MITIGATING_CONTROL: VEX_Justification.JUSTIFICATION_INLINE_MITIGATIONS_ALREADY_EXIST, # noqa: E501 pylint: disable=line-too-long + } + return mapping.get(justification, "") diff --git a/backend/application/vex/services/vex_engine.py b/backend/application/vex/services/vex_engine.py index da9c010dd..6607b3646 100644 --- a/backend/application/vex/services/vex_engine.py +++ b/backend/application/vex/services/vex_engine.py @@ -2,7 +2,7 @@ from packageurl import PackageURL -from application.commons.services.global_request import get_current_user +from application.access_control.services.current_user import get_current_user from application.core.models import Branch, Observation, Product from application.core.services.observation import ( get_current_status, @@ -35,33 +35,36 @@ def __init__(self, product: Product, branch: Optional[Branch]): except ValueError: return - search_purl = PackageURL( - type=purl.type, namespace=purl.namespace, name=purl.name - ).to_string() + search_purl = PackageURL(type=purl.type, namespace=purl.namespace, name=purl.name).to_string() - self.vex_statements = list( - VEX_Statement.objects.filter(product_purl__startswith=search_purl) - ) + self.vex_statements = list(VEX_Statement.objects.filter(product_purl__startswith=search_purl)) def apply_vex_statements_for_observation(self, observation: Observation) -> None: - previous_vex_statement = ( - observation.vex_statement if observation.vex_statement else None - ) + previous_vex_statement = observation.vex_statement if observation.vex_statement else None observation.vex_statement = None vex_statement_found = False - for vex_statement in self.vex_statements: - vex_statement_found = apply_vex_statement_for_observation( - vex_statement, observation, previous_vex_statement + if observation.origin_component_cyclonedx_bom_link: + bom_vex_statements = VEX_Statement.objects.filter( + component_cyclonedx_bom_link=observation.origin_component_cyclonedx_bom_link ) - if vex_statement_found: - break + for bom_vex_statement in bom_vex_statements: + vex_statement_found = apply_vex_statement_for_observation( + bom_vex_statement, observation, previous_vex_statement + ) + if vex_statement_found: + break + + if not vex_statement_found: + for vex_statement in self.vex_statements: + vex_statement_found = apply_vex_statement_for_observation( + vex_statement, observation, previous_vex_statement + ) + if vex_statement_found: + break # Write observation and observation log if no vex_statement was found but there was one before - if not vex_statement_found and ( - previous_vex_statement != observation.vex_statement - or observation.vex_status - ): + if not vex_statement_found and (previous_vex_statement != observation.vex_statement or observation.vex_status): 
write_observation_log_no_vex_statement(observation, previous_vex_statement) @@ -70,8 +73,9 @@ def apply_vex_statement_for_observation( observation: Observation, previous_vex_statement: Optional[VEX_Statement], ) -> bool: - if vex_statement.vulnerability_id == observation.vulnerability_id and _match_purls( - vex_statement, observation + if vex_statement.vulnerability_id == observation.vulnerability_id and ( + (vex_statement.component_purl and _match_purls(vex_statement, observation)) + or (vex_statement.component_cyclonedx_bom_link and _match_cyclonedx_bom_links(vex_statement, observation)) ): previous_current_status = observation.current_status previous_vex_status = observation.vex_status @@ -81,9 +85,7 @@ def apply_vex_statement_for_observation( previous_current_vex_justification = observation.current_vex_justification previous_vex_vex_justification = observation.vex_vex_justification observation.vex_vex_justification = vex_statement.justification - observation.current_vex_justification = get_current_vex_justification( - observation - ) + observation.current_vex_justification = get_current_vex_justification(observation) observation.vex_statement = vex_statement @@ -94,12 +96,11 @@ def apply_vex_statement_for_observation( or previous_vex_status != observation.vex_status or previous_vex_statement != observation.vex_statement or previous_vex_vex_justification != observation.vex_vex_justification - or previous_current_vex_justification - != observation.current_vex_justification + or previous_current_vex_justification != observation.current_vex_justification ) - and not previous_vex_status == Status.STATUS_OPEN - and not observation.vex_status == Status.STATUS_OPEN - and not observation.current_status == Status.STATUS_OPEN + and previous_vex_status != Status.STATUS_OPEN + and observation.vex_status != Status.STATUS_OPEN + and observation.current_status != Status.STATUS_OPEN ): _write_observation_log( observation, @@ -115,9 +116,7 @@ def apply_vex_statement_for_observation( def _match_purls(vex_statement: VEX_Statement, observation: Observation) -> bool: product_purl = ( - observation.branch.purl - if observation.branch and observation.branch.purl - else observation.product.purl + observation.branch.purl if observation.branch and observation.branch.purl else observation.product.purl ) if not _match_purl(vex_statement.product_purl, product_purl): return False @@ -126,9 +125,7 @@ def _match_purls(vex_statement: VEX_Statement, observation: Observation) -> bool return _match_purl(vex_statement.component_purl, observation.origin_component_purl) -def _match_purl( - vex_purl_str: Optional[str], observation_purl_str: Optional[str] -) -> bool: +def _match_purl(vex_purl_str: Optional[str], observation_purl_str: Optional[str]) -> bool: if not vex_purl_str and not observation_purl_str: return True @@ -145,16 +142,8 @@ def _match_purl( vex_purl.type != observation_purl.type or vex_purl.namespace != observation_purl.namespace or vex_purl.name != observation_purl.name - or ( - vex_purl.version - and observation_purl.version - and vex_purl.version != observation_purl.version - ) - or ( - vex_purl.subpath - and observation_purl.subpath - and vex_purl.subpath != observation_purl.subpath - ) + or (vex_purl.version and observation_purl.version and vex_purl.version != observation_purl.version) + or (vex_purl.subpath and observation_purl.subpath and vex_purl.subpath != observation_purl.subpath) or not _check_qualifiers(vex_purl.qualifiers, observation_purl.qualifiers) ): return False @@ -162,9 +151,14 @@ def 
_match_purl( return True -def _check_qualifiers( - vex_qualifiers: Optional[str | dict], observation_qualifiers: Optional[str | dict] -) -> bool: +def _match_cyclonedx_bom_links(vex_statement: VEX_Statement, observation: Observation) -> bool: + if not vex_statement.component_cyclonedx_bom_link or not observation.origin_component_cyclonedx_bom_link: + return False + + return vex_statement.component_cyclonedx_bom_link == observation.origin_component_cyclonedx_bom_link + + +def _check_qualifiers(vex_qualifiers: Optional[str | dict], observation_qualifiers: Optional[str | dict]) -> bool: if not vex_qualifiers and not observation_qualifiers: return True @@ -176,10 +170,7 @@ def _check_qualifiers( if isinstance(vex_qualifiers, dict) and isinstance(observation_qualifiers, dict): for key, value in vex_qualifiers.items(): - if ( - observation_qualifiers.get(key) is not None - and observation_qualifiers.get(key) != value - ): + if observation_qualifiers.get(key) is not None and observation_qualifiers.get(key) != value: return False for key, value in observation_qualifiers.items(): if vex_qualifiers.get(key) is not None and vex_qualifiers.get(key) != value: @@ -196,6 +187,8 @@ def _get_secobserve_status(vex_status: str) -> str: return Status.STATUS_RESOLVED if vex_status == VEX_Status.VEX_STATUS_UNDER_INVESTIGATION: return Status.STATUS_IN_REVIEW + if vex_status == VEX_Status.VEX_STATUS_FALSE_POSITIVE: + return Status.STATUS_FALSE_POSITIVE return Status.STATUS_OPEN @@ -221,9 +214,7 @@ def _write_observation_log( comment = f"{comment}\n\n{vex_statement.impact}" risk_acceptance_expiry_date = ( - calculate_risk_acceptance_expiry_date(observation.product) - if status == Status.STATUS_RISK_ACCEPTED - else None + calculate_risk_acceptance_expiry_date(observation.product) if status == Status.STATUS_RISK_ACCEPTED else None ) create_observation_log( @@ -249,11 +240,7 @@ def write_observation_log_no_vex_statement( previous_vex_justification = observation.current_vex_justification observation.current_vex_justification = get_current_vex_justification(observation) - log_status = ( - observation.current_status - if previous_status != observation.current_status - else "" - ) + log_status = observation.current_status if previous_status != observation.current_status else "" log_vex_justification = ( observation.current_vex_justification if previous_vex_justification != observation.current_vex_justification @@ -261,9 +248,7 @@ def write_observation_log_no_vex_statement( ) if previous_vex_statement: - log_comment = ( - f"Removed VEX statement from {previous_vex_statement.document.document_id}" - ) + log_comment = f"Removed VEX statement from {previous_vex_statement.document.document_id}" else: log_comment = "Removed unknown VEX statement" @@ -284,18 +269,15 @@ def write_observation_log_no_vex_statement( ) -def apply_vex_statements_after_import( - product_purls: set[str], vex_statements: set[VEX_Statement] -) -> None: +def apply_vex_statements_after_import(product_purls: set[str], vex_statements: set[VEX_Statement]) -> None: + # Alternative 1, apply VEX statements with PURLs for product_purl in product_purls: try: purl = PackageURL.from_string(product_purl) except ValueError: continue - search_purl = PackageURL( - type=purl.type, namespace=purl.namespace, name=purl.name - ).to_string() + search_purl = PackageURL(type=purl.type, namespace=purl.namespace, name=purl.name).to_string() products = set(Product.objects.filter(purl__startswith=search_purl)) branches = Branch.objects.filter(purl__startswith=search_purl) @@ -306,6 
+288,18 @@ def apply_vex_statements_after_import( observations = Observation.objects.filter(product=product) for observation in observations: for vex_statement in vex_statements: - apply_vex_statement_for_observation( - vex_statement, observation, observation.vex_statement - ) + apply_vex_statement_for_observation(vex_statement, observation, observation.vex_statement) + + # Alternative 2, apply VEX statements with BOM-Links + bom_links: dict[str, list[VEX_Statement]] = {} + for vex_statement in vex_statements: + if vex_statement.component_cyclonedx_bom_link: + if bom_links.get(vex_statement.component_cyclonedx_bom_link): + bom_links.get(vex_statement.component_cyclonedx_bom_link, []).append(vex_statement) + else: + bom_links[vex_statement.component_cyclonedx_bom_link] = [vex_statement] + + observations = Observation.objects.filter(origin_component_cyclonedx_bom_link__in=bom_links) + for observation in observations: + for vex_statement in bom_links.get(observation.origin_component_cyclonedx_bom_link, []): + apply_vex_statement_for_observation(vex_statement, observation, observation.vex_statement) diff --git a/backend/application/vex/services/vex_import.py b/backend/application/vex/services/vex_import.py index e52fc56a5..584342500 100644 --- a/backend/application/vex/services/vex_import.py +++ b/backend/application/vex/services/vex_import.py @@ -5,6 +5,7 @@ from rest_framework.exceptions import ValidationError from application.vex.services.csaf_parser import parse_csaf_data +from application.vex.services.cyclonedx_parser import parse_cyclonedx_data from application.vex.services.openvex_parser import parse_openvex_data from application.vex.types import VEX_Document_Type @@ -22,6 +23,8 @@ def import_vex(vex_file: File) -> None: parse_openvex_data(data) elif vex_type == VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF: parse_csaf_data(data) + elif vex_type == VEX_Document_Type.VEX_DOCUMENT_TYPE_CYCLONEDX: + parse_cyclonedx_data(data) def _get_json_data(vex_file: File) -> Optional[dict]: @@ -37,10 +40,10 @@ def _get_vex_type(data: dict) -> Optional[str]: if data.get("@context", "").startswith("https://openvex.dev/ns/v0.2.0"): return VEX_Document_Type.VEX_DOCUMENT_TYPE_OPENVEX - if ( - data.get("document", {}).get("category") == "csaf_vex" - and data.get("document", {}).get("csaf_version") == "2.0" - ): + if data.get("document", {}).get("category") == "csaf_vex" and data.get("document", {}).get("csaf_version") == "2.0": return VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF + if data.get("bomFormat") == "CycloneDX": + return VEX_Document_Type.VEX_DOCUMENT_TYPE_CYCLONEDX + return None diff --git a/backend/application/vex/signals.py b/backend/application/vex/signals.py index 821969a8e..aaac31f9d 100644 --- a/backend/application/vex/signals.py +++ b/backend/application/vex/signals.py @@ -1,3 +1,5 @@ +from typing import Any + from django.db.models.signals import pre_delete from django.dispatch import receiver @@ -8,7 +10,7 @@ @receiver(pre_delete, sender=VEX_Statement) def vex_statement_pre_delete( # pylint: disable=unused-argument - sender, instance: VEX_Statement, **kwargs + sender: Any, instance: VEX_Statement, **kwargs: Any ) -> None: # sender is needed according to Django documentation observations = Observation.objects.filter(vex_statement=instance) diff --git a/backend/application/vex/types.py b/backend/application/vex/types.py index cf813fe6d..3bb9aa7c4 100644 --- a/backend/application/vex/types.py +++ b/backend/application/vex/types.py @@ -5,37 +5,12 @@ class VEX_Document_Type: VEX_DOCUMENT_TYPE_CSAF = 
"CSAF" VEX_DOCUMENT_TYPE_OPENVEX = "OpenVEX" + VEX_DOCUMENT_TYPE_CYCLONEDX = "CycloneDX" VEX_DOCUMENT_TYPE_CHOICES = [ (VEX_DOCUMENT_TYPE_CSAF, VEX_DOCUMENT_TYPE_CSAF), (VEX_DOCUMENT_TYPE_OPENVEX, VEX_DOCUMENT_TYPE_OPENVEX), - ] - - -class VEX_Justification: - STATUS_COMPONENT_NOT_PRESENT = "component_not_present" - STATUS_VULNERABLE_CODE_NOT_PRESENT = "vulnerable_code_not_present" - STATUS_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY = ( - "vulnerable_code_cannot_be_controlled_by_adversary" - ) - STATUS_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH = "vulnerable_code_not_in_execute_path" - STATUS_INLINE_MITIGATIONS_ALREADY_EXIST = "inline_mitigations_already_exist" - - VEX_JUSTIFICATION_CHOICES = [ - (STATUS_COMPONENT_NOT_PRESENT, STATUS_COMPONENT_NOT_PRESENT), - (STATUS_VULNERABLE_CODE_NOT_PRESENT, STATUS_VULNERABLE_CODE_NOT_PRESENT), - ( - STATUS_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, - STATUS_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, - ), - ( - STATUS_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH, - STATUS_VULNERABLE_CODE_NOT_IN_EXECUTE_PATH, - ), - ( - STATUS_INLINE_MITIGATIONS_ALREADY_EXIST, - STATUS_INLINE_MITIGATIONS_ALREADY_EXIST, - ), + (VEX_DOCUMENT_TYPE_CYCLONEDX, VEX_DOCUMENT_TYPE_CYCLONEDX), ] @@ -277,6 +252,27 @@ class OpenVEX_Status: OPENVEX_STATUS_UNDER_INVESTIGATION = "under_investigation" +class CycloneDX_Analysis_State: + CYCLONEDX_STATE_RESOLVED = "resolved" + CYCLONEDX_STATE_RESOLVED_WITH_PEDIGREE = "resolved_with_pedigree" + CYCLONEDX_STATE_EXPLOITABLE = "exploitable" + CYCLONEDX_STATE_IN_TRIAGE = "in_triage" + CYCLONEDX_STATE_FALSE_POSITIVE = "false_positive" + CYCLONEDX_STATE_NOT_AFFECTED = "not_affected" + + +class CycloneDX_Analysis_Justification: + CYCLONEDX_JUSTIFICATION_CODE_NOT_PRESENT = "code_not_present" + CYCLONEDX_JUSTIFICATION_CODE_NOT_REACHABLE = "code_not_reachable" + CYCLONEDX_JUSTIFICATION_REQUIRES_CONFIGURATION = "requires_configuration" + CYCLONEDX_JUSTIFICATION_REQUIRES_DEPENDENCY = "requires_dependency" + CYCLONEDX_JUSTIFICATION_REQUIRES_ENVIRONMENT = "requires_environment" + CYCLONEDX_JUSTIFICATION_PROTECTED_BY_COMPILER = "protected_by_compiler" + CYCLONEDX_JUSTIFICATION_PROTECTED_AT_RUNTIME = "protected_at_runtime" + CYCLONEDX_JUSTIFICATION_PROTECTED_AT_PERIMETER = "protected_at_perimeter" + CYCLONEDX_JUSTIFICATION_PROTECTED_BY_MITIGATING_CONTROL = "protected_by_mitigating_control" + + @dataclass(frozen=True) class OpenVEXSubcomponent: id: str @@ -336,12 +332,14 @@ class VEX_Status: VEX_STATUS_AFFECTED = "affected" VEX_STATUS_FIXED = "fixed" VEX_STATUS_UNDER_INVESTIGATION = "under_investigation" + VEX_STATUS_FALSE_POSITIVE = "false_positive" VEX_STATUS_LIST = [ VEX_STATUS_NOT_AFFECTED, VEX_STATUS_AFFECTED, VEX_STATUS_FIXED, VEX_STATUS_UNDER_INVESTIGATION, + VEX_STATUS_FALSE_POSITIVE, ] VEX_STATUS_CHOICES = [ @@ -349,4 +347,5 @@ class VEX_Status: (VEX_STATUS_AFFECTED, VEX_STATUS_AFFECTED), (VEX_STATUS_FIXED, VEX_STATUS_FIXED), (VEX_STATUS_UNDER_INVESTIGATION, VEX_STATUS_UNDER_INVESTIGATION), + (VEX_STATUS_FALSE_POSITIVE, VEX_STATUS_FALSE_POSITIVE), ] diff --git a/backend/config/api_router.py b/backend/config/api_router.py index b49c99fcd..e5de570a1 100644 --- a/backend/config/api_router.py +++ b/backend/config/api_router.py @@ -4,24 +4,27 @@ ApiTokenViewSet, AuthorizationGroupMemberViewSet, AuthorizationGroupViewSet, - ProductApiTokenViewset, UserViewSet, ) -from application.commons.api.views import NotificationViewSet +from application.background_tasks.api.views import PeriodicTaskViewSet from application.core.api.views import ( 
BranchNameViewSet, BranchViewSet, + ComponentNameViewSet, + ComponentViewSet, EvidenceViewSet, ObservationLogViewSet, ObservationTitleViewSet, ObservationViewSet, PotentialDuplicateViewSet, + ProductApiTokenViewset, ProductAuthorizationGroupMemberViewSet, ProductGroupNameViewSet, ProductGroupViewSet, ProductMemberViewSet, ProductNameViewSet, ProductViewSet, + ServiceNameViewSet, ServiceViewSet, ) from application.import_observations.api.views import ( @@ -30,6 +33,7 @@ VulnerabilityCheckViewSet, ) from application.licenses.api.views import ( + ConcludedLicenseViewSet, LicenseComponentEvidenceViewSet, LicenseComponentIdViewSet, LicenseComponentViewSet, @@ -42,11 +46,15 @@ LicensePolicyViewSet, LicenseViewSet, ) +from application.notifications.api.views import NotificationViewSet from application.rules.api.views import GeneralRuleViewSet, ProductRuleViewSet from application.vex.api.views import ( CSAFBranchViewSet, CSAFViewSet, CSAFVulnerabilityViewSet, + CycloneDXBranchViewSet, + CycloneDXViewSet, + CycloneDXVulnerabilityViewSet, OpenVEXBranchViewSet, OpenVEXViewSet, OpenVEXVulnerabilityViewSet, @@ -58,24 +66,18 @@ router = SimpleRouter() router.register("users", UserViewSet, basename="users") -router.register( - "authorization_groups", AuthorizationGroupViewSet, basename="authorization_groups" -) +router.register("authorization_groups", AuthorizationGroupViewSet, basename="authorization_groups") router.register( "authorization_group_members", AuthorizationGroupMemberViewSet, basename="authorization_group_members", ) router.register("api_tokens", ApiTokenViewSet, basename="api_tokens") -router.register( - "product_api_tokens", ProductApiTokenViewset, basename="product_api_tokens" -) +router.register("product_api_tokens", ProductApiTokenViewset, basename="product_api_tokens") router.register("products", ProductViewSet, basename="products") router.register("product_names", ProductNameViewSet, basename="product_names") router.register("product_groups", ProductGroupViewSet, basename="product_groups") -router.register( - "product_group_names", ProductGroupNameViewSet, basename="product_group_names" -) +router.register("product_group_names", ProductGroupNameViewSet, basename="product_group_names") router.register("product_members", ProductMemberViewSet, basename="product_members") router.register( "product_authorization_group_members", @@ -85,25 +87,20 @@ router.register("branches", BranchViewSet, basename="branches") router.register("branch_names", BranchNameViewSet, basename="branch_names") router.register("services", ServiceViewSet, basename="services") +router.register("service_names", ServiceNameViewSet, basename="service_names") router.register("parsers", ParserViewSet, basename="parsers") router.register("observations", ObservationViewSet, basename="observations") -router.register( - "observation_titles", ObservationTitleViewSet, basename="observation_titles" -) +router.register("observation_titles", ObservationTitleViewSet, basename="observation_titles") router.register("observation_logs", ObservationLogViewSet, basename="observation_logs") +router.register("components", ComponentViewSet, basename="components") +router.register("component_names", ComponentNameViewSet, basename="component_names") router.register("general_rules", GeneralRuleViewSet, basename="general_rules") -router.register( - "api_configurations", ApiConfigurationViewSet, basename="api_configurations" -) +router.register("api_configurations", ApiConfigurationViewSet, basename="api_configurations") 
router.register("product_rules", ProductRuleViewSet, basename="product_rules") router.register("evidences", EvidenceViewSet, basename="evidences") router.register("notifications", NotificationViewSet, basename="notifications") -router.register( - "vulnerability_checks", VulnerabilityCheckViewSet, basename="vulnerability_checks" -) -router.register( - "potential_duplicates", PotentialDuplicateViewSet, basename="potential_duplicates" -) +router.register("vulnerability_checks", VulnerabilityCheckViewSet, basename="vulnerability_checks") +router.register("potential_duplicates", PotentialDuplicateViewSet, basename="potential_duplicates") router.register("vex/csaf", CSAFViewSet, basename="csaf") router.register( "vex/csaf_vulnerabilities", @@ -117,19 +114,21 @@ OpenVEXVulnerabilityViewSet, basename="openvex_vulnerabilities", ) +router.register("vex/openvex_branches", OpenVEXBranchViewSet, basename="openvex_branches") +router.register("vex/cyclonedx", CycloneDXViewSet, basename="cyclonedx") router.register( - "vex/openvex_branches", OpenVEXBranchViewSet, basename="openvex_branches" + "vex/cyclonedx_vulnerabilities", + CycloneDXVulnerabilityViewSet, + basename="cyclonedx_vulnerabilities", ) +router.register("vex/cyclonedx_branches", CycloneDXBranchViewSet, basename="cyclonedx_branches") router.register("vex/vex_counters", VEXCounterViewSet, basename="vex_counters") router.register("vex/vex_documents", VEXDocumentViewSet, basename="vex_documents") router.register("vex/vex_statements", VEXStatementViewSet, basename="vex_statements") -router.register( - "license_components", LicenseComponentViewSet, basename="license_components" -) -router.register( - "license_component_ids", LicenseComponentIdViewSet, basename="license_component_ids" -) +router.register("concluded_licenses", ConcludedLicenseViewSet, basename="concluded_licenses") +router.register("license_components", LicenseComponentViewSet, basename="license_components") +router.register("license_component_ids", LicenseComponentIdViewSet, basename="license_component_ids") router.register( "license_component_evidences", LicenseComponentEvidenceViewSet, @@ -137,18 +136,14 @@ ) router.register("licenses", LicenseViewSet, basename="licenses") router.register("license_groups", LicenseGroupViewSet, basename="license_groups") -router.register( - "license_group_members", LicenseGroupMemberViewSet, basename="license_group_members" -) +router.register("license_group_members", LicenseGroupMemberViewSet, basename="license_group_members") router.register( "license_group_authorization_group_members", LicenseGroupAuthorizationGroupMemberViewSet, basename="license_group_authorization_group_members", ) router.register("license_policies", LicensePolicyViewSet, basename="license_policies") -router.register( - "license_policy_items", LicensePolicyItemViewSet, basename="license_policy_items" -) +router.register("license_policy_items", LicensePolicyItemViewSet, basename="license_policy_items") router.register( "license_policy_members", LicensePolicyMemberViewSet, @@ -159,6 +154,6 @@ LicensePolicyAuthorizationGroupMemberViewSet, basename="license_policy_authorization_group_members", ) - +router.register("periodic_tasks", PeriodicTaskViewSet, basename="periodic_tasks") app_name = "api" urlpatterns = router.urls diff --git a/backend/config/schema.py b/backend/config/schema.py index d80bfdea8..43d1127c7 100644 --- a/backend/config/schema.py +++ b/backend/config/schema.py @@ -1,4 +1,5 @@ from drf_spectacular.extensions import OpenApiAuthenticationExtension +from 
drf_spectacular.openapi import AutoSchema from drf_spectacular.plumbing import build_bearer_security_scheme_object from application.access_control.services.api_token_authentication import ( @@ -11,7 +12,7 @@ class APITokenAuthenticationScheme(OpenApiAuthenticationExtension): target_class = "application.access_control.services.api_token_authentication.APITokenAuthentication" name = "API token authentication" - def get_security_definition(self, auto_schema): + def get_security_definition(self, auto_schema: AutoSchema) -> dict | list[dict]: return build_bearer_security_scheme_object( header_name="AUTHORIZATION", token_prefix=API_TOKEN_PREFIX, @@ -19,12 +20,10 @@ def get_security_definition(self, auto_schema): class JWTAuthenticationScheme(OpenApiAuthenticationExtension): - target_class = ( - "application.access_control.services.jwt_authentication.JWTAuthentication" - ) + target_class = "application.access_control.services.jwt_authentication.JWTAuthentication" name = "JWT authentication" - def get_security_definition(self, auto_schema): + def get_security_definition(self, auto_schema: AutoSchema) -> dict | list[dict]: return build_bearer_security_scheme_object( header_name="AUTHORIZATION", token_prefix=JWT_PREFIX, @@ -35,7 +34,7 @@ class AdfsAccessTokenAuthenticationScheme(OpenApiAuthenticationExtension): target_class = "django_auth_adfs.rest_framework.AdfsAccessTokenAuthentication" name = "OAauth2 authentication" - def get_security_definition(self, auto_schema): + def get_security_definition(self, auto_schema: AutoSchema) -> dict | list[dict]: return build_bearer_security_scheme_object( # nosec hardcoded_password_funcarg header_name="AUTHORIZATION", token_prefix="Bearer", bearer_format="JWT" ) diff --git a/backend/config/settings/base.py b/backend/config/settings/base.py index ea01d08ae..63c91e11b 100644 --- a/backend/config/settings/base.py +++ b/backend/config/settings/base.py @@ -3,8 +3,10 @@ """ from pathlib import Path +from socket import gethostbyname, gethostname import environ +from csp.constants import NONE, SELF from application.__init__ import __version__ @@ -22,10 +24,11 @@ # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#debug DEBUG = env.bool("DJANGO_DEBUG", False) + # https://docs.djangoproject.com/en/dev/ref/settings/#secret-key SECRET_KEY = env("DJANGO_SECRET_KEY") # https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts -ALLOWED_HOSTS = ["localhost", "127.0.0.1"] + [ +ALLOWED_HOSTS = ["localhost", "127.0.0.1", gethostbyname(gethostname())] + [ x.strip() for x in env("ALLOWED_HOSTS").split(",") ] @@ -83,13 +86,9 @@ if env("DATABASE_ENGINE") == "django.db.backends.mysql": DATABASES["default"]["OPTIONS"] = {"charset": "utf8mb4"} if env("MYSQL_AZURE", default="false") == "single": - DATABASES["default"]["OPTIONS"]["ssl"] = { - "ca": "/app/BaltimoreCyberTrustRoot_combined.crt.pem" - } + DATABASES["default"]["OPTIONS"]["ssl"] = {"ca": "/app/BaltimoreCyberTrustRoot_combined.crt.pem"} if env("MYSQL_AZURE", default="false") == "flexible": - DATABASES["default"]["OPTIONS"]["ssl"] = { - "ca": "/app/DigiCertGlobalRootCA.crt.pem" - } + DATABASES["default"]["OPTIONS"]["ssl"] = {"ca": "/app/combined-ca-certificates.pem"} # https://docs.djangoproject.com/en/stable/ref/settings/#std:setting-DEFAULT_AUTO_FIELD DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" @@ -122,6 +121,7 @@ LOCAL_APPS = [ "application.access_control", + "application.background_tasks", "application.commons", 
"application.constance", "application.core", @@ -130,6 +130,7 @@ "application.issue_tracker", "application.licenses", "application.metrics", + "application.notifications", "application.rules", "application.vex", ] @@ -160,9 +161,7 @@ ] # https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ - { - "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" - }, + {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, @@ -186,6 +185,7 @@ "django.middleware.clickjacking.XFrameOptionsMiddleware", "csp.middleware.CSPMiddleware", "application.commons.services.global_request.GlobalRequestMiddleware", + "application.commons.services.request_cache.RequestCacheMiddleware", "application.commons.services.security_headers.SecurityHeadersMiddleware", ] @@ -204,7 +204,7 @@ ] -def whitenoise_security_headers(headers, path, url): +def whitenoise_security_headers(headers: dict, path: str, url: str) -> None: headers["Permissions-Policy"] = "geolocation=() camera=(), microphone=()" @@ -274,11 +274,15 @@ def whitenoise_security_headers(headers, path, url): # https://docs.djangoproject.com/en/dev/ref/settings/#x-frame-options X_FRAME_OPTIONS = "DENY" # https://django-csp.readthedocs.io/en/latest/configuration.html -CSP_SCRIPT_SRC = ("'self'",) -CSP_OBJECT_SRC = ("'none'",) -CSP_BASE_URI = ("'none'",) -CSP_FRAME_ANCESTORS = ("'self'",) -CSP_FORM_ACTION = ("'self'",) +CONTENT_SECURITY_POLICY = { + "DIRECTIVES": { + "script-src": [SELF], + "object-src": [NONE], + "frame-ancestors": [SELF], + "form-action": [SELF], + "base-uri": [NONE], + }, +} # https://docs.djangoproject.com/en/dev/ref/middleware/#http-strict-transport-security SECURE_HSTS_SECONDS = 31536000 SECURE_HSTS_PRELOAD = True @@ -290,9 +294,7 @@ def whitenoise_security_headers(headers, path, url): # EMAIL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#email-backend -EMAIL_BACKEND = env( - "EMAIL_BACKEND", default="django.core.mail.backends.smtp.EmailBackend" -) +EMAIL_BACKEND = env("EMAIL_BACKEND", default="django.core.mail.backends.smtp.EmailBackend") # https://docs.djangoproject.com/en/dev/ref/settings/#email-host EMAIL_HOST = env("EMAIL_HOST", default="localhost") # https://docs.djangoproject.com/en/dev/ref/settings/#email-port @@ -322,10 +324,7 @@ def whitenoise_security_headers(headers, path, url): "version": 1, "disable_existing_loggers": False, "formatters": { - "verbose": { - "format": "%(levelname)s | %(asctime)s | %(name)s | " - "%(process)d %(thread)d | %(message)s" - } + "verbose": {"format": "%(levelname)s | %(asctime)s | %(name)s | " "%(process)d %(thread)d | %(message)s"} }, "handlers": { "console": { @@ -395,7 +394,7 @@ def whitenoise_security_headers(headers, path, url): "rest_framework.throttling.UserRateThrottle", ], "DEFAULT_THROTTLE_RATES": {"anon": "10/second", "user": "100/second"}, - "EXCEPTION_HANDLER": "commons.api.exception_handler.custom_exception_handler", + "EXCEPTION_HANDLER": "notifications.api.exception_handler.custom_exception_handler", } # django-cors-headers - https://github.com/adamchainz/django-cors-headers#setup @@ -422,12 +421,16 @@ def whitenoise_security_headers(headers, path, url): HUEY_FILENAME 
= env("HUEY_FILENAME", default="/var/lib/huey/huey.db") +HUEY_IMMEDIATE = env.bool("HUEY_IMMEDIATE", False) +if HUEY_IMMEDIATE not in [True, False]: + raise ValueError("HUEY_IMMEDIATE must be True or False") + HUEY = { "huey_class": "huey.SqliteHuey", # Huey implementation to use. "name": DATABASES["default"]["NAME"], # Use db name for huey. "results": False, # Store return values of tasks. "store_none": False, # If a task returns None, do not save to results. - "immediate": DEBUG, # If DEBUG=True, run synchronously. + "immediate": HUEY_IMMEDIATE, # Check the variable for documentation "utc": True, # Use UTC for all times internally. "connection": { "filename": HUEY_FILENAME, # Filename for sqlite. diff --git a/backend/config/settings/dev.py b/backend/config/settings/dev.py index ea2aabf97..8ff5cefef 100644 --- a/backend/config/settings/dev.py +++ b/backend/config/settings/dev.py @@ -1,44 +1,26 @@ -from .base import * # noqa -from .base import env +from .base import * # noqa NOSONAR # GENERAL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#debug DEBUG = True -# EMAIL +# MIDDLEWARE # ------------------------------------------------------------------------------ -# https://docs.djangoproject.com/en/dev/ref/settings/#email-host -EMAIL_HOST = env("EMAIL_HOST", default="mailhog") -# https://docs.djangoproject.com/en/dev/ref/settings/#email-port -EMAIL_PORT = 1025 +# https://docs.djangoproject.com/en/dev/ref/settings/#middleware +MIDDLEWARE = ["silk.middleware.SilkyMiddleware"] + MIDDLEWARE # noqa F405 # WhiteNoise # ------------------------------------------------------------------------------ # http://whitenoise.evans.io/en/latest/django.html#using-whitenoise-in-development INSTALLED_APPS = ["whitenoise.runserver_nostatic"] + INSTALLED_APPS # noqa F405 - -# django-debug-toolbar -# ------------------------------------------------------------------------------ -# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites -# INSTALLED_APPS += ["debug_toolbar"] # noqa F405 -# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware -# MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"] # noqa F405 -# https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config -# DEBUG_TOOLBAR_CONFIG = { -# "DISABLE_PANELS": ["debug_toolbar.panels.redirects.RedirectsPanel"], -# "SHOW_TEMPLATE_CONTEXT": True, -# } -# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips -INTERNAL_IPS = ["127.0.0.1", "10.0.2.2"] -if env("USE_DOCKER") == "yes": - import socket - - hostname, _, ips = socket.gethostbyname_ex(socket.gethostname()) - INTERNAL_IPS += [".".join(ip.split(".")[:-1] + ["1"]) for ip in ips] - # django-extensions # ------------------------------------------------------------------------------ # https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration INSTALLED_APPS += ["django_extensions"] # noqa F405 + +# django-silk +# ------------------------------------------------------------------------------ +# https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration +INSTALLED_APPS += ["silk"] # noqa F405 diff --git a/backend/config/settings/dist.py b/backend/config/settings/dist.py index cccda6282..f0680d8c3 100644 --- a/backend/config/settings/dist.py +++ b/backend/config/settings/dist.py @@ -1,4 +1,4 @@ -from .base import * # 
diff --git a/backend/config/settings/dist.py b/backend/config/settings/dist.py
index cccda6282..f0680d8c3 100644
--- a/backend/config/settings/dist.py
+++ b/backend/config/settings/dist.py
@@ -1,4 +1,4 @@
-from .base import *  # noqa
+from .base import *  # noqa NOSONAR
 from .base import env
 
 # GENERAL
@@ -6,6 +6,11 @@
 # https://docs.djangoproject.com/en/dev/ref/settings/#debug
 DEBUG = True
 
+# MIDDLEWARE
+# ------------------------------------------------------------------------------
+# https://docs.djangoproject.com/en/dev/ref/settings/#middleware
+MIDDLEWARE = ["silk.middleware.SilkyMiddleware"] + MIDDLEWARE  # noqa F405
+
 # CACHES
 # ------------------------------------------------------------------------------
 # https://docs.djangoproject.com/en/dev/ref/settings/#caches
@@ -16,43 +21,21 @@
     }
 }
 
-# EMAIL
-# ------------------------------------------------------------------------------
-# https://docs.djangoproject.com/en/dev/ref/settings/#email-host
-EMAIL_HOST = env("EMAIL_HOST", default="mailhog")
-# https://docs.djangoproject.com/en/dev/ref/settings/#email-port
-EMAIL_PORT = 1025
-
 # WhiteNoise
 # ------------------------------------------------------------------------------
 # http://whitenoise.evans.io/en/latest/django.html#using-whitenoise-in-development
 INSTALLED_APPS = ["whitenoise.runserver_nostatic"] + INSTALLED_APPS  # noqa F405
-
-# django-debug-toolbar
-# ------------------------------------------------------------------------------
-# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites
-# INSTALLED_APPS += ["debug_toolbar"]  # noqa F405
-# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware
-# MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"]  # noqa F405
-# https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config
-# DEBUG_TOOLBAR_CONFIG = {
-#     "DISABLE_PANELS": ["debug_toolbar.panels.redirects.RedirectsPanel"],
-#     "SHOW_TEMPLATE_CONTEXT": True,
-# }
-# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips
-INTERNAL_IPS = ["127.0.0.1", "10.0.2.2"]
-if env("USE_DOCKER") == "yes":
-    import socket
-
-    hostname, _, ips = socket.gethostbyname_ex(socket.gethostname())
-    INTERNAL_IPS += [".".join(ip.split(".")[:-1] + ["1"]) for ip in ips]
-
 # django-extensions
 # ------------------------------------------------------------------------------
 # https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration
 INSTALLED_APPS += ["django_extensions"]  # noqa F405
 
+# django-silk
+# ------------------------------------------------------------------------------
+# https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration
+INSTALLED_APPS += ["silk"]  # noqa F405
+
 # Your stuff...
 # ------------------------------------------------------------------------------
diff --git a/backend/config/settings/prod.py b/backend/config/settings/prod.py
index bd802f814..ba9e6d83d 100644
--- a/backend/config/settings/prod.py
+++ b/backend/config/settings/prod.py
@@ -1,18 +1,10 @@
 from .base import *  # noqa
-from .base import env
 
 # GENERAL
 # ------------------------------------------------------------------------------
 # https://docs.djangoproject.com/en/dev/ref/settings/#debug
 DEBUG = False
 
-# EMAIL
-# ------------------------------------------------------------------------------
-# https://docs.djangoproject.com/en/dev/ref/settings/#email-host
-EMAIL_HOST = env("EMAIL_HOST", default="mailhog")
-# https://docs.djangoproject.com/en/dev/ref/settings/#email-port
-EMAIL_PORT = 1025
-
 # STATIC
 # ------------------------
 STATICFILES_STORAGE = "whitenoise.storage.CompressedStaticFilesStorage"
diff --git a/backend/config/urls.py b/backend/config/urls.py
index ec01d0e12..faf6ba6ab 100644
--- a/backend/config/urls.py
+++ b/backend/config/urls.py
@@ -8,9 +8,9 @@
 from application.access_control.api.views import (
     AuthenticateView,
-    CreateUserAPITokenView,
     JWTSecretResetView,
-    RevokeUserAPITokenView,
+    UserAPITokenCreateView,
+    UserAPITokenRevokeView,
 )
 from application.commons.api.views import (
     HealthView,
@@ -19,11 +19,16 @@
     VersionView,
 )
 from application.commons.views import empty_view
+from application.core.api.views import PURLTypeManyView, PURLTypeOneView
 from application.import_observations.api.views import (
     ApiImportObservationsById,
     ApiImportObservationsByName,
     FileUploadObservationsById,
     FileUploadObservationsByName,
+    FileUploadSBOMById,
+    FileUploadSBOMByName,
+    ScanOSVBranchView,
+    ScanOSVProductView,
 )
 from application.metrics.api.views import (
     ProductMetricsCurrentView,
@@ -36,6 +41,8 @@
 from application.vex.api.views import (
     CSAFDocumentCreateView,
     CSAFDocumentUpdateView,
+    CycloneDXDocumentCreateView,
+    CycloneDXDocumentUpdateView,
     OpenVEXDocumentCreateView,
     OpenVEXDocumentUpdateView,
     VEXImportView,
@@ -45,9 +52,7 @@
     path("", empty_view),
     path(
         "favicon.ico",
-        RedirectView.as_view(
-            url=staticfiles_storage.url("favicon.ico"), permanent=False
-        ),
+        RedirectView.as_view(url=staticfiles_storage.url("favicon.ico"), permanent=False),
         name="favicon",
     ),
     # Your stuff: custom urls includes go here
@@ -73,21 +78,26 @@
     ),
     path(
         "api/authentication/create_user_api_token/",
-        CreateUserAPITokenView.as_view(),
+        UserAPITokenCreateView.as_view(),
         name="create_user_api_token",
     ),
     path(
         "api/authentication/revoke_user_api_token/",
-        RevokeUserAPITokenView.as_view(),
+        UserAPITokenRevokeView.as_view(),
         name="revoke_user_api_token",
     ),
+    path("api/purl_types//", PURLTypeOneView.as_view()),
+    path("api/purl_types/", PURLTypeManyView.as_view()),
+    path("api/products//scan_osv/", ScanOSVProductView.as_view()),
     path(
-        "api/import/api_import_observations_by_name/",
-        ApiImportObservationsByName.as_view(),
+        "api/products///scan_osv/",
+        ScanOSVBranchView.as_view(),
     ),
     path(
-        "api/import/api_import_observations_by_id/", ApiImportObservationsById.as_view()
+        "api/import/api_import_observations_by_name/",
+        ApiImportObservationsByName.as_view(),
     ),
+    path("api/import/api_import_observations_by_id/", ApiImportObservationsById.as_view()),
     path(
         "api/import/file_upload_observations_by_name/",
         FileUploadObservationsByName.as_view(),
@@ -96,14 +106,20 @@
         "api/import/file_upload_observations_by_id/",
         FileUploadObservationsById.as_view(),
     ),
+    path(
+        "api/import/file_upload_sbom_by_name/",
+        FileUploadSBOMByName.as_view(),
+    ),
+    path(
+        "api/import/file_upload_sbom_by_id/",
+        FileUploadSBOMById.as_view(),
+    ),
     path("api/metrics/product_metrics_timeline/", ProductMetricsTimelineView.as_view()),
     path("api/metrics/product_metrics_current/", ProductMetricsCurrentView.as_view()),
     path("api/metrics/product_metrics_status/", ProductMetricsStatusView.as_view()),
     path("api/metrics/export_excel/", ProductMetricsExportExcelView.as_view()),
     path("api/metrics/export_csv/", ProductMetricsExportCsvView.as_view()),
-    path(
-        "api/metrics/export_codecharta/", ProductMetricsExportCodeChartaView.as_view()
-    ),
+    path("api/metrics/export_codecharta/", ProductMetricsExportCodeChartaView.as_view()),
     # OpenAPI 3
     path("api/oa3/schema/", SpectacularAPIView.as_view(), name="schema_oa3"),
     path(
@@ -124,10 +140,16 @@
         "api/vex/openvex_document/update///",
         OpenVEXDocumentUpdateView.as_view(),
     ),
+    path("api/vex/cyclonedx_document/create/", CycloneDXDocumentCreateView.as_view()),
+    path(
+        "api/vex/cyclonedx_document/update///",
+        CycloneDXDocumentUpdateView.as_view(),
+    ),
     path("api/vex/vex_import/", VEXImportView.as_view()),
 ]
 
 if settings.DEBUG:
+    urlpatterns += [path("silk/", include("silk.urls", namespace="silk"))]
     # This allows the error pages to be debugged during development, just visit
     # these url in browser to see how these error pages look like.
     urlpatterns += [
diff --git a/backend/poetry.lock b/backend/poetry.lock
index 100dc5957..ed47e7df3 100644
--- a/backend/poetry.lock
+++ b/backend/poetry.lock
@@ -1,184 +1,196 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand.
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+    {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
+]
 
 [[package]]
 name = "argon2-cffi"
-version = "23.1.0"
+version = "25.1.0"
 description = "Argon2 for Python"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
+groups = ["main"]
 files = [
-    {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"},
-    {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"},
+    {file = "argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741"},
+    {file = "argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1"},
 ]
 
 [package.dependencies]
 argon2-cffi-bindings = "*"
 
-[package.extras]
-dev = ["argon2-cffi[tests,typing]", "tox (>4)"]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"]
-tests = ["hypothesis", "pytest"]
-typing = ["mypy"]
-
 [[package]]
 name = "argon2-cffi-bindings"
-version = "21.2.0"
+version = "25.1.0"
 description = "Low-level CFFI bindings for Argon2"
 optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"},
-    {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = 
"argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:3d3f05610594151994ca9ccb3c771115bdb4daef161976a266f0dd8aa9996b8f"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8b8efee945193e667a396cbc7b4fb7d357297d6234d30a489905d96caabde56b"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3c6702abc36bf3ccba3f802b799505def420a1b7039862014a65db3205967f5a"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1c70058c6ab1e352304ac7e3b52554daadacd8d453c1752e547c76e9c99ac44"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2fd3bfbff3c5d74fef31a722f729bf93500910db650c925c2d6ef879a7e51cb"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4f9665de60b1b0e99bcd6be4f17d90339698ce954cfd8d9cf4f91c995165a92"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ba92837e4a9aa6a508c8d2d7883ed5a8f6c308c89a4790e1e447a220deb79a85"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win32.whl", hash = "sha256:84a461d4d84ae1295871329b346a97f68eade8c53b6ed9a7ca2d7467f3c8ff6f"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b55aec3565b65f56455eebc9b9f34130440404f27fe21c3b375bf1ea4d8fbae6"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:87c33a52407e4c41f3b70a9c2d3f6056d88b10dad7695be708c5021673f55623"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:aecba1723ae35330a008418a91ea6cfcedf6d31e5fbaa056a166462ff066d500"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e021e87faa76ae0d413b619fe2b65ab9a037f24c60a1e6cc43457ae20de6dc6"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c87b72589133f0346a1cb8d5ecca4b933e3c9b64656c9d175270a000e73b288d"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1db89609c06afa1a214a69a462ea741cf735b29a57530478c06eb81dd403de99"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win32.whl", hash = "sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win_amd64.whl", hash = "sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win_arm64.whl", hash = "sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6dca33a9859abf613e22733131fc9194091c1fa7cb3e131c143056b4856aa47e"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21378b40e1b8d1655dd5310c84a40fc19a9aa5e6366e835ceb8576bf0fea716d"}, + 
{file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d588dec224e2a83edbdc785a5e6f3c6cd736f46bfd4b441bbb5aa1f5085e584"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5acb4e41090d53f17ca1110c3427f0a130f944b896fc8c83973219c97f57b690"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:da0c79c23a63723aa5d782250fbf51b768abca630285262fb5144ba5ae01e520"}, + {file = "argon2_cffi_bindings-25.1.0.tar.gz", hash = "sha256:b957f3e6ea4d55d820e40ff76f450952807013d361a65d7f28acc0acbf29229d"}, ] [package.dependencies] -cffi = ">=1.0.1" - -[package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] -tests = ["pytest"] +cffi = [ + {version = ">=1.0.1", markers = "python_version < \"3.14\""}, + {version = ">=2.0.0b1", markers = "python_version >= \"3.14\""}, +] [[package]] name = "asgiref" -version = "3.8.1" +version = "3.11.1" description = "ASGI specs, helper code, and adapters" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, - {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, + {file = "asgiref-3.11.1-py3-none-any.whl", hash = "sha256:e8667a091e69529631969fd45dc268fa79b99c92c5fcdda727757e52146ec133"}, + {file = "asgiref-3.11.1.tar.gz", hash = "sha256:5f184dc43b7e763efe848065441eac62229c9f7b0475f41f80e207a114eda4ce"}, ] -[package.dependencies] -typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} - [package.extras] -tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] +tests = ["mypy (>=1.14.0)", "pytest", "pytest-asyncio"] [[package]] name = "astroid" -version = "3.3.8" +version = "4.0.4" description = "An abstract syntax tree for Python with inference support." 
optional = true -python-versions = ">=3.9.0" +python-versions = ">=3.10.0" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "astroid-3.3.8-py3-none-any.whl", hash = "sha256:187ccc0c248bfbba564826c26f070494f7bc964fd286b6d9fff4420e55de828c"}, - {file = "astroid-3.3.8.tar.gz", hash = "sha256:a88c7994f914a4ea8572fac479459f4955eeccc877be3f2d959a33273b0cf40b"}, + {file = "astroid-4.0.4-py3-none-any.whl", hash = "sha256:52f39653876c7dec3e3afd4c2696920e05c83832b9737afc21928f2d2eb7a753"}, + {file = "astroid-4.0.4.tar.gz", hash = "sha256:986fed8bcf79fb82c78b18a53352a0b287a73817d6dbcfba3162da36667c49a0"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - [[package]] name = "asttokens" -version = "3.0.0" +version = "3.0.1" description = "Annotate AST trees with source code positions" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ - {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, - {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, + {file = "asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a"}, + {file = "asttokens-3.0.1.tar.gz", hash = "sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7"}, ] [package.extras] -astroid = ["astroid (>=2,<4)"] -test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] +astroid = ["astroid (>=2,<5)"] +test = ["astroid (>=2,<5)", "pytest (<9.0)", "pytest-cov", "pytest-xdist"] [[package]] name = "attrs" -version = "24.3.0" +version = "25.4.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, - {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, + {file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}, + {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, ] -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - [[package]] name = "beartype" -version = "0.19.0" -description = "Unbearably fast near-real-time hybrid runtime-static type-checking in pure Python." +version = "0.22.9" +description = "Unbearably fast near-real-time pure-Python runtime-static type-checker." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "beartype-0.19.0-py3-none-any.whl", hash = "sha256:33b2694eda0daf052eb2aff623ed9a8a586703bbf0a90bbc475a83bbf427f699"}, - {file = "beartype-0.19.0.tar.gz", hash = "sha256:de42dfc1ba5c3710fde6c3002e3bd2cad236ed4d2aabe876345ab0b4234a6573"}, + {file = "beartype-0.22.9-py3-none-any.whl", hash = "sha256:d16c9bbc61ea14637596c5f6fbff2ee99cbe3573e46a716401734ef50c3060c2"}, + {file = "beartype-0.22.9.tar.gz", hash = "sha256:8f82b54aa723a2848a56008d18875f91c1db02c32ef6a62319a002e3e25a975f"}, ] [package.extras] -dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "equinox", "jax[cpu]", "jaxtyping", "mypy (>=0.800)", "numba", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] -doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] -test = ["coverage (>=5.5)", "equinox", "jax[cpu]", "jaxtyping", "mypy (>=0.800)", "numba", "numpy", "pandera", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "sphinx", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] -test-tox = ["equinox", "jax[cpu]", "jaxtyping", "mypy (>=0.800)", "numba", "numpy", "pandera", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] +dev = ["autoapi (>=0.9.0)", "celery", "click", "coverage (>=5.5)", "docutils (>=0.22.0)", "equinox ; sys_platform == \"linux\" and python_version < \"3.15.0\"", "fastmcp ; python_version < \"3.14.0\"", "jax[cpu] ; sys_platform == \"linux\" and python_version < \"3.15.0\"", "jaxtyping ; sys_platform == \"linux\"", "langchain ; python_version < \"3.14.0\" and sys_platform != \"darwin\" and platform_python_implementation != \"PyPy\"", "mypy (>=0.800) ; platform_python_implementation != \"PyPy\"", "nuitka (>=1.2.6) ; sys_platform == \"linux\" and python_version < \"3.14.0\"", "numba ; python_version < \"3.14.0\"", "numpy ; python_version < \"3.15.0\" and sys_platform != \"darwin\" and platform_python_implementation != \"PyPy\"", "pandera (>=0.26.0) ; python_version < \"3.14.0\"", "poetry", "polars ; python_version < \"3.14.0\"", "pydata-sphinx-theme (<=0.7.2)", "pygments", "pyinstaller", "pyright (>=1.1.370)", "pytest (>=6.2.0)", "redis", "rich-click", "setuptools", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "sqlalchemy", "torch ; sys_platform == \"linux\" and python_version < \"3.14.0\"", "tox (>=3.20.1)", "typer", "typing-extensions (>=3.10.0.0)", "xarray ; python_version < \"3.15.0\""] +doc-ghp = ["mkdocs-material[imaging] (>=9.6.0)", "mkdocstrings-python (>=1.16.0)", "mkdocstrings-python-xref (>=1.16.0)"] +doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "setuptools", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] +test = ["celery", "click", "coverage (>=5.5)", "docutils (>=0.22.0)", "equinox ; sys_platform == \"linux\" and python_version < \"3.15.0\"", "fastmcp ; python_version < \"3.14.0\"", "jax[cpu] ; sys_platform == \"linux\" and python_version < \"3.15.0\"", "jaxtyping ; sys_platform == \"linux\"", "langchain ; python_version < \"3.14.0\" and sys_platform != \"darwin\" and platform_python_implementation != \"PyPy\"", "mypy (>=0.800) ; platform_python_implementation != \"PyPy\"", "nuitka (>=1.2.6) ; sys_platform == \"linux\" and python_version < 
\"3.14.0\"", "numba ; python_version < \"3.14.0\"", "numpy ; python_version < \"3.15.0\" and sys_platform != \"darwin\" and platform_python_implementation != \"PyPy\"", "pandera (>=0.26.0) ; python_version < \"3.14.0\"", "poetry", "polars ; python_version < \"3.14.0\"", "pygments", "pyinstaller", "pyright (>=1.1.370)", "pytest (>=6.2.0)", "redis", "rich-click", "sphinx", "sqlalchemy", "torch ; sys_platform == \"linux\" and python_version < \"3.14.0\"", "tox (>=3.20.1)", "typer", "typing-extensions (>=3.10.0.0)", "xarray ; python_version < \"3.15.0\""] +test-tox = ["celery", "click", "docutils (>=0.22.0)", "equinox ; sys_platform == \"linux\" and python_version < \"3.15.0\"", "fastmcp ; python_version < \"3.14.0\"", "jax[cpu] ; sys_platform == \"linux\" and python_version < \"3.15.0\"", "jaxtyping ; sys_platform == \"linux\"", "langchain ; python_version < \"3.14.0\" and sys_platform != \"darwin\" and platform_python_implementation != \"PyPy\"", "mypy (>=0.800) ; platform_python_implementation != \"PyPy\"", "nuitka (>=1.2.6) ; sys_platform == \"linux\" and python_version < \"3.14.0\"", "numba ; python_version < \"3.14.0\"", "numpy ; python_version < \"3.15.0\" and sys_platform != \"darwin\" and platform_python_implementation != \"PyPy\"", "pandera (>=0.26.0) ; python_version < \"3.14.0\"", "poetry", "polars ; python_version < \"3.14.0\"", "pygments", "pyinstaller", "pyright (>=1.1.370)", "pytest (>=6.2.0)", "redis", "rich-click", "sphinx", "sqlalchemy", "torch ; sys_platform == \"linux\" and python_version < \"3.14.0\"", "typer", "typing-extensions (>=3.10.0.0)", "xarray ; python_version < \"3.15.0\""] test-tox-coverage = ["coverage (>=5.5)"] [[package]] name = "black" -version = "24.10.0" +version = "26.1.0" description = "The uncompromising code formatter." 
optional = true -python-versions = ">=3.9" -files = [ - {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, - {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, - {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, - {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, - {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, - {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, - {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, - {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, - {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, - {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, - {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, - {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, - {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, - {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, - {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, - {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, - {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, - {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, - {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, - {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, - {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, - {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, +python-versions = ">=3.10" +groups = ["main"] +markers = "extra == \"code-quality\"" +files = [ + {file = "black-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:ca699710dece84e3ebf6e92ee15f5b8f72870ef984bf944a57a777a48357c168"}, + {file = "black-26.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e8e75dabb6eb83d064b0db46392b25cabb6e784ea624219736e8985a6b3675d"}, + {file = "black-26.1.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb07665d9a907a1a645ee41a0df8a25ffac8ad9c26cdb557b7b88eeeeec934e0"}, + {file = "black-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:7ed300200918147c963c87700ccf9966dceaefbbb7277450a8d646fc5646bf24"}, + {file = "black-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:c5b7713daea9bf943f79f8c3b46f361cc5229e0e604dcef6a8bb6d1c37d9df89"}, + {file = "black-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3cee1487a9e4c640dc7467aaa543d6c0097c391dc8ac74eb313f2fbf9d7a7cb5"}, + {file = "black-26.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d62d14ca31c92adf561ebb2e5f2741bf8dea28aef6deb400d49cca011d186c68"}, + {file = "black-26.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb1dafbbaa3b1ee8b4550a84425aac8874e5f390200f5502cf3aee4a2acb2f14"}, + {file = "black-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:101540cb2a77c680f4f80e628ae98bd2bd8812fb9d72ade4f8995c5ff019e82c"}, + {file = "black-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:6f3977a16e347f1b115662be07daa93137259c711e526402aa444d7a88fdc9d4"}, + {file = "black-26.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6eeca41e70b5f5c84f2f913af857cf2ce17410847e1d54642e658e078da6544f"}, + {file = "black-26.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dd39eef053e58e60204f2cdf059e2442e2eb08f15989eefe259870f89614c8b6"}, + {file = "black-26.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9459ad0d6cd483eacad4c6566b0f8e42af5e8b583cee917d90ffaa3778420a0a"}, + {file = "black-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a19915ec61f3a8746e8b10adbac4a577c6ba9851fa4a9e9fbfbcf319887a5791"}, + {file = "black-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:643d27fb5facc167c0b1b59d0315f2674a6e950341aed0fc05cf307d22bf4954"}, + {file = "black-26.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ba1d768fbfb6930fc93b0ecc32a43d8861ded16f47a40f14afa9bb04ab93d304"}, + {file = "black-26.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b807c240b64609cb0e80d2200a35b23c7df82259f80bef1b2c96eb422b4aac9"}, + {file = "black-26.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1de0f7d01cc894066a1153b738145b194414cc6eeaad8ef4397ac9abacf40f6b"}, + {file = "black-26.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:91a68ae46bf07868963671e4d05611b179c2313301bd756a89ad4e3b3db2325b"}, + {file = "black-26.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:be5e2fe860b9bd9edbf676d5b60a9282994c03fbbd40fe8f5e75d194f96064ca"}, + {file = "black-26.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:9dc8c71656a79ca49b8d3e2ce8103210c9481c57798b48deeb3a8bb02db5f115"}, + {file = "black-26.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b22b3810451abe359a964cc88121d57f7bce482b53a066de0f1584988ca36e79"}, + {file = "black-26.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:53c62883b3f999f14e5d30b5a79bd437236658ad45b2f853906c7cbe79de00af"}, + {file = "black-26.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:f016baaadc423dc960cdddf9acae679e71ee02c4c341f78f3179d7e4819c095f"}, + {file = 
"black-26.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:66912475200b67ef5a0ab665011964bf924745103f51977a78b4fb92a9fc1bf0"}, + {file = "black-26.1.0-py3-none-any.whl", hash = "sha256:1054e8e47ebd686e078c0bb0eaf31e6ce69c966058d122f2c0c950311f9f3ede"}, + {file = "black-26.1.0.tar.gz", hash = "sha256:d294ac3340eef9c9eb5d29288e96dc719ff269a88e27b396340459dd85da4c58"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" packaging = ">=22.0" -pathspec = ">=0.9.0" +pathspec = ">=1.0.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} +pytokens = ">=0.3.0" [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -188,226 +200,264 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boolean-py" -version = "4.0" +version = "5.0" description = "Define boolean algebras, create and parse boolean expressions and create custom boolean DSL." optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "boolean.py-4.0-py3-none-any.whl", hash = "sha256:2876f2051d7d6394a531d82dc6eb407faa0b01a0a0b3083817ccd7323b8d96bd"}, - {file = "boolean.py-4.0.tar.gz", hash = "sha256:17b9a181630e43dde1851d42bef546d616d5d9b4480357514597e78b203d06e4"}, + {file = "boolean_py-5.0-py3-none-any.whl", hash = "sha256:ef28a70bd43115208441b53a045d1549e2f0ec6e3d08a9d142cbc41c1938e8d9"}, + {file = "boolean_py-5.0.tar.gz", hash = "sha256:60cbc4bad079753721d32649545505362c754e121570ada4658b852a3a318d95"}, ] +[package.extras] +dev = ["build", "twine"] +docs = ["Sphinx (>=3.3.1)", "doc8 (>=0.8.1)", "sphinx-rtd-theme (>=0.5.0)", "sphinxcontrib-apidoc (>=0.3.0)"] +linting = ["black", "isort", "pycodestyle"] +testing = ["pytest (>=6,!=7.0.0)", "pytest-xdist (>=2)"] + [[package]] name = "certifi" -version = "2024.12.14" +version = "2026.1.4" description = "Python package for providing Mozilla's CA Bundle." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, - {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, + {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"}, + {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"}, ] [[package]] name = "cffi" -version = "1.17.1" +version = "2.0.0" description = "Foreign Function Interface for Python calling C code." 
optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = 
"sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", 
hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, - {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, - {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash 
= "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"}, + {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"}, + {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"}, + {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"}, + {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"}, + {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = 
"sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"}, + {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"}, + {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"}, + {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"}, + {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"}, + {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"}, + {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"}, + {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"}, + {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"}, + {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"}, + {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"}, + {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"}, + {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = 
"sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"}, + {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"}, + {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"}, + {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}, ] [package.dependencies] -pycparser = "*" - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." -optional = true -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] +pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} [[package]] name = "charset-normalizer" -version = "3.4.1" +version = "3.4.4" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" -files = [ - {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", 
hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, - {file = 
"charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", 
hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, - {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, - {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = 
"sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = 
"charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = 
"charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + 
{file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = 
"charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, ] [[package]] name = "click" -version = "8.1.8" +version = "8.3.1" description = "Composable command line interface toolkit" optional = false -python-versions = ">=3.7" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, + {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"}, + {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"}, ] [package.dependencies] @@ -419,6 
+469,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "(sys_platform == \"win32\" or platform_system == \"Windows\") and (extra == \"dev\" or extra == \"code-quality\" or platform_system == \"Windows\")" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -426,158 +478,255 @@ files = [ [[package]] name = "coverage" -version = "7.6.10" +version = "7.13.4" description = "Code coverage measurement for Python" optional = true -python-versions = ">=3.9" -files = [ - {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, - {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, - {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, - {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, - {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, - {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, - {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, - {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, - {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, - {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, - {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, - 
{file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, - {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, - {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, - {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, - {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, - {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, - {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, - {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, - {file = "coverage-7.6.10.tar.gz", hash = 
"sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, +python-versions = ">=3.10" +groups = ["main"] +markers = "extra == \"unittests\"" +files = [ + {file = "coverage-7.13.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fc31c787a84f8cd6027eba44010517020e0d18487064cd3d8968941856d1415"}, + {file = "coverage-7.13.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a32ebc02a1805adf637fc8dec324b5cdacd2e493515424f70ee33799573d661b"}, + {file = "coverage-7.13.4-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e24f9156097ff9dc286f2f913df3a7f63c0e333dcafa3c196f2c18b4175ca09a"}, + {file = "coverage-7.13.4-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8041b6c5bfdc03257666e9881d33b1abc88daccaf73f7b6340fb7946655cd10f"}, + {file = "coverage-7.13.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2a09cfa6a5862bc2fc6ca7c3def5b2926194a56b8ab78ffcf617d28911123012"}, + {file = "coverage-7.13.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:296f8b0af861d3970c2a4d8c91d48eb4dd4771bcef9baedec6a9b515d7de3def"}, + {file = "coverage-7.13.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e101609bcbbfb04605ea1027b10dc3735c094d12d40826a60f897b98b1c30256"}, + {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aa3feb8db2e87ff5e6d00d7e1480ae241876286691265657b500886c98f38bda"}, + {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4fc7fa81bbaf5a02801b65346c8b3e657f1d93763e58c0abdf7c992addd81a92"}, + {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:33901f604424145c6e9c2398684b92e176c0b12df77d52db81c20abd48c3794c"}, + {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:bb28c0f2cf2782508a40cec377935829d5fcc3ad9a3681375af4e84eb34b6b58"}, + {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9d107aff57a83222ddbd8d9ee705ede2af2cc926608b57abed8ef96b50b7e8f9"}, + {file = "coverage-7.13.4-cp310-cp310-win32.whl", hash = "sha256:a6f94a7d00eb18f1b6d403c91a88fd58cfc92d4b16080dfdb774afc8294469bf"}, + {file = "coverage-7.13.4-cp310-cp310-win_amd64.whl", hash = "sha256:2cb0f1e000ebc419632bbe04366a8990b6e32c4e0b51543a6484ffe15eaeda95"}, + {file = "coverage-7.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d490ba50c3f35dd7c17953c68f3270e7ccd1c6642e2d2afe2d8e720b98f5a053"}, + {file = "coverage-7.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19bc3c88078789f8ef36acb014d7241961dbf883fd2533d18cb1e7a5b4e28b11"}, + {file = "coverage-7.13.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3998e5a32e62fdf410c0dbd3115df86297995d6e3429af80b8798aad894ca7aa"}, + {file = "coverage-7.13.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8e264226ec98e01a8e1054314af91ee6cde0eacac4f465cc93b03dbe0bce2fd7"}, + {file = "coverage-7.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3aa4e7b9e416774b21797365b358a6e827ffadaaca81b69ee02946852449f00"}, + {file = "coverage-7.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:71ca20079dd8f27fcf808817e281e90220475cd75115162218d0e27549f95fef"}, + {file = 
"coverage-7.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e2f25215f1a359ab17320b47bcdaca3e6e6356652e8256f2441e4ef972052903"}, + {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d65b2d373032411e86960604dc4edac91fdfb5dca539461cf2cbe78327d1e64f"}, + {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94eb63f9b363180aff17de3e7c8760c3ba94664ea2695c52f10111244d16a299"}, + {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e856bf6616714c3a9fbc270ab54103f4e685ba236fa98c054e8f87f266c93505"}, + {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:65dfcbe305c3dfe658492df2d85259e0d79ead4177f9ae724b6fb245198f55d6"}, + {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b507778ae8a4c915436ed5c2e05b4a6cecfa70f734e19c22a005152a11c7b6a9"}, + {file = "coverage-7.13.4-cp311-cp311-win32.whl", hash = "sha256:784fc3cf8be001197b652d51d3fd259b1e2262888693a4636e18879f613a62a9"}, + {file = "coverage-7.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:2421d591f8ca05b308cf0092807308b2facbefe54af7c02ac22548b88b95c98f"}, + {file = "coverage-7.13.4-cp311-cp311-win_arm64.whl", hash = "sha256:79e73a76b854d9c6088fe5d8b2ebe745f8681c55f7397c3c0a016192d681045f"}, + {file = "coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459"}, + {file = "coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3"}, + {file = "coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634"}, + {file = "coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3"}, + {file = "coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa"}, + {file = "coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3"}, + {file = "coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a"}, + {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7"}, + {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc"}, + {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47"}, + {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985"}, + {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0"}, + {file = "coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246"}, + {file = "coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = 
"sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126"}, + {file = "coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d"}, + {file = "coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9"}, + {file = "coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac"}, + {file = "coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea"}, + {file = "coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b"}, + {file = "coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525"}, + {file = "coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242"}, + {file = "coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148"}, + {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a"}, + {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23"}, + {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80"}, + {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea"}, + {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a"}, + {file = "coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d"}, + {file = "coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd"}, + {file = "coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af"}, + {file = "coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d"}, + {file = "coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12"}, + {file = "coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b"}, + {file = "coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9"}, + {file = "coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092"}, + {file = 
"coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9"}, + {file = "coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26"}, + {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2"}, + {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940"}, + {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c"}, + {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0"}, + {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b"}, + {file = "coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9"}, + {file = "coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd"}, + {file = "coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997"}, + {file = "coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601"}, + {file = "coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689"}, + {file = "coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c"}, + {file = "coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129"}, + {file = "coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552"}, + {file = "coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a"}, + {file = "coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356"}, + {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71"}, + {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5"}, + {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98"}, + {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5"}, + {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0"}, + {file = "coverage-7.13.4-cp314-cp314-win32.whl", 
hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb"}, + {file = "coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505"}, + {file = "coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2"}, + {file = "coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056"}, + {file = "coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc"}, + {file = "coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9"}, + {file = "coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf"}, + {file = "coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55"}, + {file = "coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72"}, + {file = "coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a"}, + {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6"}, + {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3"}, + {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750"}, + {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39"}, + {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0"}, + {file = "coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea"}, + {file = "coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932"}, + {file = "coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b"}, + {file = "coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0"}, + {file = "coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91"}, ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" -version = "43.0.1" +version = "46.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, - {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, - {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, - {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, - {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, - {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, - {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, +python-versions = "!=3.9.0,!=3.9.1,>=3.8" +groups = ["main"] +files = [ + {file = "cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731"}, + {file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82"}, + {file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1"}, + {file = "cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48"}, + {file = "cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4"}, + {file = "cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = 
"sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663"}, + {file = "cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826"}, + {file = "cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d"}, + {file = "cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a"}, + {file = "cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4"}, + {file = "cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c"}, + {file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4"}, + {file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9"}, + {file = "cryptography-46.0.5-cp38-abi3-win32.whl", hash = 
"sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72"}, + {file = "cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7"}, + {file = "cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d"}, ] [package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} +cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] +docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox[uv] (>=2024.4.15)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"] +sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi (>=2024)", "cryptography-vectors (==46.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] [[package]] name = "cvss" -version = "3.3" +version = "3.6" description = "CVSS2/3/4 library with interactive calculator for Python 2 and Python 3" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "cvss-3.3-py2.py3-none-any.whl", hash = "sha256:cc7326afc7585cc63d0a6ca74dab27d74aa2bc99f5f3d5d4bc4d94a3c22bc0a1"}, - {file = "cvss-3.3.tar.gz", hash = "sha256:ae097183ee58b02262ab2291d27857ca3d0a7c4242b9b076c6bf537e6239fbc0"}, + {file = "cvss-3.6-py2.py3-none-any.whl", hash = "sha256:e342c6ad9c7eb69d2aebbbc2768a03cabd57eb947c806e145de5b936219833ea"}, + {file = "cvss-3.6.tar.gz", hash = "sha256:f21d18224efcd3c01b44ff1b37dec2e3208d29a6d0ce6c87a599c73c21ee1a99"}, ] +[[package]] +name = "cyclonedx-python-lib" +version = "11.6.0" +description = "Python library for CycloneDX" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "cyclonedx_python_lib-11.6.0-py3-none-any.whl", hash = "sha256:94f4aae97db42a452134dafdddcfab9745324198201c4777ed131e64c8380759"}, + {file = 
"cyclonedx_python_lib-11.6.0.tar.gz", hash = "sha256:7fb85a4371fa3a203e5be577ac22b7e9a7157f8b0058b7448731474d6dea7bf0"}, +] + +[package.dependencies] +license-expression = ">=30,<31" +packageurl-python = ">=0.11,<2" +py-serializable = ">=2.1.0,<3.0.0" +sortedcontainers = ">=2.4.0,<3.0.0" +typing_extensions = {version = ">=4.6,<5.0", markers = "python_version < \"3.13\""} + +[package.extras] +json-validation = ["jsonschema[format-nongpl] (>=4.25,<5.0)", "referencing (>=0.28.4)"] +validation = ["jsonschema[format-nongpl] (>=4.25,<5.0)", "lxml (>=4,<7)", "referencing (>=0.28.4)"] +xml-validation = ["lxml (>=4,<7)"] + [[package]] name = "decorator" -version = "5.1.1" +version = "5.2.1" description = "Decorators for Humans" optional = true -python-versions = ">=3.5" +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, + {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, + {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, ] [[package]] name = "defusedcsv" -version = "2.0.0" +version = "3.0.0" description = "Drop-in replacement for Python's CSV library that tries to mitigate CSV injection attacks" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "defusedcsv-2.0.0-py3-none-any.whl", hash = "sha256:a7bc3b1ac1ce4f8c6c1e8740466b1b5789b51ca18d918b0099313dc0cdf2cef4"}, - {file = "defusedcsv-2.0.0.tar.gz", hash = "sha256:7612228e54ef1690a19f7aef526709010608e987f9998c89588ef05d9ecfe4d6"}, + {file = "defusedcsv-3.0.0-py3-none-any.whl", hash = "sha256:5e5f2e940cefb5ac60580c8009388bfb154b7853784d34a8f0ff3a52c6130e87"}, + {file = "defusedcsv-3.0.0.tar.gz", hash = "sha256:018678533bc375f3bf2f70f9721e48daf3800a88320dc325c1dac67ee09e2a45"}, ] [[package]] @@ -586,6 +735,7 @@ version = "0.7.1" description = "XML bomb protection for Python stdlib modules" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -593,43 +743,35 @@ files = [ [[package]] name = "dill" -version = "0.3.9" +version = "0.4.1" description = "serialize all of Python" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, - {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, + {file = "dill-0.4.1-py3-none-any.whl", hash = "sha256:1e1ce33e978ae97fcfcff5638477032b801c46c7c65cf717f95fbc2248f79a9d"}, + {file = "dill-0.4.1.tar.gz", hash = "sha256:423092df4182177d4d8ba8290c8a5b640c66ab35ec7da59ccfa00f6fa3eea5fa"}, ] [package.extras] graph = ["objgraph (>=1.7.2)"] profile = ["gprof2dot (>=2022.7.29)"] -[[package]] -name = "distlib" -version = "0.3.9" -description = "Distribution utilities" -optional = true -python-versions = "*" 
-files = [ - {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, - {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, -] - [[package]] name = "django" -version = "5.1.5" +version = "5.2.11" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "Django-5.1.5-py3-none-any.whl", hash = "sha256:c46eb936111fffe6ec4bc9930035524a8be98ec2f74d8a0ff351226a3e52f459"}, - {file = "Django-5.1.5.tar.gz", hash = "sha256:19bbca786df50b9eca23cee79d495facf55c8f5c54c529d9bf1fe7b5ea086af3"}, + {file = "django-5.2.11-py3-none-any.whl", hash = "sha256:e7130df33ada9ab5e5e929bc19346a20fe383f5454acb2cc004508f242ee92c0"}, + {file = "django-5.2.11.tar.gz", hash = "sha256:7f2d292ad8b9ee35e405d965fbbad293758b858c34bbf7f3df551aeeac6f02d3"}, ] [package.dependencies] -asgiref = ">=3.8.1,<4" +asgiref = ">=3.8.1" sqlparse = ">=0.3.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} @@ -639,13 +781,14 @@ bcrypt = ["bcrypt"] [[package]] name = "django-cors-headers" -version = "4.6.0" +version = "4.9.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "django_cors_headers-4.6.0-py3-none-any.whl", hash = "sha256:8edbc0497e611c24d5150e0055d3b178c6534b8ed826fb6f53b21c63f5d48ba3"}, - {file = "django_cors_headers-4.6.0.tar.gz", hash = "sha256:14d76b4b4c8d39375baeddd89e4f08899051eeaf177cb02a29bd6eae8cf63aa8"}, + {file = "django_cors_headers-4.9.0-py3-none-any.whl", hash = "sha256:15c7f20727f90044dcee2216a9fd7303741a864865f0c3657e28b7056f61b449"}, + {file = "django_cors_headers-4.9.0.tar.gz", hash = "sha256:fe5d7cb59fdc2c8c646ce84b727ac2bca8912a247e6e68e1fb507372178e59e8"}, ] [package.dependencies] @@ -654,35 +797,57 @@ django = ">=4.2" [[package]] name = "django-coverage-plugin" -version = "3.1.0" +version = "3.2.0" description = "Django template coverage.py plugin" optional = true -python-versions = "*" +python-versions = ">=3.10" +groups = ["main"] +markers = "extra == \"unittests\"" files = [ - {file = "django_coverage_plugin-3.1.0-py3-none-any.whl", hash = "sha256:eb0ea8ffdb0db11a02994fc99be6500550efb496c350d709f418ff3d8e553a67"}, - {file = "django_coverage_plugin-3.1.0.tar.gz", hash = "sha256:223d34bf92bebadcb8b7b89932480e41c7bd98b44a8156934488fbe7f4a71f99"}, + {file = "django_coverage_plugin-3.2.0-py3-none-any.whl", hash = "sha256:a4a9400c784c86f1ba53a73c336508e07316c92345b34a0eb0b22b3b14cdbdd6"}, + {file = "django_coverage_plugin-3.2.0.tar.gz", hash = "sha256:0e1460294ecd4b192bd09788ab9ad9380d9b8c9b45925b408ce6c620ac352585"}, ] [package.dependencies] coverage = "*" +Django = "*" [[package]] name = "django-csp" -version = "3.8" +version = "4.0" description = "Django Content Security Policy support." 
optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "django_csp-3.8-py3-none-any.whl", hash = "sha256:19b2978b03fcd73517d7d67acbc04fbbcaec0facc3e83baa502965892d1e0719"}, - {file = "django_csp-3.8.tar.gz", hash = "sha256:ef0f1a9f7d8da68ae6e169c02e9ac661c0ecf04db70e0d1d85640512a68471c0"}, + {file = "django_csp-4.0-py3-none-any.whl", hash = "sha256:d5a0a05463a6b75a4f1fc1828c58c89af8db9364d09fc6e12f122b4d7f3d00dc"}, + {file = "django_csp-4.0.tar.gz", hash = "sha256:b27010bb702eb20a3dad329178df2b61a2b82d338b70fbdc13c3a3bd28712833"}, ] [package.dependencies] -Django = ">=3.2" +django = ">=4.2" +packaging = "*" [package.extras] +dev = ["django-stubs[compatible-mypy]", "jinja2 (>=2.9.6)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-django", "pytest-ruff", "sphinx", "sphinx-rtd-theme", "tox", "tox-gh-actions", "types-setuptools"] jinja2 = ["jinja2 (>=2.9.6)"] tests = ["jinja2 (>=2.9.6)", "pytest", "pytest-cov", "pytest-django", "pytest-ruff"] +typing = ["django-stubs[compatible-mypy]", "jinja2 (>=2.9.6)", "mypy", "pytest", "pytest-django", "types-setuptools"] + +[[package]] +name = "django-dirtyfields" +version = "1.9.9" +description = "Tracking dirty fields on a Django model instance." +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "django_dirtyfields-1.9.9-py3-none-any.whl", hash = "sha256:80da479ab989331e6f29c90576e5f7d7b3df65b36f16bfd6f5d37a0dbdeda6b0"}, + {file = "django_dirtyfields-1.9.9.tar.gz", hash = "sha256:c11c8e03827166d2c91f6634a4d10f5aa5cc7ea430d60e5bd576b3ebf319172f"}, +] + +[package.dependencies] +Django = ">=3.2" [[package]] name = "django-encrypted-model-fields" @@ -690,6 +855,7 @@ version = "0.6.5" description = "A set of fields that wrap standard Django fields with encryption provided by the python cryptography library." optional = false python-versions = ">=3.6,<4.0" +groups = ["main"] files = [ {file = "django-encrypted-model-fields-0.6.5.tar.gz", hash = "sha256:8bd21c5565c0d64ec4dbd375ad34fea68b4da2db871dec3fa71fe7b5e4221c99"}, {file = "django_encrypted_model_fields-0.6.5-py3-none-any.whl", hash = "sha256:b21bbdd8ae2e1a0ea37a5049b3ba46e6e63bf287ad241219a058fac1070796cc"}, @@ -701,13 +867,14 @@ Django = ">=2.2" [[package]] name = "django-environ" -version = "0.12.0" +version = "0.12.1" description = "A package that allows you to utilize 12factor inspired environment variables to configure your Django application." optional = false python-versions = "<4,>=3.9" +groups = ["main"] files = [ - {file = "django_environ-0.12.0-py2.py3-none-any.whl", hash = "sha256:92fb346a158abda07ffe6eb23135ce92843af06ecf8753f43adf9d2366dcc0ca"}, - {file = "django_environ-0.12.0.tar.gz", hash = "sha256:227dc891453dd5bde769c3449cf4a74b6f2ee8f7ab2361c93a07068f4179041a"}, + {file = "django_environ-0.12.1-py2.py3-none-any.whl", hash = "sha256:064ba2d5082f833e6d7fe4def4928bde1eedc0248a417575da7db147aeec1c20"}, + {file = "django_environ-0.12.1.tar.gz", hash = "sha256:22859c6e905ab7637fa3348d1787543bb4492f38d761104a3ce0519b7b752845"}, ] [package.extras] @@ -717,82 +884,112 @@ testing = ["coverage[toml] (>=5.0a4)", "pytest (>=4.6.11)", "setuptools (>=71.0. 
[[package]] name = "django-extensions" -version = "3.2.3" +version = "4.1" description = "Extensions for Django" optional = true -python-versions = ">=3.6" +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"dev\" or extra == \"unittests\"" files = [ - {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"}, - {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"}, + {file = "django_extensions-4.1-py3-none-any.whl", hash = "sha256:0699a7af28f2523bf8db309a80278519362cd4b6e1fd0a8cd4bf063e1e023336"}, + {file = "django_extensions-4.1.tar.gz", hash = "sha256:7b70a4d28e9b840f44694e3f7feb54f55d495f8b3fa6c5c0e5e12bcb2aa3cdeb"}, ] [package.dependencies] -Django = ">=3.2" +django = ">=4.2" [[package]] name = "django-filter" -version = "24.3" +version = "25.2" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "django_filter-24.3-py3-none-any.whl", hash = "sha256:c4852822928ce17fb699bcfccd644b3574f1a2d80aeb2b4ff4f16b02dd49dc64"}, - {file = "django_filter-24.3.tar.gz", hash = "sha256:d8ccaf6732afd21ca0542f6733b11591030fa98669f8d15599b358e24a2cd9c3"}, + {file = "django_filter-25.2-py3-none-any.whl", hash = "sha256:9c0f8609057309bba611062fe1b720b4a873652541192d232dd28970383633e3"}, + {file = "django_filter-25.2.tar.gz", hash = "sha256:760e984a931f4468d096f5541787efb8998c61217b73006163bf2f9523fe8f23"}, ] [package.dependencies] -Django = ">=4.2" +Django = ">=5.2" + +[package.extras] +drf = ["djangorestframework"] [[package]] name = "django-picklefield" -version = "3.2" +version = "3.4.0" description = "Pickled object field for Django" optional = false -python-versions = ">=3" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "django-picklefield-3.2.tar.gz", hash = "sha256:aa463f5d79d497dbe789f14b45180f00a51d0d670067d0729f352a3941cdfa4d"}, - {file = "django_picklefield-3.2-py3-none-any.whl", hash = "sha256:e9a73539d110f69825d9320db18bcb82e5189ff48dbed41821c026a20497764c"}, + {file = "django_picklefield-3.4.0-py3-none-any.whl", hash = "sha256:929bcfbae5b48bd22a52bc04521fdfdd152eee36abb9f20228f9480f9df65f45"}, + {file = "django_picklefield-3.4.0.tar.gz", hash = "sha256:3a1f740536c0e60d0dba43aa89ccdbe86760d4c3f8ec47799eae122baa741d0a"}, ] [package.dependencies] -Django = ">=3.2" +Django = ">=4.2" [package.extras] tests = ["tox"] +[[package]] +name = "django-silk" +version = "5.4.3" +description = "Silky smooth profiling for the Django Framework" +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"dev\"" +files = [ + {file = "django_silk-5.4.3-py3-none-any.whl", hash = "sha256:f7920ae91a34716654296140b2cbf449e9798237a0c6eb7cf2cd79c2cfb39321"}, + {file = "django_silk-5.4.3.tar.gz", hash = "sha256:bedb17c8fd9c029a7746cb947864f5c9ea943ae33d6a9581e60f67c45e4490ad"}, +] + +[package.dependencies] +Django = ">=4.2" +gprof2dot = ">=2017.9.19" +sqlparse = "*" + +[package.extras] +formatting = ["autopep8"] + [[package]] name = "django-stubs" -version = "5.1.2" +version = "5.2.9" description = "Mypy stubs for Django" optional = true -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "django_stubs-5.1.2-py3-none-any.whl", hash = 
"sha256:04ddc778faded6fb48468a8da9e98b8d12b9ba983faa648d37a73ebde0f024da"}, - {file = "django_stubs-5.1.2.tar.gz", hash = "sha256:a0fcb3659bab46a6d835cc2d9bff3fc29c36ccea41a10e8b1930427bc0f9f0df"}, + {file = "django_stubs-5.2.9-py3-none-any.whl", hash = "sha256:2317a7130afdaa76f6ff7f623650d7f3bf1b6c86a60f95840e14e6ec6de1a7cd"}, + {file = "django_stubs-5.2.9.tar.gz", hash = "sha256:c192257120b08785cfe6f2f1c91f1797aceae8e9daa689c336e52c91e8f6a493"}, ] [package.dependencies] -asgiref = "*" django = "*" -django-stubs-ext = ">=5.1.2" -tomli = {version = "*", markers = "python_version < \"3.11\""} -types-PyYAML = "*" +django-stubs-ext = ">=5.2.9" +types-pyyaml = "*" typing-extensions = ">=4.11.0" [package.extras] -compatible-mypy = ["mypy (>=1.12,<1.15)"] +compatible-mypy = ["mypy (>=1.13,<1.20)"] oracle = ["oracledb"] -redis = ["redis"] +redis = ["redis", "types-redis"] [[package]] name = "django-stubs-ext" -version = "5.1.2" +version = "5.2.9" description = "Monkey-patching and extensions for django-stubs" optional = true -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "django_stubs_ext-5.1.2-py3-none-any.whl", hash = "sha256:6c559214538d6a26f631ca638ddc3251a0a891d607de8ce01d23d3201ad8ad6c"}, - {file = "django_stubs_ext-5.1.2.tar.gz", hash = "sha256:421c0c3025a68e3ab8e16f065fad9ba93335ecefe2dd92a0cff97a665680266c"}, + {file = "django_stubs_ext-5.2.9-py3-none-any.whl", hash = "sha256:230c51575551b0165be40177f0f6805f1e3ebf799b835c85f5d64c371ca6cf71"}, + {file = "django_stubs_ext-5.2.9.tar.gz", hash = "sha256:6db4054d1580657b979b7d391474719f1a978773e66c7070a5e246cd445a25a9"}, ] [package.dependencies] @@ -801,13 +998,14 @@ typing-extensions = "*" [[package]] name = "djangorestframework" -version = "3.15.2" +version = "3.16.1" description = "Web APIs for Django, made easy." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "djangorestframework-3.15.2-py3-none-any.whl", hash = "sha256:2b8871b062ba1aefc2de01f773875441a961fefbf79f5eed1e32b2f096944b20"}, - {file = "djangorestframework-3.15.2.tar.gz", hash = "sha256:36fe88cd2d6c6bec23dca9804bab2ba5517a8bb9d8f47ebc68981b56840107ad"}, + {file = "djangorestframework-3.16.1-py3-none-any.whl", hash = "sha256:33a59f47fb9c85ede792cbf88bde71893bcda0667bc573f784649521f1102cec"}, + {file = "djangorestframework-3.16.1.tar.gz", hash = "sha256:166809528b1aced0a17dc66c24492af18049f2c9420dbd0be29422029cfc3ff7"}, ] [package.dependencies] @@ -815,56 +1013,60 @@ django = ">=4.2" [[package]] name = "djangorestframework-stubs" -version = "3.15.2" +version = "3.16.8" description = "PEP-484 stubs for django-rest-framework" optional = true -python-versions = ">=3.9" +python-versions = ">=3.10" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "djangorestframework_stubs-3.15.2-py3-none-any.whl", hash = "sha256:0e72f1e8507bdb2acd99b304520494ea5d45bccba51a4877140cb65fd461adf0"}, - {file = "djangorestframework_stubs-3.15.2.tar.gz", hash = "sha256:3df129845acac6c1b097bc7e5b360d53e32a02029d60b4f972dfbd3e2508f236"}, + {file = "djangorestframework_stubs-3.16.8-py3-none-any.whl", hash = "sha256:c5bf61def0f330a071dd5f470f05710189d06c467b3f3e186b32c5a23d4952fb"}, + {file = "djangorestframework_stubs-3.16.8.tar.gz", hash = "sha256:f6d464b54fa2f929610e957446c04e6ac29558265418e0a2d9f653a4cdd410b5"}, ] [package.dependencies] -django-stubs = ">=5.1.1" -requests = ">=2.0.0" -types-PyYAML = ">=5.4.3" -types-requests = ">=0.1.12" -typing-extensions = ">=3.10.0" +django-stubs = ">=5.2.8" +types-pyyaml = "*" +types-requests = {version = "*", optional = true, markers = "extra == \"requests\""} +typing-extensions = ">=4.0" [package.extras] -compatible-mypy = ["django-stubs[compatible-mypy]", "mypy (>=1.12,<1.14)"] +compatible-mypy = ["django-stubs[compatible-mypy]", "mypy (>=1.13,<1.20)"] coreapi = ["coreapi (>=2.0.0)"] -markdown = ["types-Markdown (>=0.1.5)"] +markdown = ["types-markdown (>=0.1.5)"] +requests = ["types-requests"] [[package]] name = "dnspython" -version = "2.7.0" +version = "2.8.0" description = "DNS toolkit" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, - {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, + {file = "dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af"}, + {file = "dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f"}, ] [package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=43)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=1.0.0)"] -idna = ["idna (>=3.7)"] -trio = ["trio (>=0.23)"] -wmi = ["wmi (>=1.5.1)"] +dev = ["black (>=25.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.17.0)", "mypy (>=1.17)", "pylint (>=3)", "pytest (>=8.4)", "pytest-cov (>=6.2.0)", "quart-trio 
(>=0.12.0)", "sphinx (>=8.2.0)", "sphinx-rtd-theme (>=3.0.0)", "twine (>=6.1.0)", "wheel (>=0.45.0)"] +dnssec = ["cryptography (>=45)"] +doh = ["h2 (>=4.2.0)", "httpcore (>=1.0.0)", "httpx (>=0.28.0)"] +doq = ["aioquic (>=1.2.0)"] +idna = ["idna (>=3.10)"] +trio = ["trio (>=0.30)"] +wmi = ["wmi (>=1.5.1) ; platform_system == \"Windows\""] [[package]] name = "drf-spectacular" -version = "0.28.0" +version = "0.29.0" description = "Sane and flexible OpenAPI 3 schema generation for Django REST framework" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "drf_spectacular-0.28.0-py3-none-any.whl", hash = "sha256:856e7edf1056e49a4245e87a61e8da4baff46c83dbc25be1da2df77f354c7cb4"}, - {file = "drf_spectacular-0.28.0.tar.gz", hash = "sha256:2c778a47a40ab2f5078a7c42e82baba07397bb35b074ae4680721b2805943061"}, + {file = "drf_spectacular-0.29.0-py3-none-any.whl", hash = "sha256:d1ee7c9535d89848affb4427347f7c4a22c5d22530b8842ef133d7b72e19b41a"}, + {file = "drf_spectacular-0.29.0.tar.gz", hash = "sha256:0a069339ea390ce7f14a75e8b5af4a0860a46e833fd4af027411a3e94fc1a0cc"}, ] [package.dependencies] @@ -881,13 +1083,14 @@ sidecar = ["drf-spectacular-sidecar"] [[package]] name = "drf-spectacular-sidecar" -version = "2024.12.1" +version = "2026.1.1" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "drf_spectacular_sidecar-2024.12.1-py3-none-any.whl", hash = "sha256:e30821d150d29294f3be2018aab31b55cd724158e9e690b51a215264751aa8c7"}, - {file = "drf_spectacular_sidecar-2024.12.1.tar.gz", hash = "sha256:6be31df38bcf95681224b6550faa9344ee6dd5360dcf2b44afcc3f7460385613"}, + {file = "drf_spectacular_sidecar-2026.1.1-py3-none-any.whl", hash = "sha256:af8df62f1b594ec280351336d837eaf2402ab25a6bc2a1fad7aee9935821070f"}, + {file = "drf_spectacular_sidecar-2026.1.1.tar.gz", hash = "sha256:6f7c173a8ddbbbdafc7a27e028614b65f07a89ca90f996a432d57460463b56be"}, ] [package.dependencies] @@ -899,6 +1102,7 @@ version = "2.2.0" description = "A robust email address syntax and deliverability validation library." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, @@ -914,147 +1118,141 @@ version = "2.0.0" description = "An implementation of lxml.xmlfile for the standard library" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, ] -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = true -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - [[package]] name = "executing" -version = "2.1.0" +version = "2.2.1" description = "Get the currently executing AST node of a frame, and other information" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ - {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, - {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, + {file = "executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017"}, + {file = "executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4"}, ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] - -[[package]] -name = "filelock" -version = "3.16.1" -description = "A platform independent file lock." 
-optional = true -python-versions = ">=3.8" -files = [ - {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, - {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] -typing = ["typing-extensions (>=4.12.2)"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] [[package]] name = "flake8" -version = "7.1.1" +version = "7.3.0" description = "the modular source code checker: pep8 pyflakes and co" optional = true -python-versions = ">=3.8.1" +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, - {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, + {file = "flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e"}, + {file = "flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.12.0,<2.13.0" -pyflakes = ">=3.2.0,<3.3.0" +pycodestyle = ">=2.14.0,<2.15.0" +pyflakes = ">=3.4.0,<3.5.0" [[package]] name = "flake8-isort" -version = "6.1.1" +version = "7.0.0" description = "flake8 plugin that integrates isort" optional = true -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "flake8_isort-6.1.1-py3-none-any.whl", hash = "sha256:0fec4dc3a15aefbdbe4012e51d5531a2eb5fa8b981cdfbc882296a59b54ede12"}, - {file = "flake8_isort-6.1.1.tar.gz", hash = "sha256:c1f82f3cf06a80c13e1d09bfae460e9666255d5c780b859f19f8318d420370b3"}, + {file = "flake8_isort-7.0.0-py3-none-any.whl", hash = "sha256:c301a0e55fc77582348e636194b84b1a0baf0dfdaa6eddf3b0eeea75f8be7f36"}, + {file = "flake8_isort-7.0.0.tar.gz", hash = "sha256:a677199d1197f826eb69084e7ac272f208f4583363285f43111c34272abe7e5d"}, ] [package.dependencies] flake8 = "*" -isort = ">=5.0.0,<6" +isort = ">=5.0.0" [package.extras] test = ["pytest"] +[[package]] +name = "gprof2dot" +version = "2025.4.14" +description = "Generate a dot graph from the output of several profilers." 
+optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" +files = [ + {file = "gprof2dot-2025.4.14-py3-none-any.whl", hash = "sha256:0742e4c0b4409a5e8777e739388a11e1ed3750be86895655312ea7c20bd0090e"}, + {file = "gprof2dot-2025.4.14.tar.gz", hash = "sha256:35743e2d2ca027bf48fa7cba37021aaf4a27beeae1ae8e05a50b55f1f921a6ce"}, +] + [[package]] name = "gunicorn" -version = "23.0.0" +version = "25.1.0" description = "WSGI HTTP Server for UNIX" optional = false -python-versions = ">=3.7" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"}, - {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"}, + {file = "gunicorn-25.1.0-py3-none-any.whl", hash = "sha256:d0b1236ccf27f72cfe14bce7caadf467186f19e865094ca84221424e839b8b8b"}, + {file = "gunicorn-25.1.0.tar.gz", hash = "sha256:1426611d959fa77e7de89f8c0f32eed6aa03ee735f98c01efba3e281b1c47616"}, ] [package.dependencies] packaging = "*" [package.extras] -eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] -gevent = ["gevent (>=1.4.0)"] +eventlet = ["eventlet (>=0.40.3)"] +gevent = ["gevent (>=24.10.1)"] +http2 = ["h2 (>=4.1.0)"] setproctitle = ["setproctitle"] -testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] -tornado = ["tornado (>=0.2)"] +testing = ["coverage", "eventlet (>=0.40.3)", "gevent (>=24.10.1)", "h2 (>=4.1.0)", "httpx[http2]", "pytest", "pytest-asyncio", "pytest-cov", "uvloop (>=0.19.0)"] +tornado = ["tornado (>=6.5.0)"] [[package]] -name = "huey" -version = "2.5.2" -description = "huey, a little task queue" +name = "html-to-markdown" +version = "2.25.0" +description = "High-performance HTML to Markdown converter powered by Rust with a clean Python API" optional = false -python-versions = "*" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "huey-2.5.2.tar.gz", hash = "sha256:df33db474c05414ed40ee2110e9df692369871734da22d74ffb035a4bd74047f"}, + {file = "html_to_markdown-2.25.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:1d796e8f88e9ad2fad461e2b8ed783d72ff00bf5b6f3126e998c7af6a7ba68f0"}, + {file = "html_to_markdown-2.25.0-cp310-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7b31fd8177cbd9b445bca90d961f5c344745b24615d008afb650cfcc813e8ae3"}, + {file = "html_to_markdown-2.25.0-cp310-abi3-win_amd64.whl", hash = "sha256:542543a90edfe31d5f72535866d39f258766f9f1498f3c7fa30f878053e80baf"}, + {file = "html_to_markdown-2.25.0.tar.gz", hash = "sha256:5961313ae4f4e1b0863e453ca287992abc89d0f033c611cec9f304aa6b194b3b"}, ] -[package.extras] -backends = ["redis (>=3.0.0)"] -redis = ["redis (>=3.0.0)"] - [[package]] -name = "identify" -version = "2.6.5" -description = "File identification library for Python" -optional = true -python-versions = ">=3.9" +name = "huey" +version = "2.6.0" +description = "a little task queue" +optional = false +python-versions = "*" +groups = ["main"] files = [ - {file = "identify-2.6.5-py2.py3-none-any.whl", hash = "sha256:14181a47091eb75b337af4c23078c9d09225cd4c48929f521f3bf16b09d02566"}, - {file = "identify-2.6.5.tar.gz", hash = "sha256:c10b33f250e5bba374fae86fb57f3adcebf1161bce7cdf92031915fd480c13bc"}, + {file = "huey-2.6.0-py3-none-any.whl", hash = "sha256:1b9df9d370b49c6d5721ba8a01ac9a787cf86b3bdc584e4679de27b920395c3f"}, + {file = "huey-2.6.0.tar.gz", hash = "sha256:8d11f8688999d65266af1425b831f6e3773e99415027177b8734b0ffd5e251f6"}, ] 
[package.extras] -license = ["ukkonen"] +backends = ["redis (>=3.0.0)"] [[package]] name = "idna" -version = "3.10" +version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, ] [package.extras] @@ -1066,6 +1264,7 @@ version = "7.5.0" description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "inflect-7.5.0-py3-none-any.whl", hash = "sha256:2aea70e5e70c35d8350b8097396ec155ffd68def678c7ff97f51aa69c1d92344"}, {file = "inflect-7.5.0.tar.gz", hash = "sha256:faf19801c3742ed5a05a8ce388e0d8fe1a07f8d095c82201eb904f5d27ad571f"}, @@ -1076,7 +1275,7 @@ more_itertools = ">=8.5.0" typeguard = ">=4.0.1" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] @@ -1089,6 +1288,7 @@ version = "0.5.1" description = "A port of Ruby on Rails inflector to Python" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, @@ -1100,78 +1300,83 @@ version = "0.13.13" description = "IPython-enabled pdb" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4"}, {file = "ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726"}, ] [package.dependencies] -decorator = {version = "*", markers = "python_version > \"3.6\""} -ipython = {version = ">=7.31.1", markers = "python_version > \"3.6\""} -tomli = {version = "*", markers = "python_version > \"3.6\" and python_version < \"3.11\""} +decorator = {version = "*", markers = "python_version >= \"3.11\""} +ipython = {version = ">=7.31.1", markers = "python_version >= \"3.11\""} [[package]] name = "ipython" -version = "8.31.0" +version = "9.10.0" description = "IPython: Productive Interactive Computing" optional = true -python-versions = ">=3.10" +python-versions = ">=3.11" +groups = ["main"] +markers = "extra == \"dev\"" files = [ - {file = "ipython-8.31.0-py3-none-any.whl", hash = "sha256:46ec58f8d3d076a61d128fe517a51eb730e3aaf0c184ea8c17d16e366660c6a6"}, - {file = "ipython-8.31.0.tar.gz", hash = "sha256:b6a2274606bec6166405ff05e54932ed6e5cfecaca1fc05f2cacde7bb074d70b"}, + {file = "ipython-9.10.0-py3-none-any.whl", hash = 
"sha256:c6ab68cc23bba8c7e18e9b932797014cc61ea7fd6f19de180ab9ba73e65ee58d"}, + {file = "ipython-9.10.0.tar.gz", hash = "sha256:cd9e656be97618a0676d058134cd44e6dc7012c0e5cb36a9ce96a8c904adaf77"}, ] [package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" +colorama = {version = ">=0.4.4", markers = "sys_platform == \"win32\""} +decorator = ">=4.3.2" +ipython-pygments-lexers = ">=1.0.0" +jedi = ">=0.18.1" +matplotlib-inline = ">=0.1.5" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} prompt_toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack_data = "*" +pygments = ">=2.11.0" +stack_data = ">=0.6.0" traitlets = ">=5.13.0" typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} [package.extras] -all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +all = ["argcomplete (>=3.0)", "ipython[doc,matplotlib,terminal,test,test-extra]"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing_extensions"] -kernel = ["ipykernel"] -matplotlib = ["matplotlib"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] - -[[package]] -name = "isodate" -version = "0.7.2" -description = "An ISO 8601 date/time/duration parser and formatter" -optional = false -python-versions = ">=3.7" +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[matplotlib,test]", "setuptools (>=70.0)", "sphinx (>=8.0)", "sphinx-rtd-theme (>=0.1.8)", "sphinx_toml (==0.0.4)", "typing_extensions"] +matplotlib = ["matplotlib (>3.9)"] +test = ["packaging (>=20.1.0)", "pytest (>=7.0.0)", "pytest-asyncio (>=1.0.0)", "setuptools (>=61.2)", "testpath (>=0.2)"] +test-extra = ["curio", "ipykernel (>6.30)", "ipython[matplotlib]", "ipython[test]", "jupyter_ai", "nbclient", "nbformat", "numpy (>=1.27)", "pandas (>2.1)", "trio (>=0.1.0)"] + +[[package]] +name = "ipython-pygments-lexers" +version = "1.1.1" +description = "Defines a variety of Pygments lexers for highlighting IPython code." +optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ - {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, - {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, + {file = "ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c"}, + {file = "ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81"}, ] +[package.dependencies] +pygments = "*" + [[package]] name = "isort" -version = "5.13.2" +version = "7.0.0" description = "A Python utility / library to sort Python imports." 
optional = true -python-versions = ">=3.8.0" +python-versions = ">=3.10.0" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, + {file = "isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1"}, + {file = "isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187"}, ] [package.extras] -colors = ["colorama (>=0.4.6)"] +colors = ["colorama"] +plugins = ["setuptools"] [[package]] name = "jedi" @@ -1179,6 +1384,8 @@ version = "0.19.2" description = "An autocompletion tool for Python that can be used for text editors." optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, @@ -1194,192 +1401,345 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] [[package]] name = "jira" -version = "3.8.0" +version = "3.10.5" description = "Python library for interacting with JIRA via REST APIs." optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "jira-3.8.0-py3-none-any.whl", hash = "sha256:12190dc84dad00b8a6c0341f7e8a254b0f38785afdec022bd5941e1184a5a3fb"}, - {file = "jira-3.8.0.tar.gz", hash = "sha256:63719c529a570aaa01c3373dbb5a104dab70381c5be447f6c27f997302fa335a"}, + {file = "jira-3.10.5-py3-none-any.whl", hash = "sha256:d4da1385c924ee693d6cc9838e56a34e31d74f0d6899934ef35bbd0d2d33997f"}, + {file = "jira-3.10.5.tar.gz", hash = "sha256:2d09ae3bf4741a2787dd889dfea5926a5d509aac3b28ab3b98c098709e6ee72d"}, ] [package.dependencies] defusedxml = "*" packaging = "*" -Pillow = ">=2.1.0" requests = ">=2.10.0" requests-oauthlib = ">=1.1.0" -requests-toolbelt = "*" -typing-extensions = ">=3.7.4.2" +requests_toolbelt = "*" +typing_extensions = ">=3.7.4.2" [package.extras] async = ["requests-futures (>=0.9.7)"] cli = ["ipython (>=4.0.0)", "keyring"] docs = ["furo", "sphinx (>=5.0.0)", "sphinx-copybutton"] -opt = ["PyJWT", "filemagic (>=1.6)", "requests-jwt", "requests-kerberos"] -test = ["MarkupSafe (>=0.23)", "PyYAML (>=5.1)", "docutils (>=0.12)", "flaky", "oauthlib", "parameterized (>=0.8.1)", "pytest (>=6.0.0)", "pytest-cache", "pytest-cov", "pytest-instafail", "pytest-sugar", "pytest-timeout (>=1.3.1)", "pytest-xdist (>=2.2)", "requests-mock", "requires.io", "tenacity", "wheel (>=0.24.0)", "yanc (>=0.3.3)"] +opt = ["PyJWT", "filemagic (>=1.6)", "requests_jwt", "requests_kerberos"] +test = ["MarkupSafe (>=0.23)", "PyYAML (>=5.1)", "docutils (>=0.21.2)", "flaky", "oauthlib", "parameterized (>=0.8.1)", "parameterized (>=0.8.1)", "pip", "pytest (>=6.0.0)", "pytest-cache", "pytest-cov", "pytest-instafail", "pytest-sugar", "pytest-timeout (>=1.3.1)", "pytest-xdist (>=2.2)", "requests_mock", "requires.io", "tenacity", "wheel (>=0.24.0)", "yanc (>=0.3.3)"] [[package]] name = "jsonpickle" -version = "4.0.1" +version = "4.1.1" description = "jsonpickle encodes/decodes any Python object to/from JSON" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = 
"jsonpickle-4.0.1-py3-none-any.whl", hash = "sha256:2973c0b0d988c6792ed6c446fa582c48352e79c2880fa2c013f1abde15905555"}, - {file = "jsonpickle-4.0.1.tar.gz", hash = "sha256:b5336144d902958b92cb08bc1e76bfa47199b8afd454303693894defd2fa50c5"}, + {file = "jsonpickle-4.1.1-py3-none-any.whl", hash = "sha256:bb141da6057898aa2438ff268362b126826c812a1721e31cf08a6e142910dc91"}, + {file = "jsonpickle-4.1.1.tar.gz", hash = "sha256:f86e18f13e2b96c1c1eede0b7b90095bbb61d99fedc14813c44dc2f361dbbae1"}, ] [package.extras] cov = ["pytest-cov"] dev = ["black", "pyupgrade"] docs = ["furo", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -packaging = ["build", "setuptools (>=61.2)", "setuptools-scm[toml] (>=6.0)", "twine"] -testing = ["PyYAML", "atheris (>=2.3.0,<2.4.0)", "bson", "ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=6.0,!=8.1.*)", "pytest-benchmark", "pytest-benchmark[histogram]", "pytest-checkdocs (>=1.2.3)", "pytest-enabler (>=1.0.1)", "pytest-ruff (>=0.2.1)", "scikit-learn", "scipy", "scipy (>=1.9.3)", "simplejson", "sqlalchemy", "ujson"] +packaging = ["build", "setuptools (>=61.2)", "setuptools_scm[toml] (>=6.0)", "twine"] +testing = ["PyYAML", "atheris (>=2.3.0,<2.4.0) ; python_version < \"3.12\"", "bson", "ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=6.0,!=8.1.*)", "pytest-benchmark", "pytest-benchmark[histogram]", "pytest-checkdocs (>=1.2.3)", "pytest-enabler (>=1.0.1)", "pytest-ruff (>=0.2.1)", "scikit-learn", "scipy (>=1.9.3) ; python_version > \"3.10\"", "scipy ; python_version <= \"3.10\"", "simplejson", "sqlalchemy", "ujson"] [[package]] name = "jsonschema" -version = "4.23.0" +version = "4.26.0" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, + {file = "jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce"}, + {file = "jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326"}, ] [package.dependencies] attrs = ">=22.2.0" -jsonschema-specifications = ">=2023.03.6" +jsonschema-specifications = ">=2023.3.6" referencing = ">=0.28.4" -rpds-py = ">=0.7.1" +rpds-py = ">=0.25.0" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"] [[package]] name = "jsonschema-specifications" -version = "2024.10.1" +version = "2025.9.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, - {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = 
"sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, + {file = "jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe"}, + {file = "jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d"}, ] [package.dependencies] referencing = ">=0.31.0" +[[package]] +name = "librt" +version = "0.8.0" +description = "Mypyc runtime library" +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" and extra == \"code-quality\"" +files = [ + {file = "librt-0.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db63cf3586a24241e89ca1ce0b56baaec9d371a328bd186c529b27c914c9a1ef"}, + {file = "librt-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ba9d9e60651615bc614be5e21a82cdb7b1769a029369cf4b4d861e4f19686fb6"}, + {file = "librt-0.8.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb4b3ad543084ed79f186741470b251b9d269cd8b03556f15a8d1a99a64b7de5"}, + {file = "librt-0.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d2720335020219197380ccfa5c895f079ac364b4c429e96952cd6509934d8eb"}, + {file = "librt-0.8.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9726305d3e53419d27fc8cdfcd3f9571f0ceae22fa6b5ea1b3662c2e538f833e"}, + {file = "librt-0.8.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cc3d107f603b5ee7a79b6aa6f166551b99b32fb4a5303c4dfcb4222fc6a0335e"}, + {file = "librt-0.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:41064a0c07b4cc7a81355ccc305cb097d6027002209ffca51306e65ee8293630"}, + {file = "librt-0.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c6e4c10761ddbc0d67d2f6e2753daf99908db85d8b901729bf2bf5eaa60e0567"}, + {file = "librt-0.8.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:ba581acad5ac8f33e2ff1746e8a57e001b47c6721873121bf8bbcf7ba8bd3aa4"}, + {file = "librt-0.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bdab762e2c0b48bab76f1a08acb3f4c77afd2123bedac59446aeaaeed3d086cf"}, + {file = "librt-0.8.0-cp310-cp310-win32.whl", hash = "sha256:6a3146c63220d814c4a2c7d6a1eacc8d5c14aed0ff85115c1dfea868080cd18f"}, + {file = "librt-0.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:bbebd2bba5c6ae02907df49150e55870fdd7440d727b6192c46b6f754723dde9"}, + {file = "librt-0.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ce33a9778e294507f3a0e3468eccb6a698b5166df7db85661543eca1cfc5369"}, + {file = "librt-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8070aa3368559de81061ef752770d03ca1f5fc9467d4d512d405bd0483bfffe6"}, + {file = "librt-0.8.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:20f73d4fecba969efc15cdefd030e382502d56bb6f1fc66b580cce582836c9fa"}, + {file = "librt-0.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a512c88900bdb1d448882f5623a0b1ad27ba81a9bd75dacfe17080b72272ca1f"}, + {file = "librt-0.8.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:015e2dde6e096d27c10238bf9f6492ba6c65822dfb69d2bf74c41a8e88b7ddef"}, + {file = "librt-0.8.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1c25a131013eadd3c600686a0c0333eb2896483cbc7f65baa6a7ee761017aef9"}, + {file = 
"librt-0.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:21b14464bee0b604d80a638cf1ee3148d84ca4cc163dcdcecb46060c1b3605e4"}, + {file = "librt-0.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:05a3dd3f116747f7e1a2b475ccdc6fb637fd4987126d109e03013a79d40bf9e6"}, + {file = "librt-0.8.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:fa37f99bff354ff191c6bcdffbc9d7cdd4fc37faccfc9be0ef3a4fd5613977da"}, + {file = "librt-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1566dbb9d1eb0987264c9b9460d212e809ba908d2f4a3999383a84d765f2f3f1"}, + {file = "librt-0.8.0-cp311-cp311-win32.whl", hash = "sha256:70defb797c4d5402166787a6b3c66dfb3fa7f93d118c0509ffafa35a392f4258"}, + {file = "librt-0.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:db953b675079884ffda33d1dca7189fb961b6d372153750beb81880384300817"}, + {file = "librt-0.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:75d1a8cab20b2043f03f7aab730551e9e440adc034d776f15f6f8d582b0a5ad4"}, + {file = "librt-0.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:17269dd2745dbe8e42475acb28e419ad92dfa38214224b1b01020b8cac70b645"}, + {file = "librt-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f4617cef654fca552f00ce5ffdf4f4b68770f18950e4246ce94629b789b92467"}, + {file = "librt-0.8.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5cb11061a736a9db45e3c1293cfcb1e3caf205912dfa085734ba750f2197ff9a"}, + {file = "librt-0.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4bb00bd71b448f16749909b08a0ff16f58b079e2261c2e1000f2bbb2a4f0a45"}, + {file = "librt-0.8.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95a719a049f0eefaf1952673223cf00d442952273cbd20cf2ed7ec423a0ef58d"}, + {file = "librt-0.8.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bd32add59b58fba3439d48d6f36ac695830388e3da3e92e4fc26d2d02670d19c"}, + {file = "librt-0.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4f764b2424cb04524ff7a486b9c391e93f93dc1bd8305b2136d25e582e99aa2f"}, + {file = "librt-0.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f04ca50e847abc486fa8f4107250566441e693779a5374ba211e96e238f298b9"}, + {file = "librt-0.8.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9ab3a3475a55b89b87ffd7e6665838e8458e0b596c22e0177e0f961434ec474a"}, + {file = "librt-0.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e36a8da17134ffc29373775d88c04832f9ecfab1880470661813e6c7991ef79"}, + {file = "librt-0.8.0-cp312-cp312-win32.whl", hash = "sha256:4eb5e06ebcc668677ed6389164f52f13f71737fc8be471101fa8b4ce77baeb0c"}, + {file = "librt-0.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a33335eb59921e77c9acc05d0e654e4e32e45b014a4d61517897c11591094f8"}, + {file = "librt-0.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:24a01c13a2a9bdad20997a4443ebe6e329df063d1978bbe2ebbf637878a46d1e"}, + {file = "librt-0.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7f820210e21e3a8bf8fde2ae3c3d10106d4de9ead28cbfdf6d0f0f41f5b12fa1"}, + {file = "librt-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4831c44b8919e75ca0dfb52052897c1ef59fdae19d3589893fbd068f1e41afbf"}, + {file = "librt-0.8.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:88c6e75540f1f10f5e0fc5e87b4b6c290f0e90d1db8c6734f670840494764af8"}, + {file = "librt-0.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:9646178cd794704d722306c2c920c221abbf080fede3ba539d5afdec16c46dad"}, + {file = "librt-0.8.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e1af31a710e17891d9adf0dbd9a5fcd94901a3922a96499abdbf7ce658f4e01"}, + {file = "librt-0.8.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:507e94f4bec00b2f590fbe55f48cd518a208e2474a3b90a60aa8f29136ddbada"}, + {file = "librt-0.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f1178e0de0c271231a660fbef9be6acdfa1d596803464706862bef6644cc1cae"}, + {file = "librt-0.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:71fc517efc14f75c2f74b1f0a5d5eb4a8e06aa135c34d18eaf3522f4a53cd62d"}, + {file = "librt-0.8.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:0583aef7e9a720dd40f26a2ad5a1bf2ccbb90059dac2b32ac516df232c701db3"}, + {file = "librt-0.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5d0f76fc73480d42285c609c0ea74d79856c160fa828ff9aceab574ea4ecfd7b"}, + {file = "librt-0.8.0-cp313-cp313-win32.whl", hash = "sha256:e79dbc8f57de360f0ed987dc7de7be814b4803ef0e8fc6d3ff86e16798c99935"}, + {file = "librt-0.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:25b3e667cbfc9000c4740b282df599ebd91dbdcc1aa6785050e4c1d6be5329ab"}, + {file = "librt-0.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:e9a3a38eb4134ad33122a6d575e6324831f930a771d951a15ce232e0237412c2"}, + {file = "librt-0.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:421765e8c6b18e64d21c8ead315708a56fc24f44075059702e421d164575fdda"}, + {file = "librt-0.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:48f84830a8f8ad7918afd743fd7c4eb558728bceab7b0e38fd5a5cf78206a556"}, + {file = "librt-0.8.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9f09d4884f882baa39a7e36bbf3eae124c4ca2a223efb91e567381d1c55c6b06"}, + {file = "librt-0.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:693697133c3b32aa9b27f040e3691be210e9ac4d905061859a9ed519b1d5a376"}, + {file = "librt-0.8.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5512aae4648152abaf4d48b59890503fcbe86e85abc12fb9b096fe948bdd816"}, + {file = "librt-0.8.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:995d24caa6bbb34bcdd4a41df98ac6d1af637cfa8975cb0790e47d6623e70e3e"}, + {file = "librt-0.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b9aef96d7593584e31ef6ac1eb9775355b0099fee7651fae3a15bc8657b67b52"}, + {file = "librt-0.8.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:4f6e975377fbc4c9567cb33ea9ab826031b6c7ec0515bfae66a4fb110d40d6da"}, + {file = "librt-0.8.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:daae5e955764be8fd70a93e9e5133c75297f8bce1e802e1d3683b98f77e1c5ab"}, + {file = "librt-0.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7bd68cebf3131bb920d5984f75fe302d758db33264e44b45ad139385662d7bc3"}, + {file = "librt-0.8.0-cp314-cp314-win32.whl", hash = "sha256:1e6811cac1dcb27ca4c74e0ca4a5917a8e06db0d8408d30daee3a41724bfde7a"}, + {file = "librt-0.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:178707cda89d910c3b28bf5aa5f69d3d4734e0f6ae102f753ad79edef83a83c7"}, + {file = "librt-0.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:3e8b77b5f54d0937b26512774916041756c9eb3e66f1031971e626eea49d0bf4"}, + {file = "librt-0.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:789911e8fa40a2e82f41120c936b1965f3213c67f5a483fc5a41f5839a05dcbb"}, + 
{file = "librt-0.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2b37437e7e4ef5e15a297b36ba9e577f73e29564131d86dd75875705e97402b5"}, + {file = "librt-0.8.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:671a6152edf3b924d98a5ed5e6982ec9cb30894085482acadce0975f031d4c5c"}, + {file = "librt-0.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8992ca186a1678107b0af3d0c9303d8c7305981b9914989b9788319ed4d89546"}, + {file = "librt-0.8.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:001e5330093d887b8b9165823eca6c5c4db183fe4edea4fdc0680bbac5f46944"}, + {file = "librt-0.8.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d920789eca7ef71df7f31fd547ec0d3002e04d77f30ba6881e08a630e7b2c30e"}, + {file = "librt-0.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:82fb4602d1b3e303a58bfe6165992b5a78d823ec646445356c332cd5f5bbaa61"}, + {file = "librt-0.8.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:4d3e38797eb482485b486898f89415a6ab163bc291476bd95712e42cf4383c05"}, + {file = "librt-0.8.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a905091a13e0884701226860836d0386b88c72ce5c2fdfba6618e14c72be9f25"}, + {file = "librt-0.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:375eda7acfce1f15f5ed56cfc960669eefa1ec8732e3e9087c3c4c3f2066759c"}, + {file = "librt-0.8.0-cp314-cp314t-win32.whl", hash = "sha256:2ccdd20d9a72c562ffb73098ac411de351b53a6fbb3390903b2d33078ef90447"}, + {file = "librt-0.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:25e82d920d4d62ad741592fcf8d0f3bda0e3fc388a184cb7d2f566c681c5f7b9"}, + {file = "librt-0.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:92249938ab744a5890580d3cb2b22042f0dce71cdaa7c1369823df62bedf7cbc"}, + {file = "librt-0.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4b705f85311ee76acec5ee70806990a51f0deb519ea0c29c1d1652d79127604d"}, + {file = "librt-0.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7ce0a8cb67e702dcb06342b2aaaa3da9fb0ddc670417879adfa088b44cf7b3b6"}, + {file = "librt-0.8.0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:aaadec87f45a3612b6818d1db5fbfe93630669b7ee5d6bdb6427ae08a1aa2141"}, + {file = "librt-0.8.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56901f1eec031396f230db71c59a01d450715cbbef9856bf636726994331195d"}, + {file = "librt-0.8.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b055bb3abaf69abed25743d8fc1ab691e4f51a912ee0a6f9a6c84f4bbddb283d"}, + {file = "librt-0.8.0-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1ef3bd856373cf8e7382402731f43bfe978a8613b4039e49e166e1e0dc590216"}, + {file = "librt-0.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e0ffe88ebb5962f8fb0ddcbaaff30f1ea06a79501069310e1e030eafb1ad787"}, + {file = "librt-0.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82e61cd1c563745ad495387c3b65806bfd453badb4adbc019df3389dddee1bf6"}, + {file = "librt-0.8.0-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:667e2513cf69bfd1e1ed9a00d6c736d5108714ec071192afb737987955888a25"}, + {file = "librt-0.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b6caff69e25d80c269b1952be8493b4d94ef745f438fa619d7931066bdd26de"}, + {file = "librt-0.8.0-cp39-cp39-win32.whl", hash = 
"sha256:02a9fe85410cc9bef045e7cb7fd26fdde6669e6d173f99df659aa7f6335961e9"}, + {file = "librt-0.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:de076eaba208d16efb5962f99539867f8e2c73480988cb513fcf1b5dbb0c9dcf"}, + {file = "librt-0.8.0.tar.gz", hash = "sha256:cb74cdcbc0103fc988e04e5c58b0b31e8e5dd2babb9182b6f9490488eb36324b"}, +] + [[package]] name = "license-expression" -version = "30.4.1" +version = "30.4.4" description = "license-expression is a comprehensive utility library to parse, compare, simplify and normalize license expressions (such as SPDX license expressions) using boolean logic." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "license_expression-30.4.1-py3-none-any.whl", hash = "sha256:679646bc3261a17690494a3e1cada446e5ee342dbd87dcfa4a0c24cc5dce13ee"}, - {file = "license_expression-30.4.1.tar.gz", hash = "sha256:9f02105f9e0fcecba6a85dfbbed7d94ea1c3a70cf23ddbfb5adf3438a6f6fce0"}, + {file = "license_expression-30.4.4-py3-none-any.whl", hash = "sha256:421788fdcadb41f049d2dc934ce666626265aeccefddd25e162a26f23bcbf8a4"}, + {file = "license_expression-30.4.4.tar.gz", hash = "sha256:73448f0aacd8d0808895bdc4b2c8e01a8d67646e4188f887375398c761f340fd"}, ] [package.dependencies] "boolean.py" = ">=4.0" [package.extras] -docs = ["Sphinx (>=5.0.2)", "doc8 (>=0.11.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-reredirects (>=0.1.2)", "sphinx-rtd-dark-mode (>=1.3.0)", "sphinx-rtd-theme (>=1.0.0)", "sphinxcontrib-apidoc (>=0.4.0)"] -testing = ["black", "isort", "pytest (>=6,!=7.0.0)", "pytest-xdist (>=2)", "twine"] +dev = ["Sphinx (>=5.0.2)", "doc8 (>=0.11.2)", "pytest (>=7.0.1)", "pytest-xdist (>=2)", "ruff", "sphinx-autobuild", "sphinx-copybutton", "sphinx-reredirects (>=0.1.2)", "sphinx-rtd-dark-mode (>=1.3.0)", "sphinx-rtd-theme (>=1.0.0)", "sphinxcontrib-apidoc (>=0.4.0)", "twine"] + +[[package]] +name = "licenselynx" +version = "2.0.2" +description = "Deterministically map license strings to its canonical identifier" +optional = false +python-versions = ">=3.11" +groups = ["main"] +files = [ + {file = "licenselynx-2.0.2-py3-none-any.whl", hash = "sha256:24b9b99de59379351271c32df54b54f6cb27e3529368c4f23c10ec265159befe"}, + {file = "licenselynx-2.0.2.tar.gz", hash = "sha256:070c8d6d672b1dbf4da5629fb224a92596a3be7e63bc992a9a6f744e8e47a905"}, +] [[package]] name = "markupsafe" -version = "3.0.2" +version = "3.0.3" description = "Safely add untrusted strings to HTML/XML markup." 
optional = true python-versions = ">=3.9" -files = [ - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, - {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, - {file = 
"MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, - {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +groups = ["main"] +markers = "extra == \"dev\"" +files = [ + {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, + {file = 
"markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, + {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, + {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, + {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, + {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, + {file = 
"markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, + {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = 
"sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"}, + {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"}, + {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, ] [[package]] name = "matplotlib-inline" -version = "0.1.7" +version = "0.2.1" description = "Inline Matplotlib backend for Jupyter" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"dev\"" files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, + {file = "matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76"}, + {file = "matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe"}, ] [package.dependencies] traitlets = "*" +[package.extras] +test = ["flake8", "nbdime", "nbval", "notebook", "pytest"] + [[package]] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra 
== \"code-quality\"" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -1387,65 +1747,69 @@ files = [ [[package]] name = "more-itertools" -version = "10.5.0" +version = "10.8.0" description = "More routines for operating on iterables, beyond itertools" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"}, - {file = "more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"}, + {file = "more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b"}, + {file = "more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd"}, ] [[package]] name = "mypy" -version = "1.14.1" +version = "1.19.1" description = "Optional static typing for Python" optional = true -python-versions = ">=3.8" -files = [ - {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, - {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, - {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d"}, - {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b"}, - {file = "mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427"}, - {file = "mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f"}, - {file = "mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c"}, - {file = "mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1"}, - {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8"}, - {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f"}, - {file = "mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1"}, - {file = "mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae"}, - {file = "mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14"}, - {file = "mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9"}, - {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11"}, - {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e"}, - {file = "mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89"}, - {file = "mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b"}, - {file = "mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255"}, - {file = "mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34"}, - {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a"}, - {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9"}, - {file = "mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd"}, - {file = "mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107"}, - {file = "mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31"}, - {file = "mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6"}, - {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319"}, - {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac"}, - {file = "mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b"}, - {file = "mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837"}, - {file = "mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35"}, - {file = "mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc"}, - {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9"}, - {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb"}, - {file = "mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60"}, - {file = "mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c"}, - {file = "mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1"}, - {file = "mypy-1.14.1.tar.gz", hash = 
"sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6"}, +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"code-quality\"" +files = [ + {file = "mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec"}, + {file = "mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74"}, + {file = "mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1"}, + {file = "mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331"}, + {file = "mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925"}, + {file = "mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8"}, + {file = "mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a"}, + {file = "mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e"}, + {file = 
"mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef"}, + {file = "mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75"}, + {file = "mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045"}, + {file = "mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957"}, + {file = "mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f"}, + {file = "mypy-1.19.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7bcfc336a03a1aaa26dfce9fff3e287a3ba99872a157561cbfcebe67c13308e3"}, + {file = "mypy-1.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b7951a701c07ea584c4fe327834b92a30825514c868b1f69c30445093fdd9d5a"}, + {file = "mypy-1.19.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b13cfdd6c87fc3efb69ea4ec18ef79c74c3f98b4e5498ca9b85ab3b2c2329a67"}, + {file = "mypy-1.19.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f28f99c824ecebcdaa2e55d82953e38ff60ee5ec938476796636b86afa3956e"}, + {file = "mypy-1.19.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c608937067d2fc5a4dd1a5ce92fd9e1398691b8c5d012d66e1ddd430e9244376"}, + {file = "mypy-1.19.1-cp39-cp39-win_amd64.whl", hash = "sha256:409088884802d511ee52ca067707b90c883426bd95514e8cfda8281dc2effe24"}, + {file = "mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247"}, + {file = "mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba"}, ] [package.dependencies] +librt = {version = ">=0.6.2", markers = "platform_python_implementation != \"PyPy\""} mypy_extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +pathspec = ">=0.9.0" typing_extensions = ">=4.6.0" [package.extras] @@ -1457,35 +1821,27 @@ reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "1.0.0" +version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = true -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nodeenv" -version = "1.9.1" -description = "Node.js virtual environment builder" -optional = true -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, - {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] [[package]] name = "oauthlib" -version = "3.2.2" +version = "3.3.1" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, + {file = "oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1"}, + {file = "oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9"}, ] [package.extras] @@ -1499,6 +1855,7 @@ version = "3.1.5" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, @@ -1509,13 +1866,14 @@ et-xmlfile = "*" [[package]] name = "packageurl-python" -version = "0.16.0" +version = "0.17.6" description = "A purl aka. 
Package URL parser and builder" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "packageurl_python-0.16.0-py3-none-any.whl", hash = "sha256:5c3872638b177b0f1cf01c3673017b7b27ebee485693ae12a8bed70fa7fa7c35"}, - {file = "packageurl_python-0.16.0.tar.gz", hash = "sha256:69e3bf8a3932fe9c2400f56aaeb9f86911ecee2f9398dbe1b58ec34340be365d"}, + {file = "packageurl_python-0.17.6-py3-none-any.whl", hash = "sha256:31a85c2717bc41dd818f3c62908685ff9eebcb68588213745b14a6ee9e7df7c9"}, + {file = "packageurl_python-0.17.6.tar.gz", hash = "sha256:1252ce3a102372ca6f86eb968e16f9014c4ba511c5c37d95a7f023e2ca6e5c25"}, ] [package.extras] @@ -1526,47 +1884,60 @@ test = ["pytest"] [[package]] name = "packaging" -version = "24.2" +version = "26.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529"}, + {file = "packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4"}, ] [[package]] name = "parso" -version = "0.8.4" +version = "0.8.6" description = "A Python Parser" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"dev\"" files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, + {file = "parso-0.8.6-py2.py3-none-any.whl", hash = "sha256:2c549f800b70a5c4952197248825584cb00f033b29c692671d3bf08bf380baff"}, + {file = "parso-0.8.6.tar.gz", hash = "sha256:2b9a0332696df97d454fa67b81618fd69c35a7b90327cbe6ba5c92d2c68a7bfd"}, ] [package.extras] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +qa = ["flake8 (==5.0.4)", "types-setuptools (==67.2.0.1)", "zuban (==0.5.1)"] testing = ["docopt", "pytest"] [[package]] name = "pathspec" -version = "0.12.1" +version = "1.0.4" description = "Utility library for gitignore style pattern matching of file paths." optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, + {file = "pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723"}, + {file = "pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645"}, ] +[package.extras] +hyperscan = ["hyperscan (>=0.7)"] +optional = ["typing-extensions (>=4)"] +re2 = ["google-re2 (>=1.1)"] +tests = ["pytest (>=9)", "typing-extensions (>=4.15)"] + [[package]] name = "pexpect" version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." 
optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"dev\" and sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -1575,148 +1946,42 @@ files = [ [package.dependencies] ptyprocess = ">=0.5" -[[package]] -name = "pillow" -version = "11.1.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, - {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, - {file = "pillow-11.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07dba04c5e22824816b2615ad7a7484432d7f540e6fa86af60d2de57b0fcee2"}, - {file = "pillow-11.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e267b0ed063341f3e60acd25c05200df4193e15a4a5807075cd71225a2386e26"}, - {file = "pillow-11.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bd165131fd51697e22421d0e467997ad31621b74bfc0b75956608cb2906dda07"}, - {file = "pillow-11.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:abc56501c3fd148d60659aae0af6ddc149660469082859fa7b066a298bde9482"}, - {file = "pillow-11.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:54ce1c9a16a9561b6d6d8cb30089ab1e5eb66918cb47d457bd996ef34182922e"}, - {file = "pillow-11.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:73ddde795ee9b06257dac5ad42fcb07f3b9b813f8c1f7f870f402f4dc54b5269"}, - {file = "pillow-11.1.0-cp310-cp310-win32.whl", hash = "sha256:3a5fe20a7b66e8135d7fd617b13272626a28278d0e578c98720d9ba4b2439d49"}, - {file = "pillow-11.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:b6123aa4a59d75f06e9dd3dac5bf8bc9aa383121bb3dd9a7a612e05eabc9961a"}, - {file = "pillow-11.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:a76da0a31da6fcae4210aa94fd779c65c75786bc9af06289cd1c184451ef7a65"}, - {file = "pillow-11.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:e06695e0326d05b06833b40b7ef477e475d0b1ba3a6d27da1bb48c23209bf457"}, - {file = "pillow-11.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96f82000e12f23e4f29346e42702b6ed9a2f2fea34a740dd5ffffcc8c539eb35"}, - {file = "pillow-11.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3cd561ded2cf2bbae44d4605837221b987c216cff94f49dfeed63488bb228d2"}, - {file = "pillow-11.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f189805c8be5ca5add39e6f899e6ce2ed824e65fb45f3c28cb2841911da19070"}, - {file = "pillow-11.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dd0052e9db3474df30433f83a71b9b23bd9e4ef1de13d92df21a52c0303b8ab6"}, - {file = "pillow-11.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:837060a8599b8f5d402e97197d4924f05a2e0d68756998345c829c33186217b1"}, - {file = "pillow-11.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aa8dd43daa836b9a8128dbe7d923423e5ad86f50a7a14dc688194b7be5c0dea2"}, - {file = "pillow-11.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0a2f91f8a8b367e7a57c6e91cd25af510168091fb89ec5146003e424e1558a96"}, - {file = "pillow-11.1.0-cp311-cp311-win32.whl", hash = 
"sha256:c12fc111ef090845de2bb15009372175d76ac99969bdf31e2ce9b42e4b8cd88f"}, - {file = "pillow-11.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd43429d0d7ed6533b25fc993861b8fd512c42d04514a0dd6337fb3ccf22761"}, - {file = "pillow-11.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f7955ecf5609dee9442cbface754f2c6e541d9e6eda87fad7f7a989b0bdb9d71"}, - {file = "pillow-11.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2062ffb1d36544d42fcaa277b069c88b01bb7298f4efa06731a7fd6cc290b81a"}, - {file = "pillow-11.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a85b653980faad27e88b141348707ceeef8a1186f75ecc600c395dcac19f385b"}, - {file = "pillow-11.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9409c080586d1f683df3f184f20e36fb647f2e0bc3988094d4fd8c9f4eb1b3b3"}, - {file = "pillow-11.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fdadc077553621911f27ce206ffcbec7d3f8d7b50e0da39f10997e8e2bb7f6a"}, - {file = "pillow-11.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:93a18841d09bcdd774dcdc308e4537e1f867b3dec059c131fde0327899734aa1"}, - {file = "pillow-11.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9aa9aeddeed452b2f616ff5507459e7bab436916ccb10961c4a382cd3e03f47f"}, - {file = "pillow-11.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3cdcdb0b896e981678eee140d882b70092dac83ac1cdf6b3a60e2216a73f2b91"}, - {file = "pillow-11.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36ba10b9cb413e7c7dfa3e189aba252deee0602c86c309799da5a74009ac7a1c"}, - {file = "pillow-11.1.0-cp312-cp312-win32.whl", hash = "sha256:cfd5cd998c2e36a862d0e27b2df63237e67273f2fc78f47445b14e73a810e7e6"}, - {file = "pillow-11.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a697cd8ba0383bba3d2d3ada02b34ed268cb548b369943cd349007730c92bddf"}, - {file = "pillow-11.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:4dd43a78897793f60766563969442020e90eb7847463eca901e41ba186a7d4a5"}, - {file = "pillow-11.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae98e14432d458fc3de11a77ccb3ae65ddce70f730e7c76140653048c71bfcbc"}, - {file = "pillow-11.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cc1331b6d5a6e144aeb5e626f4375f5b7ae9934ba620c0ac6b3e43d5e683a0f0"}, - {file = "pillow-11.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:758e9d4ef15d3560214cddbc97b8ef3ef86ce04d62ddac17ad39ba87e89bd3b1"}, - {file = "pillow-11.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b523466b1a31d0dcef7c5be1f20b942919b62fd6e9a9be199d035509cbefc0ec"}, - {file = "pillow-11.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:9044b5e4f7083f209c4e35aa5dd54b1dd5b112b108648f5c902ad586d4f945c5"}, - {file = "pillow-11.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:3764d53e09cdedd91bee65c2527815d315c6b90d7b8b79759cc48d7bf5d4f114"}, - {file = "pillow-11.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31eba6bbdd27dde97b0174ddf0297d7a9c3a507a8a1480e1e60ef914fe23d352"}, - {file = "pillow-11.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b5d658fbd9f0d6eea113aea286b21d3cd4d3fd978157cbf2447a6035916506d3"}, - {file = "pillow-11.1.0-cp313-cp313-win32.whl", hash = "sha256:f86d3a7a9af5d826744fabf4afd15b9dfef44fe69a98541f666f66fbb8d3fef9"}, - {file = "pillow-11.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:593c5fd6be85da83656b93ffcccc2312d2d149d251e98588b14fbc288fd8909c"}, - {file = "pillow-11.1.0-cp313-cp313-win_arm64.whl", hash = 
"sha256:11633d58b6ee5733bde153a8dafd25e505ea3d32e261accd388827ee987baf65"}, - {file = "pillow-11.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:70ca5ef3b3b1c4a0812b5c63c57c23b63e53bc38e758b37a951e5bc466449861"}, - {file = "pillow-11.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8000376f139d4d38d6851eb149b321a52bb8893a88dae8ee7d95840431977081"}, - {file = "pillow-11.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ee85f0696a17dd28fbcfceb59f9510aa71934b483d1f5601d1030c3c8304f3c"}, - {file = "pillow-11.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:dd0e081319328928531df7a0e63621caf67652c8464303fd102141b785ef9547"}, - {file = "pillow-11.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e63e4e5081de46517099dc30abe418122f54531a6ae2ebc8680bcd7096860eab"}, - {file = "pillow-11.1.0-cp313-cp313t-win32.whl", hash = "sha256:dda60aa465b861324e65a78c9f5cf0f4bc713e4309f83bc387be158b077963d9"}, - {file = "pillow-11.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ad5db5781c774ab9a9b2c4302bbf0c1014960a0a7be63278d13ae6fdf88126fe"}, - {file = "pillow-11.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:67cd427c68926108778a9005f2a04adbd5e67c442ed21d95389fe1d595458756"}, - {file = "pillow-11.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:bf902d7413c82a1bfa08b06a070876132a5ae6b2388e2712aab3a7cbc02205c6"}, - {file = "pillow-11.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c1eec9d950b6fe688edee07138993e54ee4ae634c51443cfb7c1e7613322718e"}, - {file = "pillow-11.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e275ee4cb11c262bd108ab2081f750db2a1c0b8c12c1897f27b160c8bd57bbc"}, - {file = "pillow-11.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db853948ce4e718f2fc775b75c37ba2efb6aaea41a1a5fc57f0af59eee774b2"}, - {file = "pillow-11.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:ab8a209b8485d3db694fa97a896d96dd6533d63c22829043fd9de627060beade"}, - {file = "pillow-11.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:54251ef02a2309b5eec99d151ebf5c9904b77976c8abdcbce7891ed22df53884"}, - {file = "pillow-11.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5bb94705aea800051a743aa4874bb1397d4695fb0583ba5e425ee0328757f196"}, - {file = "pillow-11.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89dbdb3e6e9594d512780a5a1c42801879628b38e3efc7038094430844e271d8"}, - {file = "pillow-11.1.0-cp39-cp39-win32.whl", hash = "sha256:e5449ca63da169a2e6068dd0e2fcc8d91f9558aba89ff6d02121ca8ab11e79e5"}, - {file = "pillow-11.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:3362c6ca227e65c54bf71a5f88b3d4565ff1bcbc63ae72c34b07bbb1cc59a43f"}, - {file = "pillow-11.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:b20be51b37a75cc54c2c55def3fa2c65bb94ba859dde241cd0a4fd302de5ae0a"}, - {file = "pillow-11.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8c730dc3a83e5ac137fbc92dfcfe1511ce3b2b5d7578315b63dbbb76f7f51d90"}, - {file = "pillow-11.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d33d2fae0e8b170b6a6c57400e077412240f6f5bb2a342cf1ee512a787942bb"}, - {file = "pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8d65b38173085f24bc07f8b6c505cbb7418009fa1a1fcb111b1f4961814a442"}, - {file = "pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:015c6e863faa4779251436db398ae75051469f7c903b043a48f078e437656f83"}, - {file = 
"pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d44ff19eea13ae4acdaaab0179fa68c0c6f2f45d66a4d8ec1eda7d6cecbcc15f"}, - {file = "pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3d8da4a631471dfaf94c10c85f5277b1f8e42ac42bade1ac67da4b4a7359b73"}, - {file = "pillow-11.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4637b88343166249fe8aa94e7c4a62a180c4b3898283bb5d3d2fd5fe10d8e4e0"}, - {file = "pillow-11.1.0.tar.gz", hash = "sha256:368da70808b36d73b4b390a8ffac11069f8a5c85f29eff1f1b01bcf3ef5b2a20"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] -fpx = ["olefile"] -mic = ["olefile"] -tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"] -typing = ["typing-extensions"] -xmp = ["defusedxml"] - [[package]] name = "platformdirs" -version = "4.3.6" +version = "4.8.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = true -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, + {file = "platformdirs-4.8.0-py3-none-any.whl", hash = "sha256:1c1328b4d2ea997bbcb904175a9bde14e824a3fa79f751ea3888d63d7d727557"}, + {file = "platformdirs-4.8.0.tar.gz", hash = "sha256:c1d4a51ab04087041dd602707fbe7ee8b62b64e590f30e336e5c99c2d0c542d2"}, ] -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] - [[package]] name = "ply" version = "3.11" description = "Python Lex & Yacc" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, ] -[[package]] -name = "pre-commit" -version = "4.0.1" -description = "A framework for managing and maintaining multi-language pre-commit hooks." 
-optional = true -python-versions = ">=3.9" -files = [ - {file = "pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878"}, - {file = "pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - [[package]] name = "prompt-toolkit" -version = "3.0.48" +version = "3.0.52" description = "Library for building powerful interactive command lines in Python" optional = true -python-versions = ">=3.7.0" +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ - {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, - {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, + {file = "prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955"}, + {file = "prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855"}, ] [package.dependencies] @@ -1724,99 +1989,93 @@ wcwidth = "*" [[package]] name = "psycopg" -version = "3.2.3" +version = "3.3.2" description = "PostgreSQL database adapter for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "psycopg-3.2.3-py3-none-any.whl", hash = "sha256:644d3973fe26908c73d4be746074f6e5224b03c1101d302d9a53bf565ad64907"}, - {file = "psycopg-3.2.3.tar.gz", hash = "sha256:a5764f67c27bec8bfac85764d23c534af2c27b893550377e37ce59c12aac47a2"}, + {file = "psycopg-3.3.2-py3-none-any.whl", hash = "sha256:3e94bc5f4690247d734599af56e51bae8e0db8e4311ea413f801fef82b14a99b"}, + {file = "psycopg-3.3.2.tar.gz", hash = "sha256:707a67975ee214d200511177a6a80e56e654754c9afca06a7194ea6bbfde9ca7"}, ] [package.dependencies] -psycopg-binary = {version = "3.2.3", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} +psycopg-binary = {version = "3.3.2", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.2.3)"] -c = ["psycopg-c (==3.2.3)"] -dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.11)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +binary = ["psycopg-binary (==3.3.2) ; implementation_name != \"pypy\""] +c = ["psycopg-c (==3.3.2) ; implementation_name != \"pypy\""] +dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "cython-lint (>=0.16)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.19.0)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "types-shapely (>=2.0)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] -test = ["anyio (>=4.0)", "mypy (>=1.11)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] +test = ["anyio (>=4.0)", "mypy (>=1.19.0) ; implementation_name != \"pypy\"", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", 
"pytest-randomly (>=3.5)"] [[package]] name = "psycopg-binary" -version = "3.2.3" +version = "3.3.2" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false -python-versions = ">=3.8" -files = [ - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:965455eac8547f32b3181d5ec9ad8b9be500c10fe06193543efaaebe3e4ce70c"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:71adcc8bc80a65b776510bc39992edf942ace35b153ed7a9c6c573a6849ce308"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f73adc05452fb85e7a12ed3f69c81540a8875960739082e6ea5e28c373a30774"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8630943143c6d6ca9aefc88bbe5e76c90553f4e1a3b2dc339e67dc34aa86f7e"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bffb61e198a91f712cc3d7f2d176a697cb05b284b2ad150fb8edb308eba9002"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4fa2240c9fceddaa815a58f29212826fafe43ce80ff666d38c4a03fb036955"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:192a5f8496e6e1243fdd9ac20e117e667c0712f148c5f9343483b84435854c78"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64dc6e9ec64f592f19dc01a784e87267a64a743d34f68488924251253da3c818"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:79498df398970abcee3d326edd1d4655de7d77aa9aecd578154f8af35ce7bbd2"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:949551752930d5e478817e0b49956350d866b26578ced0042a61967e3fcccdea"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:80a2337e2dfb26950894c8301358961430a0304f7bfe729d34cc036474e9c9b1"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6d8f2144e0d5808c2e2aed40fbebe13869cd00c2ae745aca4b3b16a435edb056"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94253be2b57ef2fea7ffe08996067aabf56a1eb9648342c9e3bad9e10c46e045"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fda0162b0dbfa5eaed6cdc708179fa27e148cb8490c7d62e5cf30713909658ea"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c0419cdad8c70eaeb3116bb28e7b42d546f91baf5179d7556f230d40942dc78"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74fbf5dd3ef09beafd3557631e282f00f8af4e7a78fbfce8ab06d9cd5a789aae"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d784f614e4d53050cbe8abf2ae9d1aaacf8ed31ce57b42ce3bf2a48a66c3a5c"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4e76ce2475ed4885fe13b8254058be710ec0de74ebd8ef8224cf44a9a3358e5f"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5938b257b04c851c2d1e6cb2f8c18318f06017f35be9a5fe761ee1e2e344dfb7"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:257c4aea6f70a9aef39b2a77d0658a41bf05c243e2bf41895eb02220ac6306f3"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:06b5cc915e57621eebf2393f4173793ed7e3387295f07fed93ed3fb6a6ccf585"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:09baa041856b35598d335b1a74e19a49da8500acedf78164600694c0ba8ce21b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:48f8ca6ee8939bab760225b2ab82934d54330eec10afe4394a92d3f2a0c37dd6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5361ea13c241d4f0ec3f95e0bf976c15e2e451e9cc7ef2e5ccfc9d170b197a40"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb987f14af7da7c24f803111dbc7392f5070fd350146af3345103f76ea82e339"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0463a11b1cace5a6aeffaf167920707b912b8986a9c7920341c75e3686277920"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b7be9a6c06518967b641fb15032b1ed682fd3b0443f64078899c61034a0bca6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64a607e630d9f4b2797f641884e52b9f8e239d35943f51bef817a384ec1678fe"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fa33ead69ed133210d96af0c63448b1385df48b9c0247eda735c5896b9e6dbbf"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1f8b0d0e99d8e19923e6e07379fa00570be5182c201a8c0b5aaa9a4d4a4ea20b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:709447bd7203b0b2debab1acec23123eb80b386f6c29e7604a5d4326a11e5bd6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5e37d5027e297a627da3551a1e962316d0f88ee4ada74c768f6c9234e26346d9"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:261f0031ee6074765096a19b27ed0f75498a8338c3dcd7f4f0d831e38adf12d1"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:41fdec0182efac66b27478ac15ef54c9ebcecf0e26ed467eb7d6f262a913318b"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:07d019a786eb020c0f984691aa1b994cb79430061065a694cf6f94056c603d26"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c57615791a337378fe5381143259a6c432cdcbb1d3e6428bfb7ce59fff3fb5c"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8eb9a4e394926b93ad919cad1b0a918e9b4c846609e8c1cfb6b743683f64da0"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5905729668ef1418bd36fbe876322dcb0f90b46811bba96d505af89e6fbdce2f"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd65774ed7d65101b314808b6893e1a75b7664f680c3ef18d2e5c84d570fa393"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:700679c02f9348a0d0a2adcd33a0275717cd0d0aee9d4482b47d935023629505"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:96334bb64d054e36fed346c50c4190bad9d7c586376204f50bede21a913bf942"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9099e443d4cc24ac6872e6a05f93205ba1a231b1a8917317b07c9ef2b955f1f4"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:1985ab05e9abebfbdf3163a16ebb37fbc5d49aff2bf5b3d7375ff0920bbb54cd"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:e90352d7b610b4693fad0feea48549d4315d10f1eba5605421c92bb834e90170"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:69320f05de8cdf4077ecd7fefdec223890eea232af0d58f2530cbda2871244a0"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4926ea5c46da30bec4a85907aa3f7e4ea6313145b2aa9469fdb861798daf1502"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c64c4cd0d50d5b2288ab1bcb26c7126c772bbdebdfadcd77225a77df01c4a57e"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05a1bdce30356e70a05428928717765f4a9229999421013f41338d9680d03a63"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad357e426b0ea5c3043b8ec905546fa44b734bf11d33b3da3959f6e4447d350"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:967b47a0fd237aa17c2748fdb7425015c394a6fb57cdad1562e46a6eb070f96d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:71db8896b942770ed7ab4efa59b22eee5203be2dfdee3c5258d60e57605d688c"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2773f850a778575dd7158a6dd072f7925b67f3ba305e2003538e8831fec77a1d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aeddf7b3b3f6e24ccf7d0edfe2d94094ea76b40e831c16eff5230e040ce3b76b"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:824c867a38521d61d62b60aca7db7ca013a2b479e428a0db47d25d8ca5067410"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:9994f7db390c17fc2bd4c09dca722fd792ff8a49bb3bdace0c50a83f22f1767d"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1303bf8347d6be7ad26d1362af2c38b3a90b8293e8d56244296488ee8591058e"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:842da42a63ecb32612bb7f5b9e9f8617eab9bc23bd58679a441f4150fcc51c96"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bb342a01c76f38a12432848e6013c57eb630103e7556cf79b705b53814c3949"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd40af959173ea0d087b6b232b855cfeaa6738f47cb2a0fd10a7f4fa8b74293f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9b60b465773a52c7d4705b0a751f7f1cdccf81dd12aee3b921b31a6e76b07b0e"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fc6d87a1c44df8d493ef44988a3ded751e284e02cdf785f746c2d357e99782a6"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f0b018e37608c3bfc6039a1dc4eb461e89334465a19916be0153c757a78ea426"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a29f5294b0b6360bfda69653697eff70aaf2908f58d1073b0acd6f6ab5b5a4f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:e56b1fd529e5dde2d1452a7d72907b37ed1b4f07fdced5d8fb1e963acfff6749"}, +python-versions = ">=3.10" +groups = ["main"] +markers = "implementation_name != \"pypy\"" +files = [ + {file = 
"psycopg_binary-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0768c5f32934bb52a5df098317eca9bdcf411de627c5dca2ee57662b64b54b41"}, + {file = "psycopg_binary-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:09b3014013f05cd89828640d3a1db5f829cc24ad8fa81b6e42b2c04685a0c9d4"}, + {file = "psycopg_binary-3.3.2-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:3789d452a9d17a841c7f4f97bbcba51a21f957ea35641a4c98507520e6b6a068"}, + {file = "psycopg_binary-3.3.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:44e89938d36acc4495735af70a886d206a5bfdc80258f95b69b52f68b2968d9e"}, + {file = "psycopg_binary-3.3.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90ed9da805e52985b0202aed4f352842c907c6b4fc6c7c109c6e646c32e2f43b"}, + {file = "psycopg_binary-3.3.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c3a9ccdfee4ae59cf9bf1822777e763bc097ed208f4901e21537fca1070e1391"}, + {file = "psycopg_binary-3.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:de9173f8cc0efd88ac2a89b3b6c287a9a0011cdc2f53b2a12c28d6fd55f9f81c"}, + {file = "psycopg_binary-3.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0611f4822674f3269e507a307236efb62ae5a828fcfc923ac85fe22ca19fd7c8"}, + {file = "psycopg_binary-3.3.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:522b79c7db547767ca923e441c19b97a2157f2f494272a119c854bba4804e186"}, + {file = "psycopg_binary-3.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1ea41c0229f3f5a3844ad0857a83a9f869aa7b840448fa0c200e6bcf85d33d19"}, + {file = "psycopg_binary-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:8ea05b499278790a8fa0ff9854ab0de2542aca02d661ddff94e830df971ff640"}, + {file = "psycopg_binary-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:94503b79f7da0b65c80d0dbb2f81dd78b300319ec2435d5e6dcf9622160bc2fa"}, + {file = "psycopg_binary-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07a5f030e0902ec3e27d0506ceb01238c0aecbc73ecd7fa0ee55f86134600b5b"}, + {file = "psycopg_binary-3.3.2-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e09d0d93d35c134704a2cb2b15f81ffc8174fd602f3e08f7b1a3d8896156cf0"}, + {file = "psycopg_binary-3.3.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:649c1d33bedda431e0c1df646985fbbeb9274afa964e1aef4be053c0f23a2924"}, + {file = "psycopg_binary-3.3.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5774272f754605059521ff037a86e680342e3847498b0aa86b0f3560c70963c"}, + {file = "psycopg_binary-3.3.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d391b70c9cc23f6e1142729772a011f364199d2c5ddc0d596f5f43316fbf982d"}, + {file = "psycopg_binary-3.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f3f601f32244a677c7b029ec39412db2772ad04a28bc2cbb4b1f0931ed0ffad7"}, + {file = "psycopg_binary-3.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0ae60e910531cfcc364a8f615a7941cac89efeb3f0fffe0c4824a6d11461eef7"}, + {file = "psycopg_binary-3.3.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c43a773dd1a481dbb2fe64576aa303d80f328cce0eae5e3e4894947c41d1da7"}, + {file = "psycopg_binary-3.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5a327327f1188b3fbecac41bf1973a60b86b2eb237db10dc945bd3dc97ec39e4"}, + {file = "psycopg_binary-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:136c43f185244893a527540307167f5d3ef4e08786508afe45d6f146228f5aa9"}, + {file = 
"psycopg_binary-3.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a9387ab615f929e71ef0f4a8a51e986fa06236ccfa9f3ec98a88f60fbf230634"}, + {file = "psycopg_binary-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3ff7489df5e06c12d1829544eaec64970fe27fe300f7cf04c8495fe682064688"}, + {file = "psycopg_binary-3.3.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:9742580ecc8e1ac45164e98d32ca6df90da509c2d3ff26be245d94c430f92db4"}, + {file = "psycopg_binary-3.3.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d45acedcaa58619355f18e0f42af542fcad3fd84ace4b8355d3a5dea23318578"}, + {file = "psycopg_binary-3.3.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d88f32ff8c47cb7f4e7e7a9d1747dcee6f3baa19ed9afa9e5694fd2fb32b61ed"}, + {file = "psycopg_binary-3.3.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:59d0163c4617a2c577cb34afbed93d7a45b8c8364e54b2bd2020ff25d5f5f860"}, + {file = "psycopg_binary-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e750afe74e6c17b2c7046d2c3e3173b5a3f6080084671c8aa327215323df155b"}, + {file = "psycopg_binary-3.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f26f113013c4dcfbfe9ced57b5bad2035dda1a7349f64bf726021968f9bccad3"}, + {file = "psycopg_binary-3.3.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8309ee4569dced5e81df5aa2dcd48c7340c8dee603a66430f042dfbd2878edca"}, + {file = "psycopg_binary-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c6464150e25b68ae3cb04c4e57496ea11ebfaae4d98126aea2f4702dd43e3c12"}, + {file = "psycopg_binary-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:716a586f99bbe4f710dc58b40069fcb33c7627e95cc6fc936f73c9235e07f9cf"}, + {file = "psycopg_binary-3.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fc5a189e89cbfff174588665bb18d28d2d0428366cc9dae5864afcaa2e57380b"}, + {file = "psycopg_binary-3.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:083c2e182be433f290dc2c516fd72b9b47054fcd305cce791e0a50d9e93e06f2"}, + {file = "psycopg_binary-3.3.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:ac230e3643d1c436a2dfb59ca84357dfc6862c9f372fc5dbd96bafecae581f9f"}, + {file = "psycopg_binary-3.3.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d8c899a540f6c7585cee53cddc929dd4d2db90fd828e37f5d4017b63acbc1a5d"}, + {file = "psycopg_binary-3.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:50ff10ab8c0abdb5a5451b9315538865b50ba64c907742a1385fdf5f5772b73e"}, + {file = "psycopg_binary-3.3.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:23d2594af848c1fd3d874a9364bef50730124e72df7bb145a20cb45e728c50ed"}, + {file = "psycopg_binary-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ea4fe6b4ead3bbbe27244ea224fcd1f53cb119afc38b71a2f3ce570149a03e30"}, + {file = "psycopg_binary-3.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:742ce48cde825b8e52fb1a658253d6d1ff66d152081cbc76aa45e2986534858d"}, + {file = "psycopg_binary-3.3.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e22bf6b54df994aff37ab52695d635f1ef73155e781eee1f5fa75bc08b58c8da"}, + {file = "psycopg_binary-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8db9034cde3bcdafc66980f0130813f5c5d19e74b3f2a19fb3cfbc25ad113121"}, + {file = "psycopg_binary-3.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:df65174c7cf6b05ea273ce955927d3270b3a6e27b0b12762b009ce6082b8d3fc"}, + {file 
= "psycopg_binary-3.3.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:9ca24062cd9b2270e4d77576042e9cc2b1d543f09da5aba1f1a3d016cea28390"}, + {file = "psycopg_binary-3.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c749770da0947bc972e512f35366dd4950c0e34afad89e60b9787a37e97cb443"}, + {file = "psycopg_binary-3.3.2-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:03b7cd73fb8c45d272a34ae7249713e32492891492681e3cf11dff9531cf37e9"}, + {file = "psycopg_binary-3.3.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:43b130e3b6edcb5ee856c7167ccb8561b473308c870ed83978ae478613764f1c"}, + {file = "psycopg_binary-3.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7c1feba5a8c617922321aef945865334e468337b8fc5c73074f5e63143013b5a"}, + {file = "psycopg_binary-3.3.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cabb2a554d9a0a6bf84037d86ca91782f087dfff2a61298d0b00c19c0bc43f6d"}, + {file = "psycopg_binary-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:74bc306c4b4df35b09bc8cecf806b271e1c5d708f7900145e4e54a2e5dedfed0"}, + {file = "psycopg_binary-3.3.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:d79b0093f0fbf7a962d6a46ae292dc056c65d16a8ee9361f3cfbafd4c197ab14"}, + {file = "psycopg_binary-3.3.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:1586e220be05547c77afc326741dd41cc7fba38a81f9931f616ae98865439678"}, + {file = "psycopg_binary-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:458696a5fa5dad5b6fb5d5862c22454434ce4fe1cf66ca6c0de5f904cbc1ae3e"}, + {file = "psycopg_binary-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:04bb2de4ba69d6f8395b446ede795e8884c040ec71d01dd07ac2b2d18d4153d1"}, ] [[package]] @@ -1825,6 +2084,8 @@ version = "0.7.0" description = "Run a subprocess in a pseudo terminal" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"dev\" and sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -1836,6 +2097,8 @@ version = "0.2.3" description = "Safely evaluate AST nodes without side effects" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, @@ -1846,121 +2109,242 @@ tests = ["pytest"] [[package]] name = "py-ocsf-models" -version = "0.2.0" +version = "0.8.1" description = "This is a Python implementation of the OCSF models. The models are used to represent the data of the OCSF Schema defined in https://schema.ocsf.io/." 
optional = false -python-versions = "<3.13,>=3.9" +python-versions = "<3.15,>3.9.1" +groups = ["main"] files = [ - {file = "py_ocsf_models-0.2.0-py3-none-any.whl", hash = "sha256:ac75fd21077694b343ebaad3479194db113c274879b114277560ff287d5cd7b5"}, - {file = "py_ocsf_models-0.2.0.tar.gz", hash = "sha256:3e12648d05329e6776a0e6b1ffea87a3eb60aa7d8cb2c4afd69e5724f443ce03"}, + {file = "py_ocsf_models-0.8.1-py3-none-any.whl", hash = "sha256:061eb446c4171534c09a8b37f5a9d2a2fe9f87c5db32edbd1182446bc5fd097e"}, + {file = "py_ocsf_models-0.8.1.tar.gz", hash = "sha256:c9045237857f951e073c9f9d1f57954c90d86875b469260725292d47f7a7d73c"}, ] [package.dependencies] -cryptography = "43.0.1" +cryptography = ">=44.0.3,<47" email-validator = "2.2.0" -pydantic = "1.10.18" +pydantic = ">=2.12.0,<3.0.0" + +[[package]] +name = "py-serializable" +version = "2.1.0" +description = "Library for serializing and deserializing Python Objects to and from JSON and XML." +optional = false +python-versions = "<4.0,>=3.8" +groups = ["main"] +files = [ + {file = "py_serializable-2.1.0-py3-none-any.whl", hash = "sha256:b56d5d686b5a03ba4f4db5e769dc32336e142fc3bd4d68a8c25579ebb0a67304"}, + {file = "py_serializable-2.1.0.tar.gz", hash = "sha256:9d5db56154a867a9b897c0163b33a793c804c80cee984116d02d49e4578fc103"}, +] + +[package.dependencies] +defusedxml = ">=0.7.1,<0.8.0" [[package]] name = "pycodestyle" -version = "2.12.1" +version = "2.14.0" description = "Python style guide checker" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, - {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, + {file = "pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d"}, + {file = "pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783"}, ] [[package]] name = "pycparser" -version = "2.22" +version = "3.0" description = "C parser in Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] +markers = "implementation_name != \"PyPy\"" files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, + {file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"}, + {file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"}, ] [[package]] name = "pydantic" -version = "1.10.18" -description = "Data validation and settings management using python type hints" +version = "2.12.5" +description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pydantic-1.10.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e405ffcc1254d76bb0e760db101ee8916b620893e6edfbfee563b3c6f7a67c02"}, - {file = "pydantic-1.10.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e306e280ebebc65040034bff1a0a81fd86b2f4f05daac0131f29541cafd80b80"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:11d9d9b87b50338b1b7de4ebf34fd29fdb0d219dc07ade29effc74d3d2609c62"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b661ce52c7b5e5f600c0c3c5839e71918346af2ef20062705ae76b5c16914cab"}, - {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c20f682defc9ef81cd7eaa485879ab29a86a0ba58acf669a78ed868e72bb89e0"}, - {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5ae6b7c8483b1e0bf59e5f1843e4fd8fd405e11df7de217ee65b98eb5462861"}, - {file = "pydantic-1.10.18-cp310-cp310-win_amd64.whl", hash = "sha256:74fe19dda960b193b0eb82c1f4d2c8e5e26918d9cda858cbf3f41dd28549cb70"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72fa46abace0a7743cc697dbb830a41ee84c9db8456e8d77a46d79b537efd7ec"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef0fe7ad7cbdb5f372463d42e6ed4ca9c443a52ce544472d8842a0576d830da5"}, - {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00e63104346145389b8e8f500bc6a241e729feaf0559b88b8aa513dd2065481"}, - {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae6fa2008e1443c46b7b3a5eb03800121868d5ab6bc7cda20b5df3e133cde8b3"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9f463abafdc92635da4b38807f5b9972276be7c8c5121989768549fceb8d2588"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3445426da503c7e40baccefb2b2989a0c5ce6b163679dd75f55493b460f05a8f"}, - {file = "pydantic-1.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:467a14ee2183bc9c902579bb2f04c3d3dac00eff52e252850509a562255b2a33"}, - {file = "pydantic-1.10.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:efbc8a7f9cb5fe26122acba1852d8dcd1e125e723727c59dcd244da7bdaa54f2"}, - {file = "pydantic-1.10.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24a4a159d0f7a8e26bf6463b0d3d60871d6a52eac5bb6a07a7df85c806f4c048"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b74be007703547dc52e3c37344d130a7bfacca7df112a9e5ceeb840a9ce195c7"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcb20d4cb355195c75000a49bb4a31d75e4295200df620f454bbc6bdf60ca890"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46f379b8cb8a3585e3f61bf9ae7d606c70d133943f339d38b76e041ec234953f"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbfbca662ed3729204090c4d09ee4beeecc1a7ecba5a159a94b5a4eb24e3759a"}, - {file = "pydantic-1.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:c6d0a9f9eccaf7f438671a64acf654ef0d045466e63f9f68a579e2383b63f357"}, - {file = "pydantic-1.10.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d5492dbf953d7d849751917e3b2433fb26010d977aa7a0765c37425a4026ff1"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe734914977eed33033b70bfc097e1baaffb589517863955430bf2e0846ac30f"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15fdbe568beaca9aacfccd5ceadfb5f1a235087a127e8af5e48df9d8a45ae85c"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:c3e742f62198c9eb9201781fbebe64533a3bbf6a76a91b8d438d62b813079dbc"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19a3bd00b9dafc2cd7250d94d5b578edf7a0bd7daf102617153ff9a8fa37871c"}, - {file = "pydantic-1.10.18-cp37-cp37m-win_amd64.whl", hash = "sha256:2ce3fcf75b2bae99aa31bd4968de0474ebe8c8258a0110903478bd83dfee4e3b"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:335a32d72c51a313b33fa3a9b0fe283503272ef6467910338e123f90925f0f03"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34a3613c7edb8c6fa578e58e9abe3c0f5e7430e0fc34a65a415a1683b9c32d9a"}, - {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9ee4e6ca1d9616797fa2e9c0bfb8815912c7d67aca96f77428e316741082a1b"}, - {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23e8ec1ce4e57b4f441fc91e3c12adba023fedd06868445a5b5f1d48f0ab3682"}, - {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:44ae8a3e35a54d2e8fa88ed65e1b08967a9ef8c320819a969bfa09ce5528fafe"}, - {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5389eb3b48a72da28c6e061a247ab224381435256eb541e175798483368fdd3"}, - {file = "pydantic-1.10.18-cp38-cp38-win_amd64.whl", hash = "sha256:069b9c9fc645474d5ea3653788b544a9e0ccd3dca3ad8c900c4c6eac844b4620"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80b982d42515632eb51f60fa1d217dfe0729f008e81a82d1544cc392e0a50ddf"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aad8771ec8dbf9139b01b56f66386537c6fe4e76c8f7a47c10261b69ad25c2c9"}, - {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941a2eb0a1509bd7f31e355912eb33b698eb0051730b2eaf9e70e2e1589cae1d"}, - {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65f7361a09b07915a98efd17fdec23103307a54db2000bb92095457ca758d485"}, - {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6951f3f47cb5ca4da536ab161ac0163cab31417d20c54c6de5ddcab8bc813c3f"}, - {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a4c5eec138a9b52c67f664c7d51d4c7234c5ad65dd8aacd919fb47445a62c86"}, - {file = "pydantic-1.10.18-cp39-cp39-win_amd64.whl", hash = "sha256:49e26c51ca854286bffc22b69787a8d4063a62bf7d83dc21d44d2ff426108518"}, - {file = "pydantic-1.10.18-py3-none-any.whl", hash = "sha256:06a189b81ffc52746ec9c8c007f16e5167c8b0a696e1a726369327e3db7b2a82"}, - {file = "pydantic-1.10.18.tar.gz", hash = "sha256:baebdff1907d1d96a139c25136a9bb7d17e118f133a76a2ef3b845e831e3403a"}, + {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.6.0" +pydantic-core = "2.41.5" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +description = "Core functionality for Pydantic 
validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = 
"pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = 
"sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = 
"pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, +] + +[package.dependencies] +typing-extensions = ">=4.14.1" [[package]] name = "pyflakes" -version = "3.2.0" +version = "3.4.0" description = "passive checker of Python programs" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, - {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, + {file = "pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f"}, + {file = "pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58"}, ] [[package]] name = "pygments" -version = "2.19.1" +version = "2.19.2" description = "Pygments is a syntax highlighting package written in Python." 
optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, ] [package.extras] @@ -1972,6 +2356,7 @@ version = "2.10.1" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, @@ -1985,27 +2370,27 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "3.3.3" +version = "4.0.4" description = "python code static checker" optional = true -python-versions = ">=3.9.0" +python-versions = ">=3.10.0" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "pylint-3.3.3-py3-none-any.whl", hash = "sha256:26e271a2bc8bce0fc23833805a9076dd9b4d5194e2a02164942cb3cdc37b4183"}, - {file = "pylint-3.3.3.tar.gz", hash = "sha256:07c607523b17e6d16e2ae0d7ef59602e332caa762af64203c24b41c27139f36a"}, + {file = "pylint-4.0.4-py3-none-any.whl", hash = "sha256:63e06a37d5922555ee2c20963eb42559918c20bd2b21244e4ef426e7c43b92e0"}, + {file = "pylint-4.0.4.tar.gz", hash = "sha256:d9b71674e19b1c36d79265b5887bf8e55278cbe236c9e95d22dc82cf044fdbd2"}, ] [package.dependencies] -astroid = ">=3.3.8,<=3.4.0-dev0" +astroid = ">=4.0.2,<=4.1.dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ - {version = ">=0.2", markers = "python_version < \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, + {version = ">=0.3.6", markers = "python_version == \"3.11\""}, ] -isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +isort = ">=5,<5.13 || >5.13,<8" mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +platformdirs = ">=2.2" tomlkit = ">=0.10.1" [package.extras] @@ -2014,17 +2399,18 @@ testutils = ["gitpython (>3)"] [[package]] name = "pylint-django" -version = "2.6.1" +version = "2.7.0" description = "A Pylint plugin to help Pylint understand the Django web framework" optional = true python-versions = "<4.0,>=3.9" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "pylint-django-2.6.1.tar.gz", hash = "sha256:19e8c85a8573a04e3de7be2ba91e9a7c818ebf05e1b617be2bbae67a906b725f"}, - {file = "pylint_django-2.6.1-py3-none-any.whl", hash = "sha256:359f68fe8c810ee6bc8e1ab4c83c19b15a43b234a24b08978f47a23462b5ce28"}, + {file = "pylint_django-2.7.0-py3-none-any.whl", hash = "sha256:76ef7e7bbbcf7ee86adbb2beac0ffaa7232509a17bf4a488d81467a1bbaa215b"}, ] [package.dependencies] -pylint = ">=3.0,<4" +pylint = ">=3.0,<5" pylint-plugin-utils = ">=0.8" [package.extras] @@ -2032,13 +2418,15 @@ with-django = ["Django (>=2.2)"] [[package]] name = "pylint-plugin-utils" -version = "0.8.2" +version = "0.9.0" description = "Utilities and 
helpers for writing Pylint plugins" optional = true -python-versions = ">=3.7,<4.0" +python-versions = "<4.0,>=3.9" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "pylint_plugin_utils-0.8.2-py3-none-any.whl", hash = "sha256:ae11664737aa2effbf26f973a9e0b6779ab7106ec0adc5fe104b0907ca04e507"}, - {file = "pylint_plugin_utils-0.8.2.tar.gz", hash = "sha256:d3cebf68a38ba3fba23a873809155562571386d4c1b03e5b4c4cc26c3eee93e4"}, + {file = "pylint_plugin_utils-0.9.0-py3-none-any.whl", hash = "sha256:16e9b84e5326ba893a319a0323fcc8b4bcc9c71fc654fcabba0605596c673818"}, + {file = "pylint_plugin_utils-0.9.0.tar.gz", hash = "sha256:5468d763878a18d5cc4db46eaffdda14313b043c962a263a7d78151b90132055"}, ] [package.dependencies] @@ -2046,13 +2434,14 @@ pylint = ">=1.7" [[package]] name = "pymysql" -version = "1.1.1" +version = "1.1.2" description = "Pure Python MySQL Driver" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"}, - {file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"}, + {file = "pymysql-1.1.2-py3-none-any.whl", hash = "sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9"}, + {file = "pymysql-1.1.2.tar.gz", hash = "sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03"}, ] [package.extras] @@ -2061,131 +2450,263 @@ rsa = ["cryptography"] [[package]] name = "pyparsing" -version = "3.2.1" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" +version = "3.3.2" +description = "pyparsing - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, - {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, + {file = "pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d"}, + {file = "pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc"}, ] [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pytokens" +version = "0.4.1" +description = "A Fast, spec compliant Python 3.14+ tokenizer that runs on older Pythons." 
+optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"code-quality\"" +files = [ + {file = "pytokens-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a44ed93ea23415c54f3face3b65ef2b844d96aeb3455b8a69b3df6beab6acc5"}, + {file = "pytokens-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:add8bf86b71a5d9fb5b89f023a80b791e04fba57960aa790cc6125f7f1d39dfe"}, + {file = "pytokens-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:670d286910b531c7b7e3c0b453fd8156f250adb140146d234a82219459b9640c"}, + {file = "pytokens-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4e691d7f5186bd2842c14813f79f8884bb03f5995f0575272009982c5ac6c0f7"}, + {file = "pytokens-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:27b83ad28825978742beef057bfe406ad6ed524b2d28c252c5de7b4a6dd48fa2"}, + {file = "pytokens-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d70e77c55ae8380c91c0c18dea05951482e263982911fc7410b1ffd1dadd3440"}, + {file = "pytokens-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a58d057208cb9075c144950d789511220b07636dd2e4708d5645d24de666bdc"}, + {file = "pytokens-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b49750419d300e2b5a3813cf229d4e5a4c728dae470bcc89867a9ad6f25a722d"}, + {file = "pytokens-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9907d61f15bf7261d7e775bd5d7ee4d2930e04424bab1972591918497623a16"}, + {file = "pytokens-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee44d0f85b803321710f9239f335aafe16553b39106384cef8e6de40cb4ef2f6"}, + {file = "pytokens-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:140709331e846b728475786df8aeb27d24f48cbcf7bcd449f8de75cae7a45083"}, + {file = "pytokens-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d6c4268598f762bc8e91f5dbf2ab2f61f7b95bdc07953b602db879b3c8c18e1"}, + {file = "pytokens-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24afde1f53d95348b5a0eb19488661147285ca4dd7ed752bbc3e1c6242a304d1"}, + {file = "pytokens-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5ad948d085ed6c16413eb5fec6b3e02fa00dc29a2534f088d3302c47eb59adf9"}, + {file = "pytokens-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:3f901fe783e06e48e8cbdc82d631fca8f118333798193e026a50ce1b3757ea68"}, + {file = "pytokens-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8bdb9d0ce90cbf99c525e75a2fa415144fd570a1ba987380190e8b786bc6ef9b"}, + {file = "pytokens-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5502408cab1cb18e128570f8d598981c68a50d0cbd7c61312a90507cd3a1276f"}, + {file = "pytokens-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29d1d8fb1030af4d231789959f21821ab6325e463f0503a61d204343c9b355d1"}, + {file = "pytokens-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:970b08dd6b86058b6dc07efe9e98414f5102974716232d10f32ff39701e841c4"}, + {file = "pytokens-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:9bd7d7f544d362576be74f9d5901a22f317efc20046efe2034dced238cbbfe78"}, + {file = "pytokens-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4a14d5f5fc78ce85e426aa159489e2d5961acf0e47575e08f35584009178e321"}, + {file = 
"pytokens-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f50fd18543be72da51dd505e2ed20d2228c74e0464e4262e4899797803d7fa"}, + {file = "pytokens-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc74c035f9bfca0255c1af77ddd2d6ae8419012805453e4b0e7513e17904545d"}, + {file = "pytokens-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f66a6bbe741bd431f6d741e617e0f39ec7257ca1f89089593479347cc4d13324"}, + {file = "pytokens-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:b35d7e5ad269804f6697727702da3c517bb8a5228afa450ab0fa787732055fc9"}, + {file = "pytokens-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8fcb9ba3709ff77e77f1c7022ff11d13553f3c30299a9fe246a166903e9091eb"}, + {file = "pytokens-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79fc6b8699564e1f9b521582c35435f1bd32dd06822322ec44afdeba666d8cb3"}, + {file = "pytokens-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d31b97b3de0f61571a124a00ffe9a81fb9939146c122c11060725bd5aea79975"}, + {file = "pytokens-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:967cf6e3fd4adf7de8fc73cd3043754ae79c36475c1c11d514fc72cf5490094a"}, + {file = "pytokens-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:584c80c24b078eec1e227079d56dc22ff755e0ba8654d8383b2c549107528918"}, + {file = "pytokens-0.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:da5baeaf7116dced9c6bb76dc31ba04a2dc3695f3d9f74741d7910122b456edc"}, + {file = "pytokens-0.4.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11edda0942da80ff58c4408407616a310adecae1ddd22eef8c692fe266fa5009"}, + {file = "pytokens-0.4.1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0fc71786e629cef478cbf29d7ea1923299181d0699dbe7c3c0f4a583811d9fc1"}, + {file = "pytokens-0.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dcafc12c30dbaf1e2af0490978352e0c4041a7cde31f4f81435c2a5e8b9cabb6"}, + {file = "pytokens-0.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:42f144f3aafa5d92bad964d471a581651e28b24434d184871bd02e3a0d956037"}, + {file = "pytokens-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:34bcc734bd2f2d5fe3b34e7b3c0116bfb2397f2d9666139988e7a3eb5f7400e3"}, + {file = "pytokens-0.4.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:941d4343bf27b605e9213b26bfa1c4bf197c9c599a9627eb7305b0defcfe40c1"}, + {file = "pytokens-0.4.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3ad72b851e781478366288743198101e5eb34a414f1d5627cdd585ca3b25f1db"}, + {file = "pytokens-0.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:682fa37ff4d8e95f7df6fe6fe6a431e8ed8e788023c6bcc0f0880a12eab80ad1"}, + {file = "pytokens-0.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:30f51edd9bb7f85c748979384165601d028b84f7bd13fe14d3e065304093916a"}, + {file = "pytokens-0.4.1-py3-none-any.whl", hash = "sha256:26cef14744a8385f35d0e095dc8b3a7583f6c953c2e3d269c7f82484bf5ad2de"}, + {file = "pytokens-0.4.1.tar.gz", hash = "sha256:292052fe80923aae2260c073f822ceba21f3872ced9a68bb7953b348e561179a"}, +] + +[package.extras] +dev = ["black", "build", "mypy", "pytest", "pytest-cov", "setuptools", "tox", "twine", "wheel"] + [[package]] name = "pyyaml" -version = "6.0.2" +version = "6.0.3" description = "YAML 
parser and emitter for Python" optional = false python-versions = ">=3.8" -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = 
"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +groups = ["main"] +files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file 
= "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = 
"pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, ] [[package]] name = "rdflib" -version = "7.1.2" +version = "7.6.0" description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." 
optional = false -python-versions = "<4,>=3.9" +python-versions = ">=3.8.1" +groups = ["main"] files = [ - {file = "rdflib-7.1.2-py3-none-any.whl", hash = "sha256:5a694a64f48a751079999c37dccf91a6210077d845d09adf7c3ce23a876265a7"}, - {file = "rdflib-7.1.2.tar.gz", hash = "sha256:4fc8f6d50b199dc38fbc5256370f038c1cedca6102ccbde4e37c0fd2b7f36e65"}, + {file = "rdflib-7.6.0-py3-none-any.whl", hash = "sha256:30c0a3ebf4c0e09215f066be7246794b6492e054e782d7ac2a34c9f70a15e0dd"}, + {file = "rdflib-7.6.0.tar.gz", hash = "sha256:6c831288d5e4a5a7ece85d0ccde9877d512a3d0f02d7c06455d00d6d0ea379df"}, ] [package.dependencies] -isodate = {version = ">=0.7.2,<1.0.0", markers = "python_version < \"3.11\""} -pyparsing = ">=3.2.0,<4" +pyparsing = ">=2.1.0,<4" [package.extras] -berkeleydb = ["berkeleydb (>18.1.0)"] -html = ["html5rdf (>=1.2.1,<2)"] -lxml = ["lxml (>=4.8.0,<6.0)", "lxml (>=4.9.3,<6.0)"] +berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] +graphdb = ["httpx (>=0.28.1,<0.29.0)"] +html = ["html5rdf (>=1.2,<2)"] +lxml = ["lxml (>=4.3,<6.0)"] networkx = ["networkx (>=2,<4)"] orjson = ["orjson (>=3.9.14,<4)"] +rdf4j = ["httpx (>=0.28.1,<0.29.0)"] [[package]] name = "referencing" -version = "0.35.1" +version = "0.37.0" description = "JSON Referencing + Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, + {file = "referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231"}, + {file = "referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8"}, ] [package.dependencies] attrs = ">=22.2.0" rpds-py = ">=0.7.0" +typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} + +[[package]] +name = "regopy" +version = "1.2.0" +description = "Python interface for the OPA Rego Language and Runtime" +optional = false +python-versions = "<4,>=3.6" +groups = ["main"] +markers = "platform_machine == \"x86_64\" or platform_machine == \"AMD64\"" +files = [ + {file = "regopy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7fe114cae867fdd4ce5cb93481b103d25e9ba43f582536c68d8dd33d6742d2fb"}, + {file = "regopy-1.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:414c440897c2e82f3b2c1093df69518dfc8a1f9b52a448d375a35892ff144834"}, + {file = "regopy-1.2.0-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa8b81a04abc2f7a5a2b3a64b57caa770f733074e943f4eb097e5d2c3cb923f0"}, + {file = "regopy-1.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:55eaed9a02243de8bc3db76b2fdff0591e21ff2bf720034a6652d96f55224dab"}, + {file = "regopy-1.2.0-cp310-cp310-win32.whl", hash = "sha256:8ecf9382e2940b3d0721567efcb3f12597ce569cc7ba5c6d4bcb10d0e106c238"}, + {file = "regopy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:a1595557c0c00e776adb6d844882223c49dbd5a6d804c19436938bdf067ee3be"}, + {file = "regopy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cba98d0a6dd52a001c1a3017a3a4b237ea27e044bb503de3d0a6436c0b76f91"}, + {file = "regopy-1.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:00ce6eccb9700956124117865bc3187453cfe5b75c6ef55610e1601ccc3eeb6a"}, + {file = "regopy-1.2.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:735c86156743e78d68217ad0c0bf1da4c4d6cfdf05845939f84c6ffc54f2f99c"}, + {file = "regopy-1.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:48c57fb3ed35c1430e2ec43de958fc300098c780459293d49c26bd492c3c4eac"}, + {file = "regopy-1.2.0-cp311-cp311-win32.whl", hash = "sha256:7bb3b19c6ef0e20f785a111ae9b3e1b36e33a5ddb7de6cc4d2cdf0ee73cbf26a"}, + {file = "regopy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:724015962ed54df6d05b6dcfb7150dfec3d35b0690fa43bc72dcbb81312c455f"}, + {file = "regopy-1.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:443d5edc7a2077bf18bee7e78129152f9c5372b85f360a37c4ba95f4aae49427"}, + {file = "regopy-1.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:12a4f7f1d855ab673a49e2f4e43cc3d7ec252111946d4acd9a85908490c75b16"}, + {file = "regopy-1.2.0-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:116b9ea70f3fce4e7f7e88ef296b32cbedd724d161735abb40e2f1997a63a3ea"}, + {file = "regopy-1.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a57bfd3b4ae4687f99f9c0a396b593706c0dd8f6b4219cb4c195f4dd2d9d406c"}, + {file = "regopy-1.2.0-cp312-cp312-win32.whl", hash = "sha256:3dcc10fc106bf21ac9ce02fa1f6fa2a3a2edb7de6d99cf1993fabcfc2225b17d"}, + {file = "regopy-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:cfa7e51802d5fc9a68c18360562ed0cc0ec67a5d39ffc6934a2eb89b370681b9"}, + {file = "regopy-1.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dd3b0d85f6c3f5292af6a4017115f03e29de0dfcc32bf49d65adbaccfcef1988"}, + {file = "regopy-1.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:fb2dee03f0807c44de5bf2fec6fcd55f212f849918a3f21df39549ed265212e4"}, + {file = "regopy-1.2.0-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6026e955b04252bd6ac27d6c93a193ce0f8cca923b4d4d5813573260787ee284"}, + {file = "regopy-1.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e098d8b0d604d0ad42b0c6c77d92ae711ed1ec779d931140536c536a051f69c5"}, + {file = "regopy-1.2.0-cp313-cp313-win32.whl", hash = "sha256:5a8c6ab1aa01150efa83322721cc1c9a4406369e2d3da3b31981735f300a52b4"}, + {file = "regopy-1.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:c46123148242b5039d08524690a8008fc589bfa6dfbb4c227b9edc0b3f30113b"}, + {file = "regopy-1.2.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd8b08fd94d5890ba6b366be530faef3270f7427362e61606d3a58df5190a562"}, + {file = "regopy-1.2.0-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:9fcc60d535681b79b88272a8a58e7344d7d3ef27a12e2e9490b71df810d643f3"}, + {file = "regopy-1.2.0-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10a4e141fa710328c48a1c28050f2592a111fdae7a7d4a53362990bdc657ee50"}, + {file = "regopy-1.2.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a58965cdfca3ba5710a4a19b66a6e30336771a42780a1ba37821c26acc163798"}, + {file = "regopy-1.2.0-cp314-cp314-win32.whl", hash = "sha256:bada7a1a385a4ad1e9ccca2ff220c32d22a83ba73d9c72df630a4c9b9a20580e"}, + {file = "regopy-1.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:8751765c08909b2c1ababc21bd87a41b7682c44cd9d7c866cf121ce14d36fb48"}, + {file = "regopy-1.2.0-cp39-cp39-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73673f493d343ff0e5c6d6dafee084530c65aba5032b3cd2fb417eb2437c0a53"}, + {file = "regopy-1.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bea37e7a689bbd681ef516120b9605ffb6757743e2dbe8989be89498679800a2"}, + {file = "regopy-1.2.0-cp39-cp39-win32.whl", hash = "sha256:9ec12e46bcb3d0fa4961aaebd10e542e459705f7c13472d8e7d14ec9fc4b0d83"}, + {file = 
"regopy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e87869e384dd33c70b7c9b52b0f95f2c0b96e0c053639e6c29825e97c47c1938"}, + {file = "regopy-1.2.0.tar.gz", hash = "sha256:88bf25d94eaa4cc034837478f37edae3f4854535873c993e8d145e2df1ff1b79"}, +] + +[package.extras] +dev = ["enum-tools[sphinx]", "flake8", "flake8-bugbear", "flake8-builtins", "flake8-docstrings", "flake8-import-order", "flake8-quotes", "pep8-naming", "pytest", "sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "twine", "wheel"] [[package]] name = "requests" -version = "2.32.3" +version = "2.32.5" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" @@ -2199,6 +2720,7 @@ version = "2.0.0" description = "OAuthlib authentication support for Requests." optional = false python-versions = ">=3.4" +groups = ["main"] files = [ {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, @@ -2217,6 +2739,7 @@ version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -2227,114 +2750,127 @@ requests = ">=2.0.1,<3.0.0" [[package]] name = "rpds-py" -version = "0.22.3" +version = "0.30.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false -python-versions = ">=3.9" -files = [ - {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, - {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, - {file = 
"rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, - {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, - {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, - {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, - {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, - {file = 
"rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, - {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, - {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, - {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, - {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = 
"sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, - {file = 
"rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, - {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, - {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, - {file = 
"rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, - {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "rpds_py-0.30.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:679ae98e00c0e8d68a7fda324e16b90fd5260945b45d3b824c892cec9eea3288"}, + {file = "rpds_py-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4cc2206b76b4f576934f0ed374b10d7ca5f457858b157ca52064bdfc26b9fc00"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:389a2d49eded1896c3d48b0136ead37c48e221b391c052fba3f4055c367f60a6"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:32c8528634e1bf7121f3de08fa85b138f4e0dc47657866630611b03967f041d7"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f207f69853edd6f6700b86efb84999651baf3789e78a466431df1331608e5324"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67b02ec25ba7a9e8fa74c63b6ca44cf5707f2fbfadae3ee8e7494297d56aa9df"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0e95f6819a19965ff420f65578bacb0b00f251fefe2c8b23347c37174271f3"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:a452763cc5198f2f98898eb98f7569649fe5da666c2dc6b5ddb10fde5a574221"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e0b65193a413ccc930671c55153a03ee57cecb49e6227204b04fae512eb657a7"}, + {file = "rpds_py-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:858738e9c32147f78b3ac24dc0edb6610000e56dc0f700fd5f651d0a0f0eb9ff"}, + {file = "rpds_py-0.30.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:da279aa314f00acbb803da1e76fa18666778e8a8f83484fba94526da5de2cba7"}, + {file = "rpds_py-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7c64d38fb49b6cdeda16ab49e35fe0da2e1e9b34bc38bd78386530f218b37139"}, + {file = "rpds_py-0.30.0-cp310-cp310-win32.whl", hash = "sha256:6de2a32a1665b93233cde140ff8b3467bdb9e2af2b91079f0333a0974d12d464"}, + {file = "rpds_py-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:1726859cd0de969f88dc8673bdd954185b9104e05806be64bcd87badbe313169"}, + {file = "rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425"}, + {file = "rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038"}, + {file = "rpds_py-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7"}, + {file = "rpds_py-0.30.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed"}, + {file = "rpds_py-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85"}, + {file = "rpds_py-0.30.0-cp311-cp311-win32.whl", hash = "sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c"}, + {file = "rpds_py-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825"}, + {file = "rpds_py-0.30.0-cp311-cp311-win_arm64.whl", hash = "sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229"}, + {file = "rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad"}, + {file = "rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51"}, + {file 
= "rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5"}, + {file = "rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e"}, + {file = "rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394"}, + {file = "rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf"}, + {file = "rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b"}, + {file = "rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e"}, + {file = "rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2"}, + {file = "rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d"}, + {file = "rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7"}, + {file = "rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31"}, + {file = "rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95"}, + {file = "rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d"}, + {file = "rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15"}, + {file = "rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1"}, + {file = "rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a"}, + {file = "rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0"}, + {file = "rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94"}, + {file = "rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08"}, + {file = "rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27"}, + {file = "rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6"}, + {file = "rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d"}, + {file = "rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0"}, + {file = "rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f"}, + {file = "rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65"}, + {file = "rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f"}, + {file = 
"rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53"}, + {file = "rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed"}, + {file = "rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950"}, + {file = "rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6"}, + {file = "rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb"}, + {file = "rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5"}, + {file = "rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404"}, + {file = "rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856"}, + {file = "rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40"}, + {file = "rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0"}, + {file = "rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e"}, + {file = "rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84"}, ] [[package]] @@ -2343,24 +2879,158 @@ version = "2.10.0" description = "A library implementing the 'SemVer' scheme." optional = false python-versions = ">=2.7" +groups = ["main"] files = [ {file = "semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177"}, {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, ] [package.extras] -dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1)", "coverage", "flake8", "nose2", "readme-renderer (<25.0)", "tox", "wheel", "zest.releaser[recommended]"] +dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1) ; python_version == \"3.4\"", "coverage", "flake8", "nose2", "readme-renderer (<25.0) ; python_version == \"3.4\"", "tox", "wheel", "zest.releaser[recommended]"] doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "semver" -version = "3.0.2" +version = "3.0.4" description = "Python helper for Semantic Versioning (https://semver.org)" optional = false python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746"}, + {file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"}, +] + +[[package]] +name = "simplejson" +version = "3.20.2" +description = "Simple, fast, extensible JSON encoder/decoder for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.5" +groups = ["main"] +files = [ + {file = "simplejson-3.20.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:11847093fd36e3f5a4f595ff0506286c54885f8ad2d921dfb64a85bce67f72c4"}, + {file = "simplejson-3.20.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4d291911d23b1ab8eb3241204dd54e3ec60ddcd74dfcb576939d3df327205865"}, + {file = "simplejson-3.20.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:da6d16d7108d366bbbf1c1f3274662294859c03266e80dd899fc432598115ea4"}, + {file = "simplejson-3.20.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9ddf9a07694c5bbb4856271cbc4247cc6cf48f224a7d128a280482a2f78bae3d"}, + {file = 
"simplejson-3.20.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:3a0d2337e490e6ab42d65a082e69473717f5cc75c3c3fb530504d3681c4cb40c"}, + {file = "simplejson-3.20.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8ba88696351ed26a8648f8378a1431223f02438f8036f006d23b4f5b572778fa"}, + {file = "simplejson-3.20.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:00bcd408a4430af99d1f8b2b103bb2f5133bb688596a511fcfa7db865fbb845e"}, + {file = "simplejson-3.20.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4fc62feb76f590ccaff6f903f52a01c58ba6423171aa117b96508afda9c210f0"}, + {file = "simplejson-3.20.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6d7286dc11af60a2f76eafb0c2acde2d997e87890e37e24590bb513bec9f1bc5"}, + {file = "simplejson-3.20.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c01379b4861c3b0aa40cba8d44f2b448f5743999aa68aaa5d3ef7049d4a28a2d"}, + {file = "simplejson-3.20.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a16b029ca25645b3bc44e84a4f941efa51bf93c180b31bd704ce6349d1fc77c1"}, + {file = "simplejson-3.20.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e22a5fb7b1437ffb057e02e1936a3bfb19084ae9d221ec5e9f4cf85f69946b6"}, + {file = "simplejson-3.20.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8b6ff02fc7b8555c906c24735908854819b0d0dc85883d453e23ca4c0445d01"}, + {file = "simplejson-3.20.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2bfc1c396ad972ba4431130b42307b2321dba14d988580c1ac421ec6a6b7cee3"}, + {file = "simplejson-3.20.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a97249ee1aee005d891b5a211faf58092a309f3d9d440bc269043b08f662eda"}, + {file = "simplejson-3.20.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f1036be00b5edaddbddbb89c0f80ed229714a941cfd21e51386dc69c237201c2"}, + {file = "simplejson-3.20.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5d6f5bacb8cdee64946b45f2680afa3f54cd38e62471ceda89f777693aeca4e4"}, + {file = "simplejson-3.20.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8db6841fb796ec5af632f677abf21c6425a1ebea0d9ac3ef1a340b8dc69f52b8"}, + {file = "simplejson-3.20.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0a341f7cc2aae82ee2b31f8a827fd2e51d09626f8b3accc441a6907c88aedb7"}, + {file = "simplejson-3.20.2-cp310-cp310-win32.whl", hash = "sha256:27f9c01a6bc581d32ab026f515226864576da05ef322d7fc141cd8a15a95ce53"}, + {file = "simplejson-3.20.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0a63ec98a4547ff366871bf832a7367ee43d047bcec0b07b66c794e2137b476"}, + {file = "simplejson-3.20.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:06190b33cd7849efc413a5738d3da00b90e4a5382fd3d584c841ac20fb828c6f"}, + {file = "simplejson-3.20.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4ad4eac7d858947a30d2c404e61f16b84d16be79eb6fb316341885bdde864fa8"}, + {file = "simplejson-3.20.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b392e11c6165d4a0fde41754a0e13e1d88a5ad782b245a973dd4b2bdb4e5076a"}, + {file = "simplejson-3.20.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51eccc4e353eed3c50e0ea2326173acdc05e58f0c110405920b989d481287e51"}, + {file = "simplejson-3.20.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:306e83d7c331ad833d2d43c76a67f476c4b80c4a13334f6e34bb110e6105b3bd"}, + {file = 
"simplejson-3.20.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f820a6ac2ef0bc338ae4963f4f82ccebdb0824fe9caf6d660670c578abe01013"}, + {file = "simplejson-3.20.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e7a066528a5451433eb3418184f05682ea0493d14e9aae690499b7e1eb6b81"}, + {file = "simplejson-3.20.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:438680ddde57ea87161a4824e8de04387b328ad51cfdf1eaf723623a3014b7aa"}, + {file = "simplejson-3.20.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cac78470ae68b8d8c41b6fca97f5bf8e024ca80d5878c7724e024540f5cdaadb"}, + {file = "simplejson-3.20.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7524e19c2da5ef281860a3d74668050c6986be15c9dd99966034ba47c68828c2"}, + {file = "simplejson-3.20.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e9b6d845a603b2eef3394eb5e21edb8626cd9ae9a8361d14e267eb969dbe413"}, + {file = "simplejson-3.20.2-cp311-cp311-win32.whl", hash = "sha256:47d8927e5ac927fdd34c99cc617938abb3624b06ff86e8e219740a86507eb961"}, + {file = "simplejson-3.20.2-cp311-cp311-win_amd64.whl", hash = "sha256:ba4edf3be8e97e4713d06c3d302cba1ff5c49d16e9d24c209884ac1b8455520c"}, + {file = "simplejson-3.20.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4376d5acae0d1e91e78baeba4ee3cf22fbf6509d81539d01b94e0951d28ec2b6"}, + {file = "simplejson-3.20.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f8fe6de652fcddae6dec8f281cc1e77e4e8f3575249e1800090aab48f73b4259"}, + {file = "simplejson-3.20.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25ca2663d99328d51e5a138f22018e54c9162438d831e26cfc3458688616eca8"}, + {file = "simplejson-3.20.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12a6b2816b6cab6c3fd273d43b1948bc9acf708272074c8858f579c394f4cbc9"}, + {file = "simplejson-3.20.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac20dc3fcdfc7b8415bfc3d7d51beccd8695c3f4acb7f74e3a3b538e76672868"}, + {file = "simplejson-3.20.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db0804d04564e70862ef807f3e1ace2cc212ef0e22deb1b3d6f80c45e5882c6b"}, + {file = "simplejson-3.20.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:979ce23ea663895ae39106946ef3d78527822d918a136dbc77b9e2b7f006237e"}, + {file = "simplejson-3.20.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a2ba921b047bb029805726800819675249ef25d2f65fd0edb90639c5b1c3033c"}, + {file = "simplejson-3.20.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:12d3d4dc33770069b780cc8f5abef909fe4a3f071f18f55f6d896a370fd0f970"}, + {file = "simplejson-3.20.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:aff032a59a201b3683a34be1169e71ddda683d9c3b43b261599c12055349251e"}, + {file = "simplejson-3.20.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30e590e133b06773f0dc9c3f82e567463df40598b660b5adf53eb1c488202544"}, + {file = "simplejson-3.20.2-cp312-cp312-win32.whl", hash = "sha256:8d7be7c99939cc58e7c5bcf6bb52a842a58e6c65e1e9cdd2a94b697b24cddb54"}, + {file = "simplejson-3.20.2-cp312-cp312-win_amd64.whl", hash = "sha256:2c0b4a67e75b945489052af6590e7dca0ed473ead5d0f3aad61fa584afe814ab"}, + {file = "simplejson-3.20.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90d311ba8fcd733a3677e0be21804827226a57144130ba01c3c6a325e887dd86"}, + {file = 
"simplejson-3.20.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:feed6806f614bdf7f5cb6d0123cb0c1c5f40407ef103aa935cffaa694e2e0c74"}, + {file = "simplejson-3.20.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6b1d8d7c3e1a205c49e1aee6ba907dcb8ccea83651e6c3e2cb2062f1e52b0726"}, + {file = "simplejson-3.20.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:552f55745044a24c3cb7ec67e54234be56d5d6d0e054f2e4cf4fb3e297429be5"}, + {file = "simplejson-3.20.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2da97ac65165d66b0570c9e545786f0ac7b5de5854d3711a16cacbcaa8c472d"}, + {file = "simplejson-3.20.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f59a12966daa356bf68927fca5a67bebac0033cd18b96de9c2d426cd11756cd0"}, + {file = "simplejson-3.20.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:133ae2098a8e162c71da97cdab1f383afdd91373b7ff5fe65169b04167da976b"}, + {file = "simplejson-3.20.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7977640af7b7d5e6a852d26622057d428706a550f7f5083e7c4dd010a84d941f"}, + {file = "simplejson-3.20.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b530ad6d55e71fa9e93e1109cf8182f427a6355848a4ffa09f69cc44e1512522"}, + {file = "simplejson-3.20.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bd96a7d981bf64f0e42345584768da4435c05b24fd3c364663f5fbc8fabf82e3"}, + {file = "simplejson-3.20.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f28ee755fadb426ba2e464d6fcf25d3f152a05eb6b38e0b4f790352f5540c769"}, + {file = "simplejson-3.20.2-cp313-cp313-win32.whl", hash = "sha256:472785b52e48e3eed9b78b95e26a256f59bb1ee38339be3075dad799e2e1e661"}, + {file = "simplejson-3.20.2-cp313-cp313-win_amd64.whl", hash = "sha256:a1a85013eb33e4820286139540accbe2c98d2da894b2dcefd280209db508e608"}, + {file = "simplejson-3.20.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a135941a50795c934bdc9acc74e172b126e3694fe26de3c0c1bc0b33ea17e6ce"}, + {file = "simplejson-3.20.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25ba488decb18738f5d6bd082018409689ed8e74bc6c4d33a0b81af6edf1c9f4"}, + {file = "simplejson-3.20.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d81f8e982923d5e9841622ff6568be89756428f98a82c16e4158ac32b92a3787"}, + {file = "simplejson-3.20.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdad497ccb1edc5020bef209e9c3e062a923e8e6fca5b8a39f0fb34380c8a66c"}, + {file = "simplejson-3.20.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a3f1db97bcd9fb592928159af7a405b18df7e847cbcc5682a209c5b2ad5d6b1"}, + {file = "simplejson-3.20.2-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:215b65b0dc2c432ab79c430aa4f1e595f37b07a83c1e4c4928d7e22e6b49a748"}, + {file = "simplejson-3.20.2-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:ece4863171ba53f086a3bfd87f02ec3d6abc586f413babfc6cf4de4d84894620"}, + {file = "simplejson-3.20.2-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:4a76d7c47d959afe6c41c88005f3041f583a4b9a1783cf341887a3628a77baa0"}, + {file = "simplejson-3.20.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:e9b0523582a57d9ea74f83ecefdffe18b2b0a907df1a9cef06955883341930d8"}, + {file = "simplejson-3.20.2-cp36-cp36m-win32.whl", hash = 
"sha256:16366591c8e08a4ac76b81d76a3fc97bf2bcc234c9c097b48d32ea6bfe2be2fe"}, + {file = "simplejson-3.20.2-cp36-cp36m-win_amd64.whl", hash = "sha256:732cf4c4ac1a258b4e9334e1e40a38303689f432497d3caeb491428b7547e782"}, + {file = "simplejson-3.20.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6c3a98e21e5f098e4f982ef302ebb1e681ff16a5d530cfce36296bea58fe2396"}, + {file = "simplejson-3.20.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10cf9ca1363dc3711c72f4ec7c1caed2bbd9aaa29a8d9122e31106022dc175c6"}, + {file = "simplejson-3.20.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:106762f8aedf3fc3364649bfe8dc9a40bf5104f872a4d2d86bae001b1af30d30"}, + {file = "simplejson-3.20.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b21659898b7496322e99674739193f81052e588afa8b31b6a1c7733d8829b925"}, + {file = "simplejson-3.20.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fa1db6a02bca88829f2b2057c76a1d2dc2fccb8c5ff1199e352f213e9ec719"}, + {file = "simplejson-3.20.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:156139d94b660448ec8a4ea89f77ec476597f752c2ff66432d3656704c66b40e"}, + {file = "simplejson-3.20.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:b2620ac40be04dff08854baf6f4df10272f67079f61ed1b6274c0e840f2e2ae1"}, + {file = "simplejson-3.20.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:9ccef5b5d3e3ac5d9da0a0ca1d2de8cf2b0fb56b06aa0ab79325fa4bcc5a1d60"}, + {file = "simplejson-3.20.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:f526304c2cc9fd8b8d18afacb75bc171650f83a7097b2c92ad6a431b5d7c1b72"}, + {file = "simplejson-3.20.2-cp37-cp37m-win32.whl", hash = "sha256:e0f661105398121dd48d9987a2a8f7825b8297b3b2a7fe5b0d247370396119d5"}, + {file = "simplejson-3.20.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dab98625b3d6821e77ea59c4d0e71059f8063825a0885b50ed410e5c8bd5cb66"}, + {file = "simplejson-3.20.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b8205f113082e7d8f667d6cd37d019a7ee5ef30b48463f9de48e1853726c6127"}, + {file = "simplejson-3.20.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fc8da64929ef0ff16448b602394a76fd9968a39afff0692e5ab53669df1f047f"}, + {file = "simplejson-3.20.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfe704864b5fead4f21c8d448a89ee101c9b0fc92a5f40b674111da9272b3a90"}, + {file = "simplejson-3.20.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40ca7cbe7d2f423b97ed4e70989ef357f027a7e487606628c11b79667639dc84"}, + {file = "simplejson-3.20.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cec1868b237fe9fb2d466d6ce0c7b772e005aadeeda582d867f6f1ec9710cad"}, + {file = "simplejson-3.20.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:792debfba68d8dd61085ffb332d72b9f5b38269cda0c99f92c7a054382f55246"}, + {file = "simplejson-3.20.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e022b2c4c54cb4855e555f64aa3377e3e5ca912c372fa9e3edcc90ebbad93dce"}, + {file = "simplejson-3.20.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5de26f11d5aca575d3825dddc65f69fdcba18f6ca2b4db5cef16f41f969cef15"}, + {file = "simplejson-3.20.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:e2162b2a43614727ec3df75baeda8881ab129824aa1b49410d4b6c64f55a45b4"}, + {file = 
"simplejson-3.20.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e11a1d6b2f7e72ca546bdb4e6374b237ebae9220e764051b867111df83acbd13"}, + {file = "simplejson-3.20.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:daf7cd18fe99eb427fa6ddb6b437cfde65125a96dc27b93a8969b6fe90a1dbea"}, + {file = "simplejson-3.20.2-cp38-cp38-win32.whl", hash = "sha256:da795ea5f440052f4f497b496010e2c4e05940d449ea7b5c417794ec1be55d01"}, + {file = "simplejson-3.20.2-cp38-cp38-win_amd64.whl", hash = "sha256:6a4b5e7864f952fcce4244a70166797d7b8fd6069b4286d3e8403c14b88656b6"}, + {file = "simplejson-3.20.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b3bf76512ccb07d47944ebdca44c65b781612d38b9098566b4bb40f713fc4047"}, + {file = "simplejson-3.20.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:214e26acf2dfb9ff3314e65c4e168a6b125bced0e2d99a65ea7b0f169db1e562"}, + {file = "simplejson-3.20.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2fb1259ca9c385b0395bad59cdbf79535a5a84fb1988f339a49bfbc57455a35a"}, + {file = "simplejson-3.20.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c34e028a2ba8553a208ded1da5fa8501833875078c4c00a50dffc33622057881"}, + {file = "simplejson-3.20.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b538f9d9e503b0dd43af60496780cb50755e4d8e5b34e5647b887675c1ae9fee"}, + {file = "simplejson-3.20.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab998e416ded6c58f549a22b6a8847e75a9e1ef98eb9fbb2863e1f9e61a4105b"}, + {file = "simplejson-3.20.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a8f1c307edf5fbf0c6db3396c5d3471409c4a40c7a2a466fbc762f20d46601a"}, + {file = "simplejson-3.20.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5a7bbac80bdb82a44303f5630baee140aee208e5a4618e8b9fde3fc400a42671"}, + {file = "simplejson-3.20.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5ef70ec8fe1569872e5a3e4720c1e1dcb823879a3c78bc02589eb88fab920b1f"}, + {file = "simplejson-3.20.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:cb11c09c99253a74c36925d461c86ea25f0140f3b98ff678322734ddc0f038d7"}, + {file = "simplejson-3.20.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:66f7c78c6ef776f8bd9afaad455e88b8197a51e95617bcc44b50dd974a7825ba"}, + {file = "simplejson-3.20.2-cp39-cp39-win32.whl", hash = "sha256:619ada86bfe3a5aa02b8222ca6bfc5aa3e1075c1fb5b3263d24ba579382df472"}, + {file = "simplejson-3.20.2-cp39-cp39-win_amd64.whl", hash = "sha256:44a6235e09ca5cc41aa5870a952489c06aa4aee3361ae46daa947d8398e57502"}, + {file = "simplejson-3.20.2-py3-none-any.whl", hash = "sha256:3b6bb7fb96efd673eac2e4235200bfffdc2353ad12c54117e1e4e2fc485ac017"}, + {file = "simplejson-3.20.2.tar.gz", hash = "sha256:5fe7a6ce14d1c300d80d08695b7f7e633de6cd72c80644021874d985b3393649"}, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +groups = ["main"] files = [ - {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, - {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = 
"sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] [[package]] @@ -2369,6 +3039,7 @@ version = "0.8.3" description = "SPDX parser and tools." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "spdx-tools-0.8.3.tar.gz", hash = "sha256:68b8f9ce2893b5216bd90b2e63f1c821c2884e4ebc4fd295ebbf1fa8b8a94b93"}, {file = "spdx_tools-0.8.3-py3-none-any.whl", hash = "sha256:638fd9bd8be61901316eb6d063574e16d5403a1870073ec4d9241426a997501a"}, @@ -2393,17 +3064,18 @@ test = ["pyshacl", "pytest", "tzdata"] [[package]] name = "sqlparse" -version = "0.5.3" +version = "0.5.5" description = "A non-validating SQL parser." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca"}, - {file = "sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272"}, + {file = "sqlparse-0.5.5-py3-none-any.whl", hash = "sha256:12a08b3bf3eec877c519589833aed092e2444e68240a3577e8e26148acc7b1ba"}, + {file = "sqlparse-0.5.5.tar.gz", hash = "sha256:e20d4a9b0b8585fdf63b10d30066c7c94c5d7a7ec47c889a2d83a3caa93ff28e"}, ] [package.extras] -dev = ["build", "hatch"] +dev = ["build"] doc = ["sphinx"] [[package]] @@ -2412,6 +3084,8 @@ version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -2425,56 +3099,17 @@ pure-eval = "*" [package.extras] tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] -[[package]] -name = "tomli" -version = "2.2.1" -description = "A lil' TOML parser" -optional = true -python-versions = ">=3.8" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - [[package]] name = "tomlkit" -version = "0.13.2" +version = "0.14.0" description = "Style preserving TOML library" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = 
["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, - {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, + {file = "tomlkit-0.14.0-py3-none-any.whl", hash = "sha256:592064ed85b40fa213469f81ac584f67a4f2992509a7c3ea2d632208623a3680"}, + {file = "tomlkit-0.14.0.tar.gz", hash = "sha256:cf00efca415dbd57575befb1f6634c4f42d2d87dbba376128adb42c121b87064"}, ] [[package]] @@ -2483,6 +3118,8 @@ version = "5.14.3" description = "Traitlets Python configuration system" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -2494,53 +3131,56 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "typeguard" -version = "4.4.1" +version = "4.4.4" description = "Run-time type checker for Python" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "typeguard-4.4.1-py3-none-any.whl", hash = "sha256:9324ec07a27ec67fc54a9c063020ca4c0ae6abad5e9f0f9804ca59aee68c6e21"}, - {file = "typeguard-4.4.1.tar.gz", hash = "sha256:0d22a89d00b453b47c49875f42b6601b961757541a2e1e0ef517b6e24213c21b"}, + {file = "typeguard-4.4.4-py3-none-any.whl", hash = "sha256:b5f562281b6bfa1f5492470464730ef001646128b180769880468bd84b68b09e"}, + {file = "typeguard-4.4.4.tar.gz", hash = "sha256:3a7fd2dffb705d4d0efaed4306a704c89b9dee850b688f060a8b1615a79e5f74"}, ] [package.dependencies] -typing-extensions = ">=4.10.0" - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)"] -test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"] +typing_extensions = ">=4.14.0" [[package]] name = "types-pymysql" -version = "1.1.0.20241103" +version = "1.1.0.20251220" description = "Typing stubs for PyMySQL" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "types-PyMySQL-1.1.0.20241103.tar.gz", hash = "sha256:a7628542919a0ba87625fb79eefb2a2de45fb4ad32afe6e561e8f2f27fb58b8c"}, - {file = "types_PyMySQL-1.1.0.20241103-py3-none-any.whl", hash = "sha256:1a32efd8a74b5bf74c4de92a86c1cc6edaf3802dcfd5546635ab501eb5e3c096"}, + {file = "types_pymysql-1.1.0.20251220-py3-none-any.whl", hash = "sha256:fa1082af7dea6c53b6caa5784241924b1296ea3a8d3bd060417352c5e10c0618"}, + {file = "types_pymysql-1.1.0.20251220.tar.gz", hash = "sha256:ae1c3df32a777489431e2e9963880a0df48f6591e0aa2fd3a6fabd9dee6eca54"}, ] [[package]] name = "types-pyyaml" -version = "6.0.12.20241230" +version = "6.0.12.20250915" description = "Typing stubs for PyYAML" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, - {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, + {file = "types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = 
"sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6"}, + {file = "types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3"}, ] [[package]] name = "types-requests" -version = "2.32.0.20241016" +version = "2.32.4.20260107" description = "Typing stubs for requests" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"code-quality\"" files = [ - {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, - {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, + {file = "types_requests-2.32.4.20260107-py3-none-any.whl", hash = "sha256:b703fe72f8ce5b31ef031264fe9395cac8f46a04661a79f7ed31a80fb308730d"}, + {file = "types_requests-2.32.4.20260107.tar.gz", hash = "sha256:018a11ac158f801bfa84857ddec1650750e393df8a004a8a9ae2a9bec6fcb24f"}, ] [package.dependencies] @@ -2548,136 +3188,143 @@ urllib3 = ">=2" [[package]] name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] +[[package]] +name = "typing-inspection" +version = "0.4.2" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + [[package]] name = "tzdata" -version = "2024.2" +version = "2025.3" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, + {file = "tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1"}, + {file = "tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7"}, ] [[package]] name = "uritemplate" -version = "4.1.1" +version = "4.2.0" description = "Implementation of RFC 6570 URI Templates" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = 
"sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, - {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, + {file = "uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686"}, + {file = "uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e"}, ] [[package]] name = "uritools" -version = "4.0.3" +version = "6.0.1" description = "URI parsing, classification and composition" optional = false -python-versions = ">=3.7" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "uritools-4.0.3-py3-none-any.whl", hash = "sha256:bae297d090e69a0451130ffba6f2f1c9477244aa0a5543d66aed2d9f77d0dd9c"}, - {file = "uritools-4.0.3.tar.gz", hash = "sha256:ee06a182a9c849464ce9d5fa917539aacc8edd2a4924d1b7aabeeecabcae3bc2"}, + {file = "uritools-6.0.1-py3-none-any.whl", hash = "sha256:d9507b82206c857d2f93d8fcc84f3b05ae4174096761102be690aa76a360cc1b"}, + {file = "uritools-6.0.1.tar.gz", hash = "sha256:2f9e9cb954e7877232b2c863f724a44a06eb98d9c7ebdd69914876e9487b94f8"}, ] [[package]] name = "urllib3" -version = "2.3.0" +version = "2.6.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, - {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, + {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, + {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] [[package]] name = "validators" -version = "0.34.0" +version = "0.35.0" description = "Python Data Validation for Humansâ„¢" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "validators-0.34.0-py3-none-any.whl", hash = "sha256:c804b476e3e6d3786fa07a30073a4ef694e617805eb1946ceee3fe5a9b8b1321"}, - {file = "validators-0.34.0.tar.gz", hash = "sha256:647fe407b45af9a74d245b943b18e6a816acf4926974278f6dd617778e1e781f"}, + {file = "validators-0.35.0-py3-none-any.whl", hash = "sha256:e8c947097eae7892cb3d26868d637f79f47b4a0554bc6b80065dfe5aac3705dd"}, + {file = "validators-0.35.0.tar.gz", hash = "sha256:992d6c48a4e77c81f1b4daba10d16c3a9bb0dbb79b3a19ea847ff0928e70497a"}, ] [package.extras] crypto-eth-addresses = ["eth-hash[pycryptodome] (>=0.7.0)"] -[[package]] -name = "virtualenv" -version = "20.28.1" -description = "Virtual Python Environment builder" -optional = true -python-versions = ">=3.8" -files = [ - {file = "virtualenv-20.28.1-py3-none-any.whl", hash = "sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb"}, - {file = "virtualenv-20.28.1.tar.gz", hash = "sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" 
-filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.6.0" description = "Measures the displayed width of unicode strings in a terminal" optional = true -python-versions = "*" +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, + {file = "wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad"}, + {file = "wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159"}, ] [[package]] name = "werkzeug" -version = "3.1.3" +version = "3.1.5" description = "The comprehensive WSGI web application library." optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"dev\"" files = [ - {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, - {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, + {file = "werkzeug-3.1.5-py3-none-any.whl", hash = "sha256:5111e36e91086ece91f93268bb39b4a35c1e6f1feac762c9c822ded0a4e322dc"}, + {file = "werkzeug-3.1.5.tar.gz", hash = "sha256:6a548b0e88955dd07ccb25539d7d0cc97417ee9e179677d22c7041c8f078ce67"}, ] [package.dependencies] -MarkupSafe = ">=2.1.1" +markupsafe = ">=2.1.1" [package.extras] watchdog = ["watchdog (>=2.3)"] [[package]] name = "whitenoise" -version = "6.8.2" +version = "6.11.0" description = "Radically simplified static file serving for WSGI applications" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "whitenoise-6.8.2-py3-none-any.whl", hash = "sha256:df12dce147a043d1956d81d288c6f0044147c6d2ab9726e5772ac50fb45d2280"}, - {file = "whitenoise-6.8.2.tar.gz", hash = "sha256:486bd7267a375fa9650b136daaec156ac572971acc8bf99add90817a530dd1d4"}, + {file = "whitenoise-6.11.0-py3-none-any.whl", hash = "sha256:b2aeb45950597236f53b5342b3121c5de69c8da0109362aee506ce88e022d258"}, + {file = "whitenoise-6.11.0.tar.gz", hash = "sha256:0f5bfce6061ae6611cd9396a8231e088722e4fc67bc13a111be74c738d99375f"}, ] [package.extras] @@ -2685,21 +3332,25 @@ brotli = ["brotli"] [[package]] name = "xmltodict" -version = "0.14.2" +version = "1.0.2" description = "Makes working with XML feel like you are working with JSON" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, - {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, + {file = "xmltodict-1.0.2-py3-none-any.whl", hash = 
"sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d"}, + {file = "xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649"}, ] +[package.extras] +test = ["pytest", "pytest-cov"] + [extras] -code-quality = ["black", "django-stubs", "djangorestframework-stubs", "flake8", "flake8-isort", "mypy", "pre-commit", "pylint", "pylint-django", "types-PyMySQL"] -dev = ["Werkzeug", "django-extensions", "ipdb"] +code-quality = ["black", "django-stubs", "djangorestframework-stubs", "flake8", "flake8-isort", "mypy", "pylint", "pylint-django", "types-PyMySQL"] +dev = ["Werkzeug", "django-extensions", "django-silk", "ipdb"] unittests = ["coverage", "django-coverage-plugin", "django-extensions"] [metadata] -lock-version = "2.0" -python-versions = ">= 3.10, < 3.13" -content-hash = "704f3c09af388e57ae384880e04fbc27412fa9ae9d83f889c1afd15e7211abd9" +lock-version = "2.1" +python-versions = ">= 3.11, < 3.15" +content-hash = "929ad929260444c7ae93f8af4ad39d7217b660c0a9ff597899f8b6d5a6d171a2" diff --git a/backend/poetry_requirements.txt b/backend/poetry_requirements.txt index b3e999a9b..cc54e4988 100644 --- a/backend/poetry_requirements.txt +++ b/backend/poetry_requirements.txt @@ -1 +1 @@ -poetry==2.0.1 # https://python-poetry.org/ +poetry==2.3.2 # https://python-poetry.org/ diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 351f579e2..329301554 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -1,96 +1,119 @@ [tool.poetry] name = "SecObserve" -version = "1.26.0" +version = "1.48.0" description = "SecObserve is an open source vulnerability management system for software development and cloud environments." license = "BSD-3-Clause" -authors = [ - "Stefan Fleckenstein ", -] +authors = [ "Stefan Fleckenstein", ] +package-mode = false [tool.poetry.dependencies] -python = ">= 3.10, < 3.13" +python = ">= 3.11, < 3.15" # Django # ------------------------------------------------------------------------------ -gunicorn = "23.0.0" # https://github.com/benoitc/gunicorn -django = "5.1.5" # https://www.djangoproject.com/ -django-environ = "0.12.0" # https://github.com/joke2k/django-environ -django-filter = "24.3" # https://github.com/carltongibson/django-filter -django-csp = "3.8" # https://github.com/mozilla/django-csp -django-picklefield = "3.2" # https://github.com/gintas/django-picklefield +gunicorn = "25.1.0" # https://github.com/benoitc/gunicorn +django = "5.2.11" # https://www.djangoproject.com/ +django-environ = "0.12.1" # https://github.com/joke2k/django-environ +django-filter = "25.2" # https://github.com/carltongibson/django-filter +django-csp = "4.0" # https://github.com/mozilla/django-csp +django-picklefield = "3.4.0" # https://github.com/gintas/django-picklefield django-encrypted-model-fields = "0.6.5" # https://gitlab.com/lansharkconsulting/django/django-encrypted-model-fields -argon2-cffi = "23.1.0" # https://github.com/hynek/argon2_cffi -whitenoise = "6.8.2" # https://github.com/evansd/whitenoise +argon2-cffi = "25.1.0" # https://github.com/hynek/argon2_cffi +whitenoise = "6.11.0" # https://github.com/evansd/whitenoise +django-dirtyfields = "1.9.9" # https://github.com/romgar/django-dirtyfields + # Django REST Framework # ------------------------------------------------------------------------------ -djangorestframework = "3.15.2" # https://github.com/encode/django-rest-framework -django-cors-headers = "4.6.0" # https://github.com/adamchainz/django-cors-headers +djangorestframework = "3.16.1" # 
https://github.com/encode/django-rest-framework +django-cors-headers = "4.9.0" # https://github.com/adamchainz/django-cors-headers + # OpenAPI 3 # ------------------------------------------------------------------------------ -drf-spectacular = "0.28.0" # https://github.com/tfranzel/drf-spectacular -drf-spectacular-sidecar = "2024.12.1" # https://github.com/tfranzel/drf-spectacular-sidecar +drf-spectacular = "0.29.0" # https://github.com/tfranzel/drf-spectacular +drf-spectacular-sidecar = "2026.1.1" # https://github.com/tfranzel/drf-spectacular-sidecar + # Token authentication # ------------------------------------------------------------------------------ PyJWT = "2.10.1" # https://github.com/jpadilla/pyjwt + # HTTP requests # ------------------------------------------------------------------------------ -requests = "2.32.3" # https://github.com/psf/requests +requests = "2.32.5" # https://github.com/psf/requests + # Database # ------------------------------------------------------------------------------ -pymysql = "1.1.1" # https://github.com/PyMySQL/PyMySQL -psycopg = { version = "3.2.3", extras = ["binary"] } # https://github.com/psycopg/psycopg +pymysql = "1.1.2" # https://github.com/PyMySQL/PyMySQL +psycopg = { version = "3.3.2", extras = ["binary"] } # https://github.com/psycopg/psycopg + # Dataformats # ------------------------------------------------------------------------------ -defusedcsv = "2.0.0" # https://github.com/raphaelm/defusedcsv +defusedcsv = "3.0.0" # https://github.com/raphaelm/defusedcsv openpyxl = "3.1.5" # https://foss.heptapod.net/openpyxl/openpyxl -jsonpickle = "4.0.1" # https://github.com/jsonpickle/jsonpickle -pyyaml = "6.0.2" # https://github.com/yaml/pyyaml +jsonpickle = "4.1.1" # https://github.com/jsonpickle/jsonpickle +pyyaml = "6.0.3" # https://github.com/yaml/pyyaml + # Field specifications # ------------------------------------------------------------------------------ -packageurl-python = "0.16.0" # https://github.com/package-url/packageurl-python -cvss = "3.3" # https://github.com/RedHatProductSecurity/cvss -semver = "3.0.2" # https://github.com/python-semver/python-semver -license-expression = "30.4.1" # https://github.com/aboutcode-org/license-expression +packageurl-python = "0.17.6" # https://github.com/package-url/packageurl-python +cvss = "3.6" # https://github.com/RedHatProductSecurity/cvss +semver = "3.0.4" # https://github.com/python-semver/python-semver +license-expression = "30.4.4" # https://github.com/aboutcode-org/license-expression + # Task queue # ------------------------------------------------------------------------------ -huey = "2.5.2" # https://github.com/coleifer/huey +huey = "2.6.0" # https://github.com/coleifer/huey + # Issue tracker # ------------------------------------------------------------------------------ -jira = "3.8.0" # https://github.com/pycontribs/jira +jira = "3.10.5" # https://github.com/pycontribs/jira + # Others # ------------------------------------------------------------------------------ inflect = "7.5.0" # https://github.com/jaraco/inflect -validators = "0.34.0" # https://github.com/python-validators/validators -py-ocsf-models = "0.2.0" # https://github.com/prowler-cloud/py-ocsf-models +validators = "0.35.0" # https://github.com/python-validators/validators +py-ocsf-models = "0.8.1" # https://github.com/prowler-cloud/py-ocsf-models spdx-tools = "0.8.3" # https://github.com/spdx/tools-python +html-to-markdown = "2.25.0" # https://github.com/Goldziher/html-to-markdown +cyclonedx-python-lib = "11.6.0" # 
https://github.com/CycloneDX/cyclonedx-python-lib +licenselynx = "2.0.2" # https://github.com/licenselynx/licenselynx +regopy = { version = "1.2.0", markers = "platform_machine == 'x86_64' or platform_machine == 'AMD64'" } # https://microsoft.github.io/rego-cpp/#python-example +simplejson = "3.20.2" # Development dependencies # ------------------------------------------------------------------------------ # Werkzeug = { version = "3.0.4", extras = ["watchdog"], optional = true } # https://github.com/pallets/werkzeug -Werkzeug = { version = "3.1.3", optional = true } # https://github.com/pallets/werkzeug +Werkzeug = { version = "3.1.5", optional = true } # https://github.com/pallets/werkzeug ipdb = { version = "0.13.13", optional = true } # https://github.com/gotcha/ipdb # watchgod = { version = "0.8.2", optional = true } # https://github.com/samuelcolvin/watchgod -django-extensions = { version = "3.2.3", optional = true } # https://github.com/django-extensions/django-extensions +django-extensions = { version = "4.1", optional = true } # https://github.com/django-extensions/django-extensions +django-silk = { version = "5.4.3", optional = true } # https://github.com/jazzband/django-silk # Unittest dependencies # ------------------------------------------------------------------------------ -coverage = { version = "7.6.10", optional = true } # https://github.com/nedbat/coveragepy -django-coverage-plugin = { version = "3.1.0", optional = true } # https://github.com/nedbat/django_coverage_plugin +coverage = { version = "7.13.4", optional = true } # https://github.com/nedbat/coveragepy +django-coverage-plugin = { version = "3.2.0", optional = true } # https://github.com/nedbat/django_coverage_plugin # Code quality dependencies # ------------------------------------------------------------------------------ -flake8 = { version = "7.1.1", optional = true } # https://github.com/PyCQA/flake8 -flake8-isort = { version = "6.1.1", optional = true } # https://github.com/gforcada/flake8-isort -black = { version = "24.10.0", optional = true } # https://github.com/psf/black -pylint = { version = "3.3.3", optional = true } # https://github.com/pylint-dev/pylint -pylint-django = { version = "2.6.1", optional = true } # https://github.com/PyCQA/pylint-django -pre-commit = { version = "4.0.1", optional = true } # https://github.com/pre-commit/pre-commit -mypy = { version = "1.14.1", optional = true } # https://github.com/python/mypy -django-stubs = { version = "5.1.2", optional = true } # https://github.com/typeddjango/django-stubs -djangorestframework-stubs = { version = "3.15.2", optional = true } # https://github.com/typeddjango/djangorestframework-stubs -types-PyMySQL = { version = "1.1.0.20241103", optional = true } # https://github.com/python/typeshed +flake8 = { version = "7.3.0", optional = true } # https://github.com/PyCQA/flake8 +flake8-isort = { version = "7.0.0", optional = true } # https://github.com/gforcada/flake8-isort +black = { version = "26.1.0", optional = true } # https://github.com/psf/black +pylint = { version = "4.0.4", optional = true } # https://github.com/pylint-dev/pylint +pylint-django = { version = "2.7.0", optional = true } # https://github.com/PyCQA/pylint-django +mypy = { version = "1.19.1", optional = true } # https://github.com/python/mypy +django-stubs = { version = "5.2.9", optional = true } # https://github.com/typeddjango/django-stubs +djangorestframework-stubs = { version = "3.16.8", extras = ["requests"], optional = true } # 
https://github.com/typeddjango/djangorestframework-stubs +types-PyMySQL = { version = "1.1.0.20251220", optional = true } # https://github.com/python/typeshed # import-linter = "2.0" # https://github.com/seddonym/import-linter --> Cannot be build with Docker [tool.poetry.extras] -dev = ["Werkzeug", "ipdb", "watchgod", "django-extensions"] +dev = ["Werkzeug", "ipdb", "watchgod", "django-extensions", "django-silk"] unittests = ["coverage", "django-extensions", "django-coverage-plugin"] code_quality = ["flake8", "flake8-isort", "black", "pylint", "pylint-django", "pre-commit", "mypy", "django-stubs", "djangorestframework-stubs", "types-PyMySQL"] + +[tool.black] +line-length = 120 +target-version = ['py314'] + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/backend/setup.cfg b/backend/setup.cfg index 8eef10344..b725be435 100644 --- a/backend/setup.cfg +++ b/backend/setup.cfg @@ -1,18 +1,19 @@ [flake8] extend-ignore = I001, I003, I004, I005 max-line-length = 120 -exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,venv,unittests/*,*unittests.py +exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,.venv,unittests/*,*unittests.py [pycodestyle] max-line-length = 120 -exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,venv,unittests/*,*unittests.py +exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,.venv,unittests/*,*unittests.py [isort] profile = black known_first_party = application,config, unittests [mypy] -python_version = 3.10 +python_version = 3.14 +disallow_untyped_defs = True implicit_optional = True no_namespace_packages = True check_untyped_defs = True @@ -47,12 +48,6 @@ max-locals=20 max-parents=13 max-returns=7 -[coverage:run] -include = application/* -omit = *migrations*, *tests* -plugins = - django_coverage_plugin - [importlinter] root_package = application @@ -60,20 +55,24 @@ root_package = application name = General layers type = layers layers = - application.import_observations | application.metrics - application.epss | application.rules | application.vex - application.access_control : application.commons : application.core : application.issue_tracker : application.licenses + application.background_tasks + application.import_observations + application.vex | application.rules | application.epss | application.metrics + application.core : application.issue_tracker : application.licenses : application.notifications + application.access_control : application.authorization + application.commons ignore_imports = - application.access_control.services.authorization -> application.*.models + application.access_control.api.serializers -> application.core.models + application.access_control.queries.user -> application.core.queries.product_member + application.authorization.services.authorization -> application.*.models + application.authorization.services.authorization -> application.core.queries.product_member application.core.api.serializers_observation -> application.import_observations.types application.core.api.serializers_observation -> application.import_observations.api.serializers application.core.api.serializers_observation -> application.import_observations.models + application.core.api.serializers_product -> application.import_observations.models application.core.api.serializers_product -> application.rules.models application.core.api.serializers_product -> application.rules.types - application.core.api.views -> application.licenses.api.serializers 
application.core.api.views -> application.rules.services.rule_engine - application.licenses.services.license_component -> application.import_observations.models - application.rules.models -> application.import_observations.models application.core.migrations.0051_convert_origin_component_dependencies -> application.import_observations.parsers.cyclone_dx.dependencies [importlinter:contract:module_layers] @@ -87,13 +86,38 @@ layers = (types) containers = application.access_control + application.authorization + application.background_tasks application.commons application.core application.epss + application.import_observations application.issue_tracker application.licenses application.metrics + application.notifications application.rules application.vex ignore_imports = - application.core.models -> application.core.services.observation + application.access_control.queries.api_token -> application.access_control.services.current_user + application.access_control.queries.authorization_group_member -> application.access_control.services.current_user + application.access_control.queries.authorization_group -> application.access_control.services.current_user + application.access_control.queries.user -> application.access_control.services.current_user + application.access_control.queries.user -> application.core.queries.product_member + application.commons.models -> application.commons.services.request_cache + application.core.queries.product_member -> application.access_control.services.current_user + +[coverage:run] +branch = True +relative_files = True +include = + application/** + config/** +omit = + **/migrations/** +plugins = + django_coverage_plugin + +[coverage:report] +show_missing = True +skip_empty = True diff --git a/backend/sonar-project.properties b/backend/sonar-project.properties new file mode 100644 index 000000000..9ded0a0c0 --- /dev/null +++ b/backend/sonar-project.properties @@ -0,0 +1,18 @@ +sonar.projectKey=secobserve_secobserve-backend +sonar.organization=secobserve + + +# This is the name and version displayed in the SonarCloud UI. +sonar.projectName=SecObserve Backend +#sonar.projectVersion=1.0 + + +# Path is relative to the sonar-project.properties file. Replace "\" by "/" on Windows. +#sonar.sources=. + +# Encoding of the source code. 
Default is default system encoding +#sonar.sourceEncoding=UTF-8 + +sonar.python.version=3.14 +sonar.exclusions=**/unittests/**,**/migrations/** +sonar.python.coverage.reportPaths=coverage.xml diff --git a/backend/unittests/access_control/api/test_authentication.py b/backend/unittests/access_control/api/test_authentication.py index 75d4999db..70b3818b4 100644 --- a/backend/unittests/access_control/api/test_authentication.py +++ b/backend/unittests/access_control/api/test_authentication.py @@ -26,15 +26,9 @@ def _check_not_authenticated(self, methods: list[str], url: str): self.assertEqual(401, response.status_code) - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) - def _check_api_token_not_authenticated( - self, methods: list[str], url: str, mock_authentication - ): - mock_authentication.side_effect = AuthenticationFailed( - "authentication failed message" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + def _check_api_token_not_authenticated(self, methods: list[str], url: str, mock_authentication): + mock_authentication.side_effect = AuthenticationFailed("authentication failed message") api_client = APIClient() @@ -53,18 +47,12 @@ def _check_api_token_not_authenticated( raise Exception(f"Unknown method: {method}") self.assertEqual(401, response.status_code) - self.assertEqual( - "authentication failed message", response.data.get("message") - ) + self.assertEqual("authentication failed message", response.data.get("message")) mock_authentication.assert_called_once() mock_authentication.reset_mock() - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) - def _check_api_token_authenticated( - self, methods: list[str], url: str, mock_authentication - ): + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + def _check_api_token_authenticated(self, methods: list[str], url: str, mock_authentication): mock_authentication.return_value = self.user_admin, None api_client = APIClient() @@ -87,15 +75,9 @@ def _check_api_token_authenticated( mock_authentication.assert_called_once() mock_authentication.reset_mock() - @patch( - "application.access_control.services.jwt_authentication.JWTAuthentication.authenticate" - ) - def _check_jwt_not_authenticated( - self, methods: list[str], url: str, mock_authentication - ): - mock_authentication.side_effect = AuthenticationFailed( - "authentication failed message" - ) + @patch("application.access_control.services.jwt_authentication.JWTAuthentication.authenticate") + def _check_jwt_not_authenticated(self, methods: list[str], url: str, mock_authentication): + mock_authentication.side_effect = AuthenticationFailed("authentication failed message") api_client = APIClient() @@ -114,18 +96,12 @@ def _check_jwt_not_authenticated( raise Exception(f"Unknown method: {method}") self.assertEqual(401, response.status_code) - self.assertEqual( - "authentication failed message", response.data.get("message") - ) + self.assertEqual("authentication failed message", response.data.get("message")) mock_authentication.assert_called_once() mock_authentication.reset_mock() - @patch( - "application.access_control.services.jwt_authentication.JWTAuthentication.authenticate" - ) - def _check_jwt_authenticated( - self, methods: list[str], url: str, mock_authentication - ): + 
@patch("application.access_control.services.jwt_authentication.JWTAuthentication.authenticate") + def _check_jwt_authenticated(self, methods: list[str], url: str, mock_authentication): mock_authentication.return_value = self.user_admin, None api_client = APIClient() @@ -148,15 +124,9 @@ def _check_jwt_authenticated( mock_authentication.assert_called_once() mock_authentication.reset_mock() - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication.authenticate" - ) - def _check_oidc_not_authenticated( - self, methods: list[str], url: str, mock_authentication - ): - mock_authentication.side_effect = AuthenticationFailed( - "authentication failed message" - ) + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication.authenticate") + def _check_oidc_not_authenticated(self, methods: list[str], url: str, mock_authentication): + mock_authentication.side_effect = AuthenticationFailed("authentication failed message") api_client = APIClient() @@ -181,12 +151,8 @@ def _check_oidc_not_authenticated( mock_authentication.assert_called_once() mock_authentication.reset_mock() - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication.authenticate" - ) - def _check_oidc_authenticated( - self, methods: list[str], url: str, mock_authentication - ): + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication.authenticate") + def _check_oidc_authenticated(self, methods: list[str], url: str, mock_authentication): mock_authentication.return_value = self.user_admin, None api_client = APIClient() @@ -218,22 +184,19 @@ def _check_authentication(self, methods: list[str], url: str): self._check_oidc_not_authenticated(methods, url) self._check_oidc_authenticated(methods, url) - @patch("application.commons.services.global_request.get_current_user") + @patch("application.access_control.services.current_user.get_current_user") def test_authentication(self, mock_user): mock_user.return_value = self.user_admin self._check_authentication(["get", "post"], "/api/api_configurations/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/api_configurations/1/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/api_configurations/1/") self._check_authentication(["get"], "/api/vulnerability_checks/") self._check_authentication(["get"], "/api/vulnerability_checks/1/") self._check_authentication(["get", "post"], "/api/general_rules/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/general_rules/1/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/general_rules/1/") + self._check_authentication(["post"], "/api/general_rules/1/simulate/") self._check_authentication(["get"], "/api/metrics/export_csv/") self._check_authentication(["get"], "/api/metrics/export_excel/") @@ -243,9 +206,7 @@ def test_authentication(self, mock_user): self._check_authentication(["get"], "/api/metrics/product_metrics_status/") self._check_authentication(["get"], "/api/observations/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/observations/1/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/observations/1/") self._check_authentication(["patch"], "/api/observations/1/assessment/") self._check_authentication(["patch"], "/api/observations/1/remove_assessment/") self._check_authentication(["post"], "/api/observations/bulk_assessment/") @@ -265,71 +226,54 @@ def test_authentication(self, mock_user): 
self._check_authentication(["get"], "/api/parsers/1/") self._check_authentication(["get", "post"], "/api/branches/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/branches/1/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/branches/1/") self._check_authentication(["get"], "/api/branch_names/") self._check_authentication(["get"], "/api/branch_names/1/") - self._check_authentication(["get"], "/api/services/") - self._check_authentication(["delete", "get"], "/api/services/1/") + self._check_authentication(["get", "post"], "/api/services/") + self._check_authentication(["delete", "get", "put", "patch"], "/api/services/1/") + + self._check_authentication(["get"], "/api/service_names/") + self._check_authentication(["get"], "/api/service_names/1/") self._check_authentication(["get", "post"], "/api/product_members/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/product_members/1/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/product_members/1/") self._check_authentication(["get", "post"], "/api/product_rules/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/product_rules/1/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/product_rules/1/") + self._check_authentication(["post"], "/api/product_rules/1/simulate/") self._check_authentication(["get", "post"], "/api/product_groups/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/product_groups/1/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/product_groups/1/") self._check_authentication(["get"], "/api/product_group_names/") self._check_authentication(["get"], "/api/product_group_names/1/") self._check_authentication(["get", "post"], "/api/products/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/products/1/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/products/1/") self._check_authentication(["post"], "/api/products/1/apply_rules/") - self._check_authentication( - ["post"], "/api/products/1/observations_bulk_assessment/" - ) - self._check_authentication( - ["post"], "/api/products/1/observations_bulk_delete/" - ) - self._check_authentication( - ["post"], "/api/products/1/observations_bulk_mark_duplicates/" - ) - self._check_authentication( - ["post"], "/api/products/1/license_components_bulk_delete/" - ) + self._check_authentication(["post"], "/api/products/1/observations_bulk_assessment/") + self._check_authentication(["post"], "/api/products/1/observations_bulk_delete/") + self._check_authentication(["post"], "/api/products/1/observations_bulk_mark_duplicates/") + self._check_authentication(["post"], "/api/products/1/license_components_bulk_delete/") self._check_authentication(["get"], "/api/products/1/export_observations_csv/") - self._check_authentication( - ["get"], "/api/products/1/export_observations_excel/" - ) + self._check_authentication(["get"], "/api/products/1/export_observations_excel/") - self._check_authentication( - ["get"], "/api/products/1/export_license_components_csv/" - ) - self._check_authentication( - ["get"], "/api/products/1/export_license_components_excel/" - ) + self._check_authentication(["get"], "/api/products/1/export_license_components_csv/") + self._check_authentication(["get"], "/api/products/1/export_license_components_excel/") self._check_authentication(["get"], "/api/product_names/") self._check_authentication(["get"], "/api/product_names/1/") 
self._check_authentication(["get"], "/api/evidences/1/") + self._check_authentication(["get"], "/api/components/") + self._check_authentication(["get"], "/api/components/1/") + self._check_authentication(["get"], "/api/status/version/") self._check_authentication(["get", "post"], "/api/product_api_tokens/") @@ -337,7 +281,8 @@ def test_authentication(self, mock_user): self._check_authentication(["get"], "/api/notifications/") self._check_authentication(["delete", "get"], "/api/notifications/1/") - self._check_authentication(["post"], "/api/notifications/bulk_delete/") + self._check_authentication(["post"], "/api/notifications/1/mark_as_viewed/") + self._check_authentication(["post"], "/api/notifications/bulk_mark_as_viewed/") self._check_authentication(["get"], "/api/licenses/") self._check_authentication(["get"], "/api/licenses/1/") @@ -352,33 +297,22 @@ def test_authentication(self, mock_user): self._check_authentication(["get"], "/api/license_component_evidences/1/") self._check_authentication(["get", "post"], "/api/license_groups/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/license_groups/1/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/license_groups/1/") self._check_authentication(["post"], "/api/license_groups/1/copy/") self._check_authentication(["post"], "/api/license_groups/1/add_license/") self._check_authentication(["post"], "/api/license_groups/1/remove_license/") - self._check_authentication( - ["post"], "/api/license_groups/import_scancode_licensedb/" - ) self._check_authentication(["get", "post"], "/api/license_group_members/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/license_group_members/1001/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/license_group_members/1001/") - self._check_authentication( - ["get", "post"], "/api/license_group_authorization_group_members/" - ) + self._check_authentication(["get", "post"], "/api/license_group_authorization_group_members/") self._check_authentication( ["delete", "get", "put", "patch"], "/api/license_group_authorization_group_members/1001/", ) self._check_authentication(["get", "post"], "/api/license_policies/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/license_policies/1/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/license_policies/1/") self._check_authentication(["post"], "/api/license_policies/1/copy/") self._check_authentication(["post"], "/api/license_policies/1/apply/") self._check_authentication(["post"], "/api/license_policies/apply_product/") @@ -386,23 +320,23 @@ def test_authentication(self, mock_user): self._check_authentication(["get"], "/api/license_policies/1/export_yaml/") self._check_authentication(["get", "post"], "/api/license_policy_items/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/license_policy_items/1001/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/license_policy_items/1001/") self._check_authentication(["get", "post"], "/api/license_policy_members/") - self._check_authentication( - ["delete", "get", "put", "patch"], "/api/license_policy_members/1001/" - ) + self._check_authentication(["delete", "get", "put", "patch"], "/api/license_policy_members/1001/") - self._check_authentication( - ["get", "post"], "/api/license_policy_authorization_group_members/" - ) + self._check_authentication(["get", "post"], "/api/license_policy_authorization_group_members/") 
self._check_authentication( ["delete", "get", "put", "patch"], "/api/license_policy_authorization_group_members/1001/", ) + self._check_authentication(["get"], "/api/concluded_licenses/") + self._check_authentication(["delete", "get"], "/api/concluded_licenses/1/") + + self._check_authentication(["get"], "/api/periodic_tasks/") + self._check_authentication(["get"], "/api/periodic_tasks/1/") + def test_authentication_users(self): self._check_authentication(["get"], "/api/users/me/") self._check_authentication(["get"], "/api/users/") diff --git a/backend/unittests/access_control/api/test_authorization_api_tokens.py b/backend/unittests/access_control/api/test_authorization_api_tokens.py deleted file mode 100644 index 08019a203..000000000 --- a/backend/unittests/access_control/api/test_authorization_api_tokens.py +++ /dev/null @@ -1,34 +0,0 @@ -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) - - -class TestAuthorizationApiTokens(TestAuthorizationBase): - def test_authorization_api_tokens(self): - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 5, 'name': '-product-2-api_token-', 'product': 2, 'product_group': None}]}" - self._test_api( - APITest( - "db_admin", - "get", - "/api/api_tokens/", - None, - 200, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/api_tokens/", - None, - 403, - expected_data, - no_second_user=True, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_branches.py b/backend/unittests/access_control/api/test_authorization_branches.py deleted file mode 100644 index 751a314e0..000000000 --- a/backend/unittests/access_control/api/test_authorization_branches.py +++ /dev/null @@ -1,151 +0,0 @@ -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) -from unittests.access_control.services.test_authorization import ( - prepare_authorization_groups, -) - - -class TestAuthorizationBranches(TestAuthorizationBase): - def test_authorization_branches_product_member(self): - self._test_authorization_branches() - - def _test_authorization_branches_product_authorization_group_member(self): - prepare_authorization_groups() - self._test_authorization_branches() - - def _test_authorization_branches(self): - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1, 'name_with_product': 'db_branch_internal_dev (db_product_internal)', 'is_default_branch': True, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_branch_internal_dev', 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'product': 1}, {'id': 2, 'name_with_product': 'db_branch_internal_main (db_product_internal)', 'is_default_branch': False, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 
0, 'ignored_licenses_count': 0, 'name': 'db_branch_internal_main', 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'product': 1}, {'id': 3, 'name_with_product': 'db_branch_external (db_product_external)', 'is_default_branch': True, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_branch_external', 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'product': 2}]}" - self._test_api( - APITest("db_admin", "get", "/api/branches/", None, 200, expected_data) - ) - - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'name_with_product': 'db_branch_internal_dev (db_product_internal)', 'is_default_branch': True, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_branch_internal_dev', 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'product': 1}, {'id': 2, 'name_with_product': 'db_branch_internal_main (db_product_internal)', 'is_default_branch': False, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_branch_internal_main', 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'product': 1}]}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/branches/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'id': 1, 'name_with_product': 'db_branch_internal_dev (db_product_internal)', 'is_default_branch': True, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_branch_internal_dev', 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'product': 1}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/branches/1/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'message': 'No Branch matches the given query.'}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/branches/3/", - None, - 404, - expected_data, - ) - ) - - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/branches/99999/", - None, - 404, - expected_data, - ) - ) - - post_data = {"name": "string", "product": 1} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "post", - "/api/branches/", - post_data, - 
403, - expected_data, - ) - ) - - expected_data = "{'id': 4, 'name_with_product': 'string (db_product_internal)', 'is_default_branch': False, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'string', 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'product': 1}" - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/branches/", - post_data, - 201, - expected_data, - ) - ) - - post_data = {"name": "changed"} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "patch", - "/api/branches/1/", - post_data, - 403, - expected_data, - ) - ) - - expected_data = "{'id': 1, 'name_with_product': 'changed (db_product_internal)', 'is_default_branch': True, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'changed', 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'product': 1}" - self._test_api( - APITest( - "db_internal_write", - "patch", - "/api/branches/1/", - post_data, - 200, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "delete", - "/api/branches/1/", - None, - 403, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'You cannot delete the default branch of a product.'}" - ) - self._test_api( - APITest( - "db_internal_write", - "delete", - "/api/branches/1/", - None, - 400, - expected_data, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_license_components.py b/backend/unittests/access_control/api/test_authorization_license_components.py deleted file mode 100644 index 3d0eefea8..000000000 --- a/backend/unittests/access_control/api/test_authorization_license_components.py +++ /dev/null @@ -1,102 +0,0 @@ -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) -from unittests.access_control.services.test_authorization import ( - prepare_authorization_groups, -) - - -class TestAuthorizationLicense_Components(TestAuthorizationBase): - def test_authorization_license_components_product_member(self): - self._test_authorization_license_components() - - def test_authorization_license_components_product_authorization_group_member(self): - prepare_authorization_groups() - self._test_authorization_license_components() - - def _test_authorization_license_components(self): - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'license_data': None, 'component_purl_namespace': '', 'branch_name': '', 'type': 'Non-SPDX', 'title': 'internal license / internal_component:1.0.0', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'upload_filename': '', 'component_name': 'internal_component', 'component_version': '1.0.0', 
'component_name_version': 'internal_component:1.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'license_name': 'internal license', 'license_expression': '', 'non_spdx_license': 'internal license', 'evaluation_result': 'Allowed', 'numerical_evaluation_result': 1, 'created': '2022-12-15T17:10:35.513000+01:00', 'import_last_seen': '2022-12-15T17:10:35.513000+01:00', 'last_change': '2022-12-15T17:10:35.513000+01:00', 'product': 1, 'branch': None, 'license': None}, {'id': 2, 'license_data': None, 'component_purl_namespace': '', 'branch_name': '', 'type': 'Non-SPDX', 'title': 'external license / external_component:2.0.0', 'identity_hash': 'bc8e59b7687fe3533616b3914c636389c131eac3bdbda1b67d8d26f890a74007', 'upload_filename': '', 'component_name': 'external_component', 'component_version': '2.0.0', 'component_name_version': 'external_component:2.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'license_name': 'external license', 'license_expression': '', 'non_spdx_license': 'external license', 'evaluation_result': 'Review required', 'numerical_evaluation_result': 2, 'created': '2022-12-15T17:10:35.513000+01:00', 'import_last_seen': '2022-12-15T17:10:35.513000+01:00', 'last_change': '2022-12-15T17:10:35.513000+01:00', 'product': 2, 'branch': None, 'license': None}]}" - self._test_api( - APITest( - "db_admin", "get", "/api/license_components/", None, 200, expected_data - ) - ) - - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'license_data': None, 'component_purl_namespace': '', 'branch_name': '', 'type': 'Non-SPDX', 'title': 'internal license / internal_component:1.0.0', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'upload_filename': '', 'component_name': 'internal_component', 'component_version': '1.0.0', 'component_name_version': 'internal_component:1.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'license_name': 'internal license', 'license_expression': '', 'non_spdx_license': 'internal license', 'evaluation_result': 'Allowed', 'numerical_evaluation_result': 1, 'created': '2022-12-15T17:10:35.513000+01:00', 'import_last_seen': '2022-12-15T17:10:35.513000+01:00', 'last_change': '2022-12-15T17:10:35.513000+01:00', 'product': 1, 'branch': None, 'license': None}]}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/license_components/", - None, - 200, - expected_data, - ) - ) - expected_data = "{'id': 1, 'license_data': None, 'component_purl_namespace': '', 'branch_name': '', 'license_policy_name': '', 'license_policy_id': 0, 'evidences': [{'id': 1, 'name': 'internal_license_evidence_name'}], 'type': 'Non-SPDX', 'title': 'internal license / internal_component:1.0.0', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'upload_filename': '', 'component_name': 'internal_component', 'component_version': '1.0.0', 'component_name_version': 'internal_component:1.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'component_dependencies': '', 'license_name': 'internal license', 'license_expression': '', 'non_spdx_license': 'internal license', 'evaluation_result': 'Allowed', 'numerical_evaluation_result': 1, 'created': '2022-12-15T17:10:35.513000+01:00', 'import_last_seen': '2022-12-15T17:10:35.513000+01:00', 'last_change': '2022-12-15T17:10:35.513000+01:00', 'product': 1, 'branch': None, 'license': None}" - self._test_api( - APITest( - "db_internal_write", - "get", - 
"/api/license_components/1/", - None, - 200, - expected_data, - ) - ) - expected_data = "{'message': 'No License_Component matches the given query.'}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/license_components/2/", - None, - 404, - expected_data, - ) - ) - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/license_components/99999/", - None, - 404, - expected_data, - ) - ) - - post_data = {"product": 1} - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/license_components/", - post_data, - 405, - None, - ) - ) - - self._test_api( - APITest( - "db_internal_write", - "patch", - "/api/license_components/1/", - {"title": "changed"}, - 405, - None, - ) - ) - - self._test_api( - APITest( - "db_internal_write", - "delete", - "/api/license_components/1/", - None, - 405, - None, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_license_policy_items.py b/backend/unittests/access_control/api/test_authorization_license_policy_items.py deleted file mode 100644 index 41be6adb4..000000000 --- a/backend/unittests/access_control/api/test_authorization_license_policy_items.py +++ /dev/null @@ -1,292 +0,0 @@ -from application.licenses.models import License_Policy -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) - - -class TestAuthorizationLicensePolicyItems(TestAuthorizationBase): - def test_authorization_license_policy_items(self): - License_Policy.objects.filter(pk__lt=1000).delete() - - expected_data = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_spdx_id': '', 'license_group_name': 'Permissive Model (Blue Oak Council)', 'license_policy_data': {'id': 1000, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': False, 'name': 'public', 'description': '', 'is_public': True, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': '', 'evaluation_result': 'Allowed', 'comment': '', 'license_policy': 1000, 'license_group': 1, 'license': None}, {'id': 1001, 'license_spdx_id': '0BSD', 'license_group_name': '', 'license_policy_data': {'id': 1001, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_read_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': '', 'evaluation_result': 'Forbidden', 'comment': '', 'license_policy': 1001, 'license_group': None, 'license': 1}, {'id': 1002, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Two non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1002, 'license_group': None, 'license': None}, {'id': 1003, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1003, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': 
False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Three non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1003, 'license_group': None, 'license': None}, {'id': 1004, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1004, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Four non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1004, 'license_group': None, 'license': None}]}" - self._test_api( - APITest( - "db_admin", - "get", - "/api/license_policy_items/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_spdx_id': '', 'license_group_name': 'Permissive Model (Blue Oak Council)', 'license_policy_data': {'id': 1000, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': False, 'name': 'public', 'description': '', 'is_public': True, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': '', 'evaluation_result': 'Allowed', 'comment': '', 'license_policy': 1000, 'license_group': 1, 'license': None}, {'id': 1002, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Two non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1002, 'license_group': None, 'license': None}, {'id': 1003, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1003, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Three non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1003, 'license_group': None, 'license': None}]}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/license_policy_items/", - None, - 200, - expected_data, - no_second_user=True, - ) - ) - - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_spdx_id': '', 'license_group_name': 'Permissive Model (Blue Oak Council)', 'license_policy_data': {'id': 1000, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': False, 'name': 'public', 'description': '', 
'is_public': True, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': '', 'evaluation_result': 'Allowed', 'comment': '', 'license_policy': 1000, 'license_group': 1, 'license': None}, {'id': 1003, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1003, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Three non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1003, 'license_group': None, 'license': None}, {'id': 1004, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1004, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Four non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1004, 'license_group': None, 'license': None}]}" - self._test_api( - APITest( - "db_product_group_user", - "get", - "/api/license_policy_items/", - None, - 200, - expected_data, - no_second_user=True, - ) - ) - - expected_data = "{'id': 1002, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Two non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1002, 'license_group': None, 'license': None}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/license_policy_items/1002/", - None, - 200, - expected_data, - no_second_user=True, - ) - ) - expected_data = "{'message': 'No License_Policy_Item matches the given query.'}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/license_policy_items/1001/", - None, - 404, - expected_data, - no_second_user=True, - ) - ) - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/license_policy_items/99999/", - None, - 404, - expected_data, - no_second_user=True, - ) - ) - - post_data = { - "license_policy": 1002, - "license_group": 2, - "non_spdx_license": "", - "evaluation_result": "Allowed", - } - expected_data = "{'id': 1005, 'license_spdx_id': '', 'license_group_name': 'Permissive Gold (Blue Oak Council)', 'license_policy_data': {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': '', 'evaluation_result': 'Allowed', 'comment': '', 'license_policy': 1002, 'license_group': 2, 'license': None}" - self._test_api( - APITest( - "db_internal_write", - "post", 
- "/api/license_policy_items/", - post_data, - 201, - expected_data, - no_second_user=True, - ) - ) - - post_data = { - "license_policy": 1000, - "license_group": 2, - "non_spdx_license": "", - "evaluation_result": "Allowed", - } - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/license_policy_items/", - post_data, - 403, - expected_data, - no_second_user=True, - ) - ) - - post_data = { - "license_policy": 1004, - "license_group": 2, - "non_spdx_license": "", - "evaluation_result": "Allowed", - } - expected_data = "{'id': 1006, 'license_spdx_id': '', 'license_group_name': 'Permissive Gold (Blue Oak Council)', 'license_policy_data': {'id': 1004, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': '', 'evaluation_result': 'Allowed', 'comment': '', 'license_policy': 1004, 'license_group': 2, 'license': None}" - self._test_api( - APITest( - "db_product_group_user", - "post", - "/api/license_policy_items/", - post_data, - 201, - expected_data, - no_second_user=True, - ) - ) - - post_data = { - "license_policy": 1003, - "license_group": 2, - "non_spdx_license": "", - "evaluation_result": "Allowed", - } - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_product_group_user", - "post", - "/api/license_policy_items/", - post_data, - 403, - expected_data, - no_second_user=True, - ) - ) - - post_data = { - "license_policy": 1001, - "license_group": 2, - "non_spdx_license": "", - "evaluation_result": "Allowed", - } - self._test_api( - APITest( - "db_internal_read", - "post", - "/api/license_policy_items/", - post_data, - 403, - expected_data, - no_second_user=True, - ) - ) - - expected_data = "{'id': 1002, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Two non-spdx licenses', 'evaluation_result': 'Review required', 'comment': '', 'license_policy': 1002, 'license_group': None, 'license': None}" - self._test_api( - APITest( - "db_internal_write", - "patch", - "/api/license_policy_items/1002/", - { - "non_spdx_license": "Two non-spdx licenses", - "evaluation_result": "Review required", - }, - 200, - expected_data, - no_second_user=True, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "patch", - "/api/license_policy_items/1001/", - {"is_manager": "True"}, - 403, - expected_data, - no_second_user=True, - ) - ) - - expected_data = "{'id': 1004, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1004, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 
'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Four non-spdx licenses', 'evaluation_result': 'Review required', 'comment': '', 'license_policy': 1004, 'license_group': None, 'license': None}" - self._test_api( - APITest( - "db_product_group_user", - "patch", - "/api/license_policy_items/1004/", - { - "non_spdx_license": "Four non-spdx licenses", - "evaluation_result": "Review required", - }, - 200, - expected_data, - no_second_user=True, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_product_group_user", - "patch", - "/api/license_policy_items/1003/", - {"is_manager": "True"}, - 403, - expected_data, - no_second_user=True, - ) - ) - - self._test_api( - APITest( - "db_internal_write", - "delete", - "/api/license_policy_items/1002/", - None, - 204, - None, - no_second_user=True, - ) - ) - - self._test_api( - APITest( - "db_internal_read", - "delete", - "/api/license_policy_items/1001/", - None, - 403, - None, - no_second_user=True, - ) - ) - - self._test_api( - APITest( - "db_product_group_user", - "delete", - "/api/license_policy_items/1004/", - None, - 204, - None, - no_second_user=True, - ) - ) - - self._test_api( - APITest( - "db_product_group_user", - "delete", - "/api/license_policy_items/1003/", - None, - 403, - None, - no_second_user=True, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_license_policy_members.py b/backend/unittests/access_control/api/test_authorization_license_policy_members.py deleted file mode 100644 index b89c0c323..000000000 --- a/backend/unittests/access_control/api/test_authorization_license_policy_members.py +++ /dev/null @@ -1,168 +0,0 @@ -from application.licenses.models import License_Policy -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) - - -class TestAuthorizationLicensePolicyMembers(TestAuthorizationBase): - def test_authorization_license_policy_members(self): - License_Policy.objects.filter(pk__lt=1000).delete() - - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1001, 'license_policy_data': {'id': 1001, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_read_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, 'is_manager': False, 'license_policy': 1001, 'user': 3}, {'id': 1002, 'license_policy_data': {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 
'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'license_policy': 1002, 'user': 2}, {'id': 1003, 'license_policy_data': {'id': 1001, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_read_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False}, 'is_manager': False, 'license_policy': 1001, 'user': 4}]}" - self._test_api( - APITest( - "db_admin", - "get", - "/api/license_policy_members/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1002, 'license_policy_data': {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'license_policy': 1002, 'user': 2}]}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/license_policy_members/", - None, - 200, - expected_data, - no_second_user=True, - ) - ) - - expected_data = "{'id': 1002, 'license_policy_data': {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'license_policy': 1002, 'user': 2}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/license_policy_members/1002/", - None, - 200, - expected_data, - no_second_user=True, - ) - ) - expected_data = ( - "{'message': 'No License_Policy_Member matches the given query.'}" - ) - self._test_api( - APITest( - 
"db_internal_write", - "get", - "/api/license_policy_members/1001/", - None, - 404, - expected_data, - no_second_user=True, - ) - ) - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/license_policy_members/99999/", - None, - 404, - expected_data, - no_second_user=True, - ) - ) - - post_data = {"license_policy": 1002, "user": 6, "is_manager": False} - expected_data = "{'id': 1004, 'license_policy_data': {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'full_name': 'db_product_group_user'}, 'is_manager': False, 'license_policy': 1002, 'user': 6}" - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/license_policy_members/", - post_data, - 201, - expected_data, - no_second_user=True, - ) - ) - - post_data = {"license_policy": 1000, "user": 6, "is_manager": False} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/license_policy_members/", - post_data, - 403, - expected_data, - no_second_user=True, - ) - ) - - post_data = {"license_policy": 1001, "user": 6, "is_manager": False} - self._test_api( - APITest( - "db_internal_read", - "post", - "/api/license_policy_members/", - post_data, - 403, - expected_data, - no_second_user=True, - ) - ) - - expected_data = "{'id': 1004, 'license_policy_data': {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'full_name': 'db_product_group_user'}, 'is_manager': True, 'license_policy': 1002, 'user': 6}" - self._test_api( - APITest( - "db_internal_write", - "patch", - "/api/license_policy_members/1004/", - {"is_manager": "True"}, - 200, - expected_data, - no_second_user=True, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "patch", - "/api/license_policy_members/1001/", - {"is_manager": "True"}, - 403, - expected_data, - no_second_user=True, - ) - ) - - self._test_api( - APITest( - "db_internal_write", - "delete", - "/api/license_policy_members/1004/", - None, - 204, - None, - no_second_user=True, - ) - ) - - self._test_api( - APITest( - "db_internal_read", - "delete", - "/api/license_policy_members/1001/", - None, - 403, - None, - no_second_user=True, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_notifications.py b/backend/unittests/access_control/api/test_authorization_notifications.py deleted file mode 100644 index b84738d43..000000000 --- a/backend/unittests/access_control/api/test_authorization_notifications.py +++ /dev/null @@ -1,83 +0,0 @@ -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) -from unittests.access_control.services.test_authorization import ( - prepare_authorization_groups, -) - - -class TestAuthorizationNotifications(TestAuthorizationBase): - def 
test_authorization_notifications_product_member(self): - self._test_authorization_notifications() - - def test_authorization_notifications_product_authorization_group_member(self): - prepare_authorization_groups() - self._test_authorization_notifications() - - def _test_authorization_notifications(self): - expected_data = "{'count': 6, 'next': None, 'previous': None, 'results': [{'id': 1, 'message': 'message_exception_internal', 'product_name': 'db_product_internal', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_internal_write', 'name': 'exception_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Exception', 'function': '', 'arguments': '', 'user': 2, 'product': 1, 'observation': 1}, {'id': 2, 'message': 'message_exception_external', 'product_name': 'db_product_external', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_external', 'name': 'exception_external', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Exception', 'function': '', 'arguments': '', 'user': 4, 'product': 2, 'observation': 2}, {'id': 3, 'message': '', 'product_name': 'db_product_internal', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_internal_write', 'name': 'security_gate_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Security gate', 'function': '', 'arguments': '', 'user': 2, 'product': 1, 'observation': 1}, {'id': 4, 'message': '', 'product_name': 'db_product_external', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_external', 'name': 'security_gate_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Security gate', 'function': '', 'arguments': '', 'user': 4, 'product': 2, 'observation': 2}, {'id': 5, 'message': 'message_task_internal', 'product_name': 'db_product_internal', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_internal_write', 'name': 'task_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Task', 'function': 'function_task_internal', 'arguments': 'arguments_task_internal', 'user': 2, 'product': 1, 'observation': 1}, {'id': 6, 'message': 'message_task_external', 'product_name': 'db_product_external', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_external', 'name': 'task_external', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Task', 'function': 'function_task_external', 'arguments': 'arguments_task_external', 'user': 4, 'product': 2, 'observation': 2}]}" - self._test_api( - APITest("db_admin", "get", "/api/notifications/", None, 200, expected_data) - ) - - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 3, 'message': '', 'product_name': 'db_product_internal', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_internal_write', 'name': 'security_gate_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Security gate', 'function': '', 'arguments': '', 'user': 2, 'product': 1, 'observation': 1}, {'id': 5, 'message': '...', 'product_name': 'db_product_internal', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_internal_write', 'name': 'task_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Task', 'function': 'function_task_internal', 'arguments': 'arguments_task_internal', 'user': 2, 'product': 1, 'observation': 1}]}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/notifications/", - None, - 200, - expected_data, - ) - ) - - self._test_api( - APITest( - 
"db_internal_write", "get", "/api/notifications/1/", None, 404, None - ) - ) - - self._test_api( - APITest( - "db_internal_write", "get", "/api/notifications/3/", None, 200, None - ) - ) - - post_data = {"notifications": [1, 3, 5]} - expected_data = "{'message': 'Some notifications do not exist'}" - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/notifications/bulk_delete/", - post_data, - 400, - expected_data, - ) - ) - - post_data = {"notifications": [3, 5]} - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/notifications/bulk_delete/", - post_data, - 204, - None, - ) - ) - - expected_data = "{'count': 0, 'next': None, 'previous': None, 'results': []}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/notifications/", - None, - 200, - expected_data, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_observation_logs.py b/backend/unittests/access_control/api/test_authorization_observation_logs.py deleted file mode 100644 index fb320f30b..000000000 --- a/backend/unittests/access_control/api/test_authorization_observation_logs.py +++ /dev/null @@ -1,54 +0,0 @@ -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) -from unittests.access_control.services.test_authorization import ( - prepare_authorization_groups, -) - - -class TestAuthorizationObservationLogs(TestAuthorizationBase): - def test_authorization_observation_logs_product_member(self): - self._test_authorization_observation_logs() - - def test_authorization_observation_logs_product_authorization_group_member(self): - prepare_authorization_groups() - self._test_authorization_observation_logs() - - def _test_authorization_observation_logs(self): - expected_data = "{'count': 4, 'next': None, 'previous': None, 'results': [{'id': 2, 'observation_data': {'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 
'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'vex_statement': None}, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'severity': '', 'status': 'Duplicate', 'comment': 'Set by product rule', 'created': '2022-12-15T17:10:35.524000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 1, 'user': 2, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}, {'id': 1, 'observation_data': {'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': 
None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 1, 'branch': 1, 
'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'vex_statement': None}, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'severity': 'Medium', 'status': 'Open', 'comment': 'Set by parser', 'created': '2022-12-15T17:10:35.518000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 1, 'user': 2, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}, {'id': 4, 'observation_data': {'id': 2, 'product_data': {'id': 2, 'product_group_name': '', 'name': 'db_product_external', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': 3, 'license_policy': None}, 'branch_name': '', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'False positive', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'False positive', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 
'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.876000+01:00', 'created': '2022-12-15T17:10:35.521000+01:00', 'modified': '2022-12-16T17:13:18.283000+01:00', 'last_observation_log': '2022-12-16T17:13:18.283000+01:00', 'identity_hash': 'bc8e59b7687fe3533616b3914c636389c131eac3bdbda1b67d8d26f890a74007', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 2, 'branch': None, 'parser': 1, 'origin_service': None, 'general_rule': None, 'product_rule': 2, 'vex_statement': None}, 'user_full_name': 'db_external', 'approval_user_full_name': None, 'severity': '', 'status': 'False positive', 'comment': 'Set by product rule', 'created': '2022-12-15T17:12:23.196000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 2, 'user': 4, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}, {'id': 3, 'observation_data': {'id': 2, 'product_data': {'id': 2, 'product_group_name': '', 'name': 'db_product_external', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': 3, 'license_policy': None}, 'branch_name': '', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 
'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'False positive', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'False positive', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.876000+01:00', 'created': '2022-12-15T17:10:35.521000+01:00', 'modified': '2022-12-16T17:13:18.283000+01:00', 'last_observation_log': '2022-12-16T17:13:18.283000+01:00', 'identity_hash': 'bc8e59b7687fe3533616b3914c636389c131eac3bdbda1b67d8d26f890a74007', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 2, 'branch': None, 'parser': 1, 'origin_service': None, 'general_rule': None, 'product_rule': 2, 'vex_statement': None}, 'user_full_name': 'db_external', 'approval_user_full_name': None, 'severity': 'Medium', 'status': 'Open', 'comment': 'Set by parser', 'created': '2022-12-15T17:11:28.326000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 2, 'user': 4, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}]}" - self._test_api( - APITest( - "db_admin", "get", "/api/observation_logs/", None, 200, expected_data - ) - ) - - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 2, 'observation_data': {'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 
'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': 
None, 'product_rule': 1, 'vex_statement': None}, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'severity': '', 'status': 'Duplicate', 'comment': 'Set by product rule', 'created': '2022-12-15T17:10:35.524000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 1, 'user': 2, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}, {'id': 1, 'observation_data': {'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 
'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'vex_statement': None}, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'severity': 'Medium', 'status': 'Open', 'comment': 'Set by parser', 'created': '2022-12-15T17:10:35.518000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 1, 'user': 2, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}]}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/observation_logs/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'id': 1, 'observation_data': {'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 
'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'module_name': '', 'class_name': ''}, 'references': [], 'evidences': [{'id': 1, 'name': 'db_evidence_internal'}], 'origin_source_file_url': None, 'origin_component_purl_type': '', 'origin_component_purl_namespace': '', 'issue_tracker_issue_url': None, 'assessment_needs_approval': None, 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_component_cpe': '', 'origin_component_dependencies': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'vex_statement': None}, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'severity': 'Medium', 'status': 'Open', 'comment': 'Set by parser', 'created': '2022-12-15T17:10:35.518000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 1, 'user': 2, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/observation_logs/1/", - None, - 200, 
- expected_data, - ) - ) - - self._test_api( - APITest( - "db_internal_write", "get", "/api/observation_logs/3/", None, 404, None - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_observations.py b/backend/unittests/access_control/api/test_authorization_observations.py deleted file mode 100644 index fc415a331..000000000 --- a/backend/unittests/access_control/api/test_authorization_observations.py +++ /dev/null @@ -1,193 +0,0 @@ -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) -from unittests.access_control.services.test_authorization import ( - prepare_authorization_groups, -) - - -class TestAuthorizationObservations(TestAuthorizationBase): - def test_authorization_observations_product_member(self): - self._test_authorization_observations() - - def test_authorization_observations_product_authorization_group_member(self): - prepare_authorization_groups() - self._test_authorization_observations() - - def _test_authorization_observations(self): - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': 
'', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'vex_statement': None}, {'id': 2, 'product_data': {'id': 2, 'product_group_name': '', 'name': 'db_product_external', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': 3, 'license_policy': None}, 'branch_name': '', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 
'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'False positive', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'False positive', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.876000+01:00', 'created': '2022-12-15T17:10:35.521000+01:00', 'modified': '2022-12-16T17:13:18.283000+01:00', 'last_observation_log': '2022-12-16T17:13:18.283000+01:00', 'identity_hash': 'bc8e59b7687fe3533616b3914c636389c131eac3bdbda1b67d8d26f890a74007', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 2, 'branch': None, 'parser': 1, 'origin_service': None, 'general_rule': None, 'product_rule': 2, 'vex_statement': None}]}" - self._test_api( - APITest("db_admin", "get", "/api/observations/", None, 200, expected_data) - ) - - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 
'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'vex_statement': None}]}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/observations/", - None, - 200, - expected_data, - ) - ) - expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 
'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'module_name': '', 'class_name': ''}, 'references': [], 'evidences': [{'id': 1, 'name': 'db_evidence_internal'}], 'origin_source_file_url': None, 'origin_component_purl_type': '', 'origin_component_purl_namespace': '', 'issue_tracker_issue_url': None, 'assessment_needs_approval': None, 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_component_cpe': '', 'origin_component_dependencies': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 
'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'vex_statement': None}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/observations/1/", - None, - 200, - expected_data, - ) - ) - expected_data = "{'message': 'No Observation matches the given query.'}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/observations/2/", - None, - 404, - expected_data, - ) - ) - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/observations/99999/", - None, - 404, - expected_data, - ) - ) - - post_data = {"product": 1} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "post", - "/api/observations/", - post_data, - 403, - expected_data, - ) - ) - expected_data = "{'message': 'Title: This field is required.'}" - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/observations/", - post_data, - 400, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "patch", - "/api/observations/1/", - {"title": "changed"}, - 403, - expected_data, - ) - ) - expected_data = ( - "{'message': 'Non field errors: Only manual observations can be updated'}" - ) - self._test_api( - APITest( - "db_internal_write", - "patch", - "/api/observations/1/", - {"title": "changed"}, - 400, - expected_data, - ) - ) - - post_data = {"comment": "reason for assessment"} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "patch", - "/api/observations/1/assessment/", - post_data, - 403, - expected_data, - ) - ) - expected_data = "{'message': 'Observation 99999 not found'}" - self._test_api( - APITest( - "db_internal_read", - "patch", - "/api/observations/99999/assessment/", - post_data, - 404, - expected_data, - ) - ) - expected_data = "None" - self._test_api( - APITest( - "db_internal_write", - "patch", - "/api/observations/1/assessment/", - post_data, - 200, - expected_data, - ) - ) - - post_data = {"comment": "reason for assessment removal"} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "patch", - "/api/observations/1/remove_assessment/", - post_data, - 403, - expected_data, - ) - ) - expected_data = "{'message': 'Observation 99999 not found'}" - self._test_api( - APITest( - "db_internal_read", - "patch", - 
"/api/observations/99999/remove_assessment/", - post_data, - 404, - expected_data, - ) - ) - expected_data = "None" - self._test_api( - APITest( - "db_internal_write", - "patch", - "/api/observations/1/remove_assessment/", - post_data, - 200, - expected_data, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_potential_duplicates.py b/backend/unittests/access_control/api/test_authorization_potential_duplicates.py deleted file mode 100644 index c639bf7d7..000000000 --- a/backend/unittests/access_control/api/test_authorization_potential_duplicates.py +++ /dev/null @@ -1,46 +0,0 @@ -from django.core.management import call_command - -from application.core.models import Observation -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) -from unittests.access_control.services.test_authorization import ( - prepare_authorization_groups, -) - - -class TestAuthorizationPotentialDuplicates(TestAuthorizationBase): - def test_authorization_potential_duplicates_product_member(self): - self._test_authorization_potential_duplicates() - - def test_authorization_potential_duplicates_product_authorization_group_member( - self, - ): - prepare_authorization_groups() - self._test_authorization_potential_duplicates() - - def _test_authorization_potential_duplicates(self): - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'potential_duplicate_observation': {'id': 1, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_dependencies': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': 
'6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'vex_statement': None}, 'type': 'Component', 'observation': 1}, {'id': 2, 'potential_duplicate_observation': {'id': 2, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'False positive', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'False positive', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_dependencies': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.876000+01:00', 'created': '2022-12-15T17:10:35.521000+01:00', 'modified': '2022-12-16T17:13:18.283000+01:00', 'last_observation_log': '2022-12-16T17:13:18.283000+01:00', 'identity_hash': 'bc8e59b7687fe3533616b3914c636389c131eac3bdbda1b67d8d26f890a74007', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 2, 'branch': None, 'parser': 1, 'origin_service': None, 'general_rule': None, 'product_rule': 2, 'vex_statement': None}, 'type': 'Source', 'observation': 2}]}" - self._test_api( - APITest( - "db_admin", - "get", - "/api/potential_duplicates/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'potential_duplicate_observation': {'id': 1, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 
'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'assessment_status': '', 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_dependencies': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'vex_statement': None}, 'type': 'Component', 'observation': 1}]}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/potential_duplicates/", - None, - 200, - expected_data, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_product_groups.py b/backend/unittests/access_control/api/test_authorization_product_groups.py deleted file mode 100644 index a2f7aa632..000000000 --- a/backend/unittests/access_control/api/test_authorization_product_groups.py +++ /dev/null @@ -1,95 +0,0 @@ -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) -from unittests.access_control.services.test_authorization import ( - prepare_authorization_groups, -) - - -class TestAuthorizationProductGroups(TestAuthorizationBase): - def test_product_groups_authorization_product_member(self): - self._test_product_groups_authorization() - - def test_product_groups_authorization_product_authorization_group_member(self): - prepare_authorization_groups() - self._test_product_groups_authorization() - - def _test_product_groups_authorization(self): - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 3, 'name': 'db_product_group', 
'description': '', 'products_count': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0}, {'id': 4, 'name': 'db_product_group_admin_only', 'description': '', 'products_count': 0, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0}]}" - self._test_api( - APITest("db_admin", "get", "/api/product_groups/", None, 200, expected_data) - ) - - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 3, 'name': 'db_product_group', 'description': '', 'products_count': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 
'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0}]}" - self._test_api( - APITest( - "db_product_group_user", - "get", - "/api/product_groups/", - None, - 200, - expected_data, - ) - ) - expected_data = "{'id': 3, 'name': 'db_product_group', 'description': '', 'products_count': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0}" - self._test_api( - APITest( - "db_product_group_user", - "get", - "/api/product_groups/3/", - None, - 200, - expected_data, - ) - ) - expected_data = "{'message': 'No Product matches the given query.'}" - self._test_api( - APITest( - "db_product_group_user", - "get", - "/api/product_groups/99999/", - None, - 404, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_external", - "post", - "/api/product_groups/", - {"name": "string"}, - 403, - expected_data, - ) - ) - expected_data = "{'id': 5, 'name': 'string', 'description': '', 'products_count': 0, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': 
False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0}" - self._test_api( - APITest( - "db_product_group_user", - "post", - "/api/product_groups/", - { - "name": "string", - }, - 201, - expected_data, - ) - ) - - expected_data = "{'id': 3, 'name': 'db_product_group', 'description': 'string', 'products_count': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0}" - self._test_api( - APITest( - "db_product_group_user", - "patch", - "/api/product_groups/3/", - {"description": "string"}, - 200, - expected_data, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_product_rules.py b/backend/unittests/access_control/api/test_authorization_product_rules.py deleted file mode 100644 index 5f074eb5a..000000000 --- a/backend/unittests/access_control/api/test_authorization_product_rules.py +++ /dev/null @@ -1,152 +0,0 @@ -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) -from unittests.access_control.services.test_authorization import ( - prepare_authorization_groups, -) - - -class TestAuthorizationProductRules(TestAuthorizationBase): - def test_authorization_product_rules_product_member(self): - self._test_authorization_product_rules() - - def test_authorization_product_rules_product_authorization_group_member(self): - prepare_authorization_groups() - self._test_authorization_product_rules() - - def _test_authorization_product_rules(self): - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 
'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_product_rule_internal', 'description': '', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': 'Duplicate', 'new_vex_justification': '', 'enabled': True, 'product': 1, 'parser': 1}, {'id': 2, 'product_data': {'id': 2, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_external', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 
'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': 3, 'license_policy': None}, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_product_rule_external', 'description': '', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': 'False positive', 'new_vex_justification': '', 'enabled': True, 'product': 2, 'parser': 1}]}" - self._test_api( - APITest("db_admin", "get", "/api/product_rules/", None, 200, expected_data) - ) - - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_product_rule_internal', 'description': '', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': 'Duplicate', 'new_vex_justification': '', 
'enabled': True, 'product': 1, 'parser': 1}]}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/product_rules/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_product_rule_internal', 'description': '', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': 'Duplicate', 'new_vex_justification': '', 'enabled': True, 'product': 1, 'parser': 1}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/product_rules/1/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'message': 'No Rule matches the given query.'}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/product_rules/3/", - None, - 404, - expected_data, - ) - ) - - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/product_rules/99999/", - None, - 404, - expected_data, - ) - ) - - post_data = {"name": "string", "product": 1, "parser": 1} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "post", - "/api/product_rules/", - post_data, - 403, - expected_data, - ) - ) - - expected_data = "{'id': 4, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , 
, , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'user': 'db_internal_write', 'approval_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'name': 'string', 'description': '', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': '', 'new_vex_justification': '', 'enabled': True, 'product': 1, 'parser': 1}" - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/product_rules/", - post_data, - 201, - expected_data, - ) - ) - - post_data = {"name": "changed", "scanner_prefix": "also_changed"} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "patch", - "/api/product_rules/1/", - post_data, - 403, - expected_data, - ) - ) - - expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 
'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'user': 'db_internal_write', 'approval_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'name': 'changed', 'description': '', 'scanner_prefix': 'also_changed', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': 'Duplicate', 'new_vex_justification': '', 'enabled': True, 'product': 1, 'parser': 1}" - self._test_api( - APITest( - "db_internal_write", - "patch", - "/api/product_rules/1/", - post_data, - 200, - expected_data, - no_second_user=True, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "delete", - "/api/product_rules/1/", - None, - 403, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'Cannot delete Rule because it still has Observations.'}" - ) - self._test_api( - APITest( - "db_internal_write", - "delete", - "/api/product_rules/1/", - None, - 409, - expected_data, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_products.py b/backend/unittests/access_control/api/test_authorization_products.py deleted file mode 100644 index 36ea5ae4e..000000000 --- a/backend/unittests/access_control/api/test_authorization_products.py +++ /dev/null @@ -1,212 +0,0 @@ -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) -from unittests.access_control.services.test_authorization import ( - prepare_authorization_groups, -) - - -class TestAuthorizationProducts(TestAuthorizationBase): - def test_authorization_products_product_member(self): - self._test_authorization_products() - - def test_authorization_products_product_authorization_group_member(self): - prepare_authorization_groups() - self._test_authorization_products() - - def _test_authorization_products(self): - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 
'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'product_group_name': 'db_product_group', 'product_group_repository_branch_housekeeping_active': None, 'product_group_security_gate_active': None, 'product_group_assessments_need_approval': False, 'repository_default_branch_name': 'db_branch_internal_dev', 'observation_reviews': 0, 'observation_log_approvals': 0, 'has_services': True, 'product_group_product_rules_need_approval': False, 'product_rule_approvals': 0, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'product_group_new_observations_in_review': False, 'has_branches': True, 'has_licenses': True, 'product_group_license_policy': None, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, {'id': 2, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'product_group_name': '', 'product_group_repository_branch_housekeeping_active': None, 'product_group_security_gate_active': None, 'product_group_assessments_need_approval': False, 'repository_default_branch_name': 'db_branch_external', 'observation_reviews': 0, 'observation_log_approvals': 0, 'has_services': True, 'product_group_product_rules_need_approval': False, 'product_rule_approvals': 0, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'product_group_new_observations_in_review': False, 'has_branches': True, 'has_licenses': True, 
'product_group_license_policy': None, 'name': 'db_product_external', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': 3, 'license_policy': None}]}" - self._test_api( - APITest("db_admin", "get", "/api/products/", None, 200, expected_data) - ) - - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'product_group_name': 'db_product_group', 'product_group_repository_branch_housekeeping_active': None, 'product_group_security_gate_active': None, 'product_group_assessments_need_approval': False, 'repository_default_branch_name': 'db_branch_internal_dev', 'observation_reviews': 0, 'observation_log_approvals': 0, 'has_services': True, 'product_group_product_rules_need_approval': False, 'product_rule_approvals': 0, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'product_group_new_observations_in_review': False, 'has_branches': True, 'has_licenses': True, 'product_group_license_policy': None, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 
'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}]}" - self._test_api( - APITest( - "db_internal_write", "get", "/api/products/", None, 200, expected_data - ) - ) - expected_data = "{'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'product_group_name': 'db_product_group', 'product_group_repository_branch_housekeeping_active': None, 'product_group_security_gate_active': None, 'product_group_assessments_need_approval': False, 'repository_default_branch_name': 'db_branch_internal_dev', 'observation_reviews': 0, 'observation_log_approvals': 0, 'has_services': True, 'product_group_product_rules_need_approval': False, 'product_rule_approvals': 0, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'product_group_new_observations_in_review': False, 'has_branches': True, 'has_licenses': True, 'product_group_license_policy': None, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}" - 
self._test_api( - APITest( - "db_internal_write", "get", "/api/products/1/", None, 200, expected_data - ) - ) - expected_data = "{'message': 'No Product matches the given query.'}" - self._test_api( - APITest( - "db_internal_write", "get", "/api/products/2/", None, 404, expected_data - ) - ) - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/products/99999/", - None, - 404, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_external", - "post", - "/api/products/", - {"name": "string"}, - 403, - expected_data, - ) - ) - expected_data = "{'id': 5, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_name': '', 'product_group_repository_branch_housekeeping_active': None, 'product_group_security_gate_active': None, 'product_group_assessments_need_approval': False, 'repository_default_branch_name': '', 'observation_reviews': 0, 'observation_log_approvals': 0, 'has_services': False, 'product_group_product_rules_need_approval': False, 'product_rule_approvals': 0, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'product_group_new_observations_in_review': False, 'has_branches': False, 'has_licenses': False, 'product_group_license_policy': None, 'name': 'string', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': None, 'license_policy': None}" - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/products/", - { - "name": "string", - "last_observation_change": "2022-12-16T17:13:18.283000+01:00", - }, - 201, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "patch", - "/api/products/1/", - {"description": "string"}, - 403, - expected_data, - ) - ) - expected_data = "{'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 
'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'product_group_name': 'db_product_group', 'product_group_repository_branch_housekeeping_active': None, 'product_group_security_gate_active': None, 'product_group_assessments_need_approval': False, 'repository_default_branch_name': 'db_branch_internal_dev', 'observation_reviews': 0, 'observation_log_approvals': 0, 'has_services': True, 'product_group_product_rules_need_approval': False, 'product_rule_approvals': 0, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'product_group_new_observations_in_review': False, 'has_branches': True, 'has_licenses': True, 'product_group_license_policy': None, 'name': 'db_product_internal', 'description': 'string', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}" - self._test_api( - APITest( - "db_internal_write", - "patch", - "/api/products/1/", - {"description": "string"}, - 200, - expected_data, - ) - ) - - post_data = None - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "post", - "/api/products/1/apply_rules/", - post_data, - 403, - expected_data, - ) - ) - expected_data = "None" - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/products/1/apply_rules/", - post_data, - 204, - expected_data, - ) - ) - - post_data = { - "severity": "Critical", - "status": "Open", - "comment": "string", - "observations": [], - } - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "post", - "/api/products/1/observations_bulk_assessment/", - post_data, - 403, - expected_data, - ) - ) - expected_data = "None" - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/products/1/observations_bulk_assessment/", - post_data, - 204, - expected_data, - ) - ) - - 
post_data = {"observations": []} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "post", - "/api/products/1/observations_bulk_delete/", - post_data, - 403, - expected_data, - ) - ) - expected_data = "None" - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/products/1/observations_bulk_delete/", - post_data, - 204, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "delete", - "/api/products/1/", - None, - 403, - expected_data, - ) - ) - expected_data = "{'message': 'Cannot delete Product because it still has Services, Observations, License_Components.'}" - self._test_api( - APITest( - "db_internal_write", - "delete", - "/api/products/1/", - None, - 409, - expected_data, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_services.py b/backend/unittests/access_control/api/test_authorization_services.py deleted file mode 100644 index cb22870e2..000000000 --- a/backend/unittests/access_control/api/test_authorization_services.py +++ /dev/null @@ -1,97 +0,0 @@ -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) -from unittests.access_control.services.test_authorization import ( - prepare_authorization_groups, -) - - -class TestAuthorizationServices(TestAuthorizationBase): - def test_authorization_services_product_member(self): - self._test_authorization_services() - - def test_authorization_services_product_authorization_group_member(self): - prepare_authorization_groups() - self._test_authorization_services() - - def _test_authorization_services(self): - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1, 'name_with_product': 'db_service_internal_backend (db_product_internal)', 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'name': 'db_service_internal_backend', 'product': 1}, {'id': 2, 'name_with_product': 'db_service_internal_frontend (db_product_internal)', 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'name': 'db_service_internal_frontend', 'product': 1}, {'id': 3, 'name_with_product': 'db_service_external (db_product_external)', 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'name': 'db_service_external', 'product': 2}]}" - self._test_api( - APITest("db_admin", "get", "/api/services/", None, 200, expected_data) - ) - - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'name_with_product': 'db_service_internal_backend (db_product_internal)', 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'name': 'db_service_internal_backend', 'product': 1}, {'id': 2, 'name_with_product': 'db_service_internal_frontend (db_product_internal)', 'open_critical_observation_count': 0, 
'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'name': 'db_service_internal_frontend', 'product': 1}]}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/services/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'id': 1, 'name_with_product': 'db_service_internal_backend (db_product_internal)', 'open_critical_observation_count': 0, 'open_high_observation_count': 0, 'open_medium_observation_count': 0, 'open_low_observation_count': 0, 'open_none_observation_count': 0, 'open_unknown_observation_count': 0, 'name': 'db_service_internal_backend', 'product': 1}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/services/1/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'message': 'No Service matches the given query.'}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/services/3/", - None, - 404, - expected_data, - ) - ) - - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/services/99999/", - None, - 404, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_read", - "delete", - "/api/services/1/", - None, - 403, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'Cannot delete Service because it still has Observations.'}" - ) - self._test_api( - APITest( - "db_internal_write", - "delete", - "/api/services/1/", - None, - 409, - expected_data, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_vulnerability_checks.py b/backend/unittests/access_control/api/test_authorization_vulnerability_checks.py deleted file mode 100644 index 5ee0ea946..000000000 --- a/backend/unittests/access_control/api/test_authorization_vulnerability_checks.py +++ /dev/null @@ -1,78 +0,0 @@ -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) -from unittests.access_control.services.test_authorization import ( - prepare_authorization_groups, -) - - -class TestAuthorizationVulnerabilityChecks(TestAuthorizationBase): - def test_authorization_vulnerability_checks_product_member(self): - self._test_authorization_vulnerability_checks() - - def test_authorization_vulnerability_checks_product_authorization_group_member( - self, - ): - prepare_authorization_groups() - self._test_authorization_vulnerability_checks() - - def _test_authorization_vulnerability_checks(self): - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1, 'branch_name': '', 'scanner_name': 'scanner_internal_no_branch', 'filename': 'filename_internal_no_branch', 'api_configuration_name': '', 'scanner': 'scanner_internal_no_branch / 1.0.0', 'first_import': '2022-12-15T17:10:35.521000+01:00', 'last_import': '2022-12-15T17:10:35.854000+01:00', 'last_import_observations_new': 1, 'last_import_observations_updated': 2, 'last_import_observations_resolved': 3, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 1, 'branch': None}, {'id': 2, 'branch_name': 'db_branch_internal_dev', 'scanner_name': 'scanner_internal_dev', 'filename': '', 'api_configuration_name': 'api_configuration_internal_dev', 'scanner': 'scanner_internal_dev', 'first_import': '2022-12-16T17:10:35.521000+01:00', 'last_import': '2022-12-16T17:10:35.854000+01:00', 
'last_import_observations_new': 4, 'last_import_observations_updated': 5, 'last_import_observations_resolved': 6, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 1, 'branch': 1}, {'id': 3, 'branch_name': 'db_branch_external', 'scanner_name': 'scanner_external', 'filename': 'filename_external', 'api_configuration_name': '', 'scanner': 'scanner_external', 'first_import': '2022-12-17T17:10:35.521000+01:00', 'last_import': '2022-12-17T17:10:35.854000+01:00', 'last_import_observations_new': 7, 'last_import_observations_updated': 8, 'last_import_observations_resolved': 9, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 2, 'branch': 3}]}" - self._test_api( - APITest( - "db_admin", - "get", - "/api/vulnerability_checks/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'branch_name': '', 'scanner_name': 'scanner_internal_no_branch', 'filename': 'filename_internal_no_branch', 'api_configuration_name': '', 'scanner': 'scanner_internal_no_branch / 1.0.0', 'first_import': '2022-12-15T17:10:35.521000+01:00', 'last_import': '2022-12-15T17:10:35.854000+01:00', 'last_import_observations_new': 1, 'last_import_observations_updated': 2, 'last_import_observations_resolved': 3, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 1, 'branch': None}, {'id': 2, 'branch_name': 'db_branch_internal_dev', 'scanner_name': 'scanner_internal_dev', 'filename': '', 'api_configuration_name': 'api_configuration_internal_dev', 'scanner': 'scanner_internal_dev', 'first_import': '2022-12-16T17:10:35.521000+01:00', 'last_import': '2022-12-16T17:10:35.854000+01:00', 'last_import_observations_new': 4, 'last_import_observations_updated': 5, 'last_import_observations_resolved': 6, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 1, 'branch': 1}]}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/vulnerability_checks/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'id': 1, 'branch_name': '', 'scanner_name': 'scanner_internal_no_branch', 'filename': 'filename_internal_no_branch', 'api_configuration_name': '', 'scanner': 'scanner_internal_no_branch / 1.0.0', 'first_import': '2022-12-15T17:10:35.521000+01:00', 'last_import': '2022-12-15T17:10:35.854000+01:00', 'last_import_observations_new': 1, 'last_import_observations_updated': 2, 'last_import_observations_resolved': 3, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 1, 'branch': None}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/vulnerability_checks/1/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'message': 'No Vulnerability_Check matches the given query.'}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/vulnerability_checks/3/", - None, - 404, - expected_data, - ) - ) - - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/vulnerability_checks/99999/", - None, - 404, - expected_data, - ) - ) diff --git a/backend/unittests/access_control/api/test_product_metrics.py b/backend/unittests/access_control/api/test_product_metrics.py deleted file mode 100644 index e11bd5be0..000000000 --- 
a/backend/unittests/access_control/api/test_product_metrics.py +++ /dev/null @@ -1,119 +0,0 @@ -from datetime import timedelta - -from django.utils import timezone - -from unittests.access_control.api.test_authorization import ( - APITest, - TestAuthorizationBase, -) - - -class TestAuthorizationProductMetrics(TestAuthorizationBase): - def test_authorization_metrics(self): - yesterday = (timezone.now() - timedelta(days=1)).date().isoformat() - today = timezone.now().date().isoformat() - - expected_data = "{'open_critical': 7, 'open_high': 9, 'open_medium': 11, 'open_low': 13, 'open_none': 15, 'open_unknown': 17, 'open': 19, 'resolved': 21, 'duplicate': 23, 'false_positive': 25, 'in_review': 27, 'not_affected': 29, 'not_security': 31, 'risk_accepted': 33}" - self._test_api( - APITest( - "db_admin", - "get", - "/api/metrics/product_metrics_current/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'2023-07-09': {'open_critical': 5, 'open_high': 7, 'open_medium': 9, 'open_low': 11, 'open_none': 13, 'open_unknown': 15, 'open': 17, 'resolved': 19, 'duplicate': 21, 'false_positive': 23, 'in_review': 25, 'not_affected': 27, 'not_security': 29, 'risk_accepted': 31}, '2023-07-10': {'open_critical': 7, 'open_high': 9, 'open_medium': 11, 'open_low': 13, 'open_none': 15, 'open_unknown': 17, 'open': 19, 'resolved': 21, 'duplicate': 23, 'false_positive': 25, 'in_review': 27, 'not_affected': 29, 'not_security': 31, 'risk_accepted': 33}}" - expected_data = expected_data.replace("2023-07-10", today) - expected_data = expected_data.replace("2023-07-09", yesterday) - self._test_api( - APITest( - "db_admin", - "get", - "/api/metrics/product_metrics_timeline/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'open_critical': 2, 'open_high': 3, 'open_medium': 4, 'open_low': 5, 'open_none': 6, 'open_unknown': 7, 'open': 8, 'resolved': 9, 'duplicate': 10, 'false_positive': 11, 'in_review': 12, 'not_affected': 13, 'not_security': 14, 'risk_accepted': 15}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/metrics/product_metrics_current/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'2023-07-09': {'open_critical': 1, 'open_high': 2, 'open_medium': 3, 'open_low': 4, 'open_none': 5, 'open_unknown': 6, 'open': 7, 'resolved': 8, 'duplicate': 9, 'false_positive': 10, 'in_review': 11, 'not_affected': 12, 'not_security': 13, 'risk_accepted': 14}, '2023-07-10': {'open_critical': 2, 'open_high': 3, 'open_medium': 4, 'open_low': 5, 'open_none': 6, 'open_unknown': 7, 'open': 8, 'resolved': 9, 'duplicate': 10, 'false_positive': 11, 'in_review': 12, 'not_affected': 13, 'not_security': 14, 'risk_accepted': 15}}" - expected_data = expected_data.replace("2023-07-10", today) - expected_data = expected_data.replace("2023-07-09", yesterday) - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/metrics/product_metrics_timeline/", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'open_critical': 2, 'open_high': 3, 'open_medium': 4, 'open_low': 5, 'open_none': 6, 'open_unknown': 7, 'open': 8, 'resolved': 9, 'duplicate': 10, 'false_positive': 11, 'in_review': 12, 'not_affected': 13, 'not_security': 14, 'risk_accepted': 15}" - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/metrics/product_metrics_current/?product_id=1", - None, - 200, - expected_data, - ) - ) - - expected_data = "{'2023-07-09': {'open_critical': 1, 'open_high': 2, 'open_medium': 3, 'open_low': 4, 'open_none': 5, 'open_unknown': 6, 'open': 7, 'resolved': 8, 
'duplicate': 9, 'false_positive': 10, 'in_review': 11, 'not_affected': 12, 'not_security': 13, 'risk_accepted': 14}, '2023-07-10': {'open_critical': 2, 'open_high': 3, 'open_medium': 4, 'open_low': 5, 'open_none': 6, 'open_unknown': 7, 'open': 8, 'resolved': 9, 'duplicate': 10, 'false_positive': 11, 'in_review': 12, 'not_affected': 13, 'not_security': 14, 'risk_accepted': 15}}" - expected_data = expected_data.replace("2023-07-10", today) - expected_data = expected_data.replace("2023-07-09", yesterday) - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/metrics/product_metrics_timeline/?product_id=1", - None, - 200, - expected_data, - ) - ) - - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/metrics/product_metrics_current/?product_id=2", - None, - 403, - expected_data, - ) - ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) - self._test_api( - APITest( - "db_internal_write", - "get", - "/api/metrics/product_metrics_timeline/?product_id=2", - None, - 403, - expected_data, - ) - ) diff --git a/backend/unittests/access_control/api/test_serializers.py b/backend/unittests/access_control/api/test_serializers.py index 6bdb384be..c3876d580 100644 --- a/backend/unittests/access_control/api/test_serializers.py +++ b/backend/unittests/access_control/api/test_serializers.py @@ -4,17 +4,17 @@ from application.access_control.api.serializers import ( AuthorizationGroupMemberSerializer, + _get_user_permissions, ) from application.access_control.models import Authorization_Group +from application.authorization.services.roles_permissions import Permissions from unittests.base_test_case import BaseTestCase class TestAuthorizationGroupMemberSerializer(BaseTestCase): def test_validate_authorization_group_change(self): auhorization_group_2 = Authorization_Group.objects.create(name="group_2") - authorization_group_member_serializer = AuthorizationGroupMemberSerializer( - self.authorization_group_member_1 - ) + authorization_group_member_serializer = AuthorizationGroupMemberSerializer(self.authorization_group_member_1) attrs = { "authorization_group": auhorization_group_2, } @@ -28,9 +28,7 @@ def test_validate_authorization_group_change(self): ) def test_validate_user_change(self): - authorization_group_member_serializer = AuthorizationGroupMemberSerializer( - self.authorization_group_member_1 - ) + authorization_group_member_serializer = AuthorizationGroupMemberSerializer(self.authorization_group_member_1) attrs = { "user": self.user_external, } @@ -59,14 +57,10 @@ def test_validate_already_exists(self, mock_authorization_group_member): "[ErrorDetail(string='Authorization group member authorization_group_1 / user_internal@example.com already exists', code='invalid')]", str(e.exception), ) - mock_authorization_group_member.assert_called_with( - self.authorization_group_1, self.user_internal - ) + mock_authorization_group_member.assert_called_with(self.authorization_group_1, self.user_internal) def test_validate_successful_with_instance(self): - authorization_group_member_serializer = AuthorizationGroupMemberSerializer( - self.authorization_group_member_1 - ) + authorization_group_member_serializer = AuthorizationGroupMemberSerializer(self.authorization_group_member_1) attrs = {"is_manager": False} new_attrs = authorization_group_member_serializer.validate(attrs) @@ -86,6 +80,17 @@ def test_validate_successful_no_instance(self, 
mock_authorization_group_member): new_attrs = authorization_group_member_serializer.validate(attrs) self.assertEqual(new_attrs, attrs) - mock_authorization_group_member.assert_called_with( - self.authorization_group_1, self.user_external - ) + mock_authorization_group_member.assert_called_with(self.authorization_group_1, self.user_external) + + +class TestUserListSerializer(BaseTestCase): + def test_get_user_permission_internal(self): + permissions = _get_user_permissions(self.user_internal) + self.assertEqual([Permissions.Product_Create, Permissions.Product_Group_Create], permissions) + + @patch("application.access_control.api.serializers.get_current_user") + def test_get_user_permission_external(self, mock): + mock.return_value = self.user_external + + permissions = _get_user_permissions() + self.assertEqual([], permissions) diff --git a/backend/unittests/access_control/api/test_views.py b/backend/unittests/access_control/api/test_views.py index 7ae1cac2b..692c09969 100644 --- a/backend/unittests/access_control/api/test_views.py +++ b/backend/unittests/access_control/api/test_views.py @@ -1,3 +1,4 @@ +from datetime import date from unittest.mock import patch from django.core.exceptions import ValidationError as DjangoValidationError @@ -17,10 +18,8 @@ def test_create_api_token_view_not_authenticated(self, mock): mock.side_effect = PermissionDenied("Invalid credentials") api_client = APIClient() - request_data = {"username": "user@example.com", "password": "not-so-secret"} - response = api_client.post( - reverse("create_user_api_token"), request_data, "json" - ) + request_data = {"username": "user@example.com", "password": "not-so-secret", "name": "api_token_name"} + response = api_client.post(reverse("create_user_api_token"), request_data, "json") self.assertEqual(403, response.status_code) self.assertEqual("Invalid credentials", response.data["message"]) @@ -29,23 +28,40 @@ def test_create_api_token_view_not_authenticated(self, mock): @patch("application.access_control.api.views._get_authenticated_user") @patch("application.access_control.api.views.create_user_api_token") def test_create_api_token_view_validation_error(self, api_mock, user_mock): - api_mock.side_effect = ValidationError( - "Only one API token per user is allowed." 
- ) + api_mock.side_effect = ValidationError("Only one API token per user is allowed.") user_mock.return_value = self.user_internal api_client = APIClient() - request_data = {"username": "user@example.com", "password": "not-so-secret"} - response = api_client.post( - reverse("create_user_api_token"), request_data, "json" - ) + request_data = { + "username": "user@example.com", + "password": "not-so-secret", + "name": "api_token_name", + "expiration_date": date.today(), + } + response = api_client.post(reverse("create_user_api_token"), request_data, "json") self.assertEqual(400, response.status_code) - self.assertEqual( - "Only one API token per user is allowed.", response.data["message"] - ) + self.assertEqual("Only one API token per user is allowed.", response.data["message"]) user_mock.assert_called_with(request_data) - api_mock.assert_called_with(self.user_internal) + api_mock.assert_called_with(self.user_internal, "api_token_name", date.today()) + + @patch("application.access_control.api.views._get_authenticated_user") + @patch("application.access_control.api.views.create_user_api_token") + def test_create_api_token_view_expiration_date_past(self, api_mock, user_mock): + api_mock.side_effect = ValidationError("Only one API token per user is allowed.") + user_mock.return_value = self.user_internal + + api_client = APIClient() + request_data = { + "username": "user@example.com", + "password": "not-so-secret", + "name": "api_token_name", + "expiration_date": date(2022, 2, 2), + } + response = api_client.post(reverse("create_user_api_token"), request_data, "json") + + self.assertEqual(400, response.status_code) + self.assertEqual("Expiration date: Expiration date cannot be in the past", response.data["message"]) @patch("application.access_control.api.views._get_authenticated_user") @patch("application.access_control.api.views.create_user_api_token") @@ -54,14 +70,12 @@ def test_create_api_token_view_successful(self, api_mock, user_mock): user_mock.return_value = self.user_internal api_client = APIClient() - request_data = {"username": "user@example.com", "password": "not-so-secret"} - response = api_client.post( - reverse("create_user_api_token"), request_data, "json" - ) + request_data = {"username": "user@example.com", "password": "not-so-secret", "name": "api_token_name"} + response = api_client.post(reverse("create_user_api_token"), request_data, "json") self.assertEqual(201, response.status_code) self.assertEqual("api_token", response.data["token"]) user_mock.assert_called_with(request_data) - api_mock.assert_called_with(self.user_internal) + api_mock.assert_called_with(self.user_internal, "api_token_name", None) # --- revoke_user_api_token --- @@ -70,10 +84,8 @@ def test_revoke_api_token_view_not_authenticated(self, mock): mock.side_effect = PermissionDenied("Invalid credentials") api_client = APIClient() - request_data = {"username": "user@example.com", "password": "not-so-secret"} - response = api_client.post( - reverse("revoke_user_api_token"), request_data, "json" - ) + request_data = {"username": "user@example.com", "password": "not-so-secret", "name": "api_token_name"} + response = api_client.post(reverse("revoke_user_api_token"), request_data, "json") self.assertEqual(403, response.status_code) self.assertEqual("Invalid credentials", response.data["message"]) @@ -81,18 +93,16 @@ def test_revoke_api_token_view_not_authenticated(self, mock): @patch("application.access_control.api.views._get_authenticated_user") @patch("application.access_control.api.views.revoke_user_api_token") 
- def test_revoke_api_token_view_successful(self, api_mock, user_mock): + def test_revoke_api_token_view_successful(self, revoke_mock, user_mock): user_mock.return_value = self.user_internal api_client = APIClient() - request_data = {"username": "user@example.com", "password": "not-so-secret"} - response = api_client.post( - reverse("revoke_user_api_token"), request_data, "json" - ) + request_data = {"username": "user@example.com", "password": "not-so-secret", "name": "api_token_name"} + response = api_client.post(reverse("revoke_user_api_token"), request_data, "json") self.assertEqual(204, response.status_code) user_mock.assert_called_with(request_data) - api_mock.assert_called_with(self.user_internal) + revoke_mock.assert_called_with(self.user_internal, "api_token_name") class TestAuthenticate(BaseTestCase): @@ -126,16 +136,6 @@ def test_authenticate_view_successful(self, jwt_mock, user_mock): class TestGetAuthenticatedUser(BaseTestCase): - def test_get_authenticated_user_no_user(self): - data = {"password": "not_so_secret"} - with self.assertRaises(ValidationError): - _get_authenticated_user(data) - - def test_get_authenticated_user_no_password(self): - data = {"username": "user@example.com"} - with self.assertRaises(ValidationError): - _get_authenticated_user(data) - @patch("application.access_control.api.views.django_authenticate") def test_get_authenticated_user_not_authenticated(self, mock): mock.return_value = None @@ -157,9 +157,7 @@ def test_get_authenticated_user_successful(self, mock): class TestChangePassword(BaseTestCase): - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.access_control.api.views.UserViewSet.get_object") @patch("application.access_control.models.User.set_password") @patch("application.access_control.models.User.save") @@ -176,24 +174,18 @@ def test_change_password_unusable_password( "new_password_1": "new", "new_password_2": "new", } - response = api_client.patch( - "/api/users/123/change_password/", request_data, "json" - ) + response = api_client.patch("/api/users/123/change_password/", request_data, "json") self.assertEqual(400, response.status_code) self.assertEqual("User's password cannot be changed", response.data["message"]) save_mock.assert_not_called() set_password_mock.assert_not_called() - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.access_control.api.views.UserViewSet.get_object") @patch("application.access_control.models.User.set_password") @patch("application.access_control.models.User.save") - def test_change_password_oidc_user( - self, save_mock, set_password_mock, get_object_mock, authentication_mock - ): + def test_change_password_oidc_user(self, save_mock, set_password_mock, get_object_mock, authentication_mock): self.user_internal.is_oidc_user = True get_object_mock.return_value = self.user_internal authentication_mock.return_value = self.user_admin, None @@ -204,24 +196,18 @@ def test_change_password_oidc_user( "new_password_1": "new", "new_password_2": "new", } - response = api_client.patch( - "/api/users/123/change_password/", request_data, "json" - ) + response = api_client.patch("/api/users/123/change_password/", request_data, "json") 
self.assertEqual(400, response.status_code) self.assertEqual("User's password cannot be changed", response.data["message"]) save_mock.assert_not_called() set_password_mock.assert_not_called() - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.access_control.api.views.UserViewSet.get_object") @patch("application.access_control.models.User.set_password") @patch("application.access_control.models.User.save") - def test_change_password_do_not_match( - self, save_mock, set_password_mock, get_object_mock, authentication_mock - ): + def test_change_password_do_not_match(self, save_mock, set_password_mock, get_object_mock, authentication_mock): get_object_mock.return_value = self.user_internal authentication_mock.return_value = self.user_admin, None @@ -231,18 +217,14 @@ def test_change_password_do_not_match( "new_password_1": "new_1", "new_password_2": "new_2", } - response = api_client.patch( - "/api/users/123/change_password/", request_data, "json" - ) + response = api_client.patch("/api/users/123/change_password/", request_data, "json") self.assertEqual(400, response.status_code) self.assertEqual("The new passwords do not match", response.data["message"]) save_mock.assert_not_called() set_password_mock.assert_not_called() - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.access_control.api.views.UserViewSet.get_object") @patch("application.access_control.models.User.set_password") @patch("application.access_control.models.User.save") @@ -265,21 +247,15 @@ def test_change_password_current_password_incorrect( "new_password_1": "new", "new_password_2": "new", } - response = api_client.patch( - "/api/users/123/change_password/", request_data, "json" - ) + response = api_client.patch("/api/users/123/change_password/", request_data, "json") self.assertEqual(400, response.status_code) self.assertEqual("Current password is incorrect", response.data["message"]) - django_authenticate_mock.assert_called_with( - username="user_admin@example.com", password="current" - ) + django_authenticate_mock.assert_called_with(username="user_admin@example.com", password="current") save_mock.assert_not_called() set_password_mock.assert_not_called() - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.access_control.api.views.UserViewSet.get_object") @patch("application.access_control.models.User.set_password") @patch("application.access_control.models.User.save") @@ -297,9 +273,7 @@ def test_change_password_not_valid( get_object_mock.return_value = self.user_internal authentication_mock.return_value = self.user_admin, None django_authenticate_mock.return_value = self.user_admin - validate_password_mock.side_effect = DjangoValidationError( - ["too_short", "too_common"] - ) + validate_password_mock.side_effect = DjangoValidationError(["too_short", "too_common"]) api_client = APIClient() request_data = { @@ -307,21 +281,15 @@ def test_change_password_not_valid( "new_password_1": "new", "new_password_2": "new", } - response = api_client.patch( - 
"/api/users/123/change_password/", request_data, "json" - ) + response = api_client.patch("/api/users/123/change_password/", request_data, "json") self.assertEqual(400, response.status_code) self.assertEqual("too_short / too_common", response.data["message"]) - django_authenticate_mock.assert_called_with( - username="user_admin@example.com", password="current" - ) + django_authenticate_mock.assert_called_with(username="user_admin@example.com", password="current") save_mock.assert_not_called() set_password_mock.assert_not_called() - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.access_control.api.views.UserViewSet.get_object") @patch("application.access_control.models.User.set_password") @patch("application.access_control.models.User.save") @@ -347,14 +315,10 @@ def test_change_password_successful( "new_password_1": "new", "new_password_2": "new", } - response = api_client.patch( - "/api/users/123/change_password/", request_data, "json" - ) + response = api_client.patch("/api/users/123/change_password/", request_data, "json") self.assertEqual(204, response.status_code) self.assertEqual(None, response.data) - django_authenticate_mock.assert_called_with( - username="user_admin@example.com", password="current" - ) + django_authenticate_mock.assert_called_with(username="user_admin@example.com", password="current") save_mock.assert_called() set_password_mock.assert_called_with("new") diff --git a/backend/unittests/access_control/services/test_api_token_authentication.py b/backend/unittests/access_control/services/test_api_token_authentication.py index 5ac7dabdc..6a27ab9cc 100644 --- a/backend/unittests/access_control/services/test_api_token_authentication.py +++ b/backend/unittests/access_control/services/test_api_token_authentication.py @@ -1,12 +1,12 @@ -from itertools import chain +from datetime import date from unittest.mock import patch from argon2 import PasswordHasher from argon2.profiles import RFC_9106_LOW_MEMORY from django.http import HttpRequest -from rest_framework.exceptions import AuthenticationFailed, ValidationError +from rest_framework.exceptions import AuthenticationFailed -from application.access_control.models import API_Token +from application.access_control.models import API_Token_Multiple from application.access_control.services.api_token_authentication import ( APITokenAuthentication, ) @@ -25,10 +25,8 @@ def setUp(self) -> None: ph = PasswordHasher.from_parameters(RFC_9106_LOW_MEMORY) api_token_hash = ph.hash(self.api_token) - api_token_object = API_Token( - user=self.user_internal, api_token_hash=api_token_hash - ) - self.api_tokens = [api_token_object] + self.api_token_object = API_Token_Multiple(user=self.user_internal, api_token_hash=api_token_hash) + self.api_tokens = [self.api_token_object] # --- authenticate_header --- @@ -38,7 +36,7 @@ def test_authenticate_header(self): # --- validate_api_token --- - @patch("application.access_control.models.API_Token.objects.all") + @patch("application.access_control.models.API_Token_Multiple.objects.all") def test_validate_api_token_none(self, mock): mock.return_value = self.api_tokens @@ -46,13 +44,13 @@ def test_validate_api_token_none(self, mock): user = api_token_authentication._validate_api_token("sss") self.assertIsNone(user) - @patch("application.access_control.models.API_Token.objects.all") + 
@patch("application.access_control.models.API_Token_Multiple.objects.all") def test_validate_api_token_found(self, mock): mock.return_value = self.api_tokens api_token_authentication = APITokenAuthentication() - user = api_token_authentication._validate_api_token(self.api_token) - self.assertEqual(self.user_internal, user) + api_token = api_token_authentication._validate_api_token(self.api_token) + self.assertEqual(self.api_token_object, api_token) # --- authenticate --- @@ -70,9 +68,7 @@ def test_authenticate_invalid_header_1(self): api_token_authentication = APITokenAuthentication() api_token_authentication.authenticate(request) - self.assertEqual( - "Invalid token header: No credentials provided.", str(e.exception) - ) + self.assertEqual("Invalid token header: No credentials provided.", str(e.exception)) def test_authenticate_invalid_header_spaces(self): request = HttpRequest() @@ -94,9 +90,7 @@ def test_authenticate_wrong_header(self): self.assertIsNone(user) - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication._validate_api_token" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication._validate_api_token") def test_authenticate_wrong_token(self, mock): mock.return_value = None @@ -108,12 +102,10 @@ def test_authenticate_wrong_token(self, mock): self.assertEqual("Invalid API token.", str(e.exception)) - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication._validate_api_token" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication._validate_api_token") def test_authenticate_user_deactivated(self, mock): - mock.return_value = self.user_internal self.user_internal.is_active = False + mock.return_value = API_Token_Multiple(user=self.user_internal) with self.assertRaises(AuthenticationFailed) as e: request = HttpRequest() @@ -123,11 +115,21 @@ def test_authenticate_user_deactivated(self, mock): self.assertEqual("User is deactivated.", str(e.exception)) - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication._validate_api_token" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication._validate_api_token") + def test_authenticate_expired_token(self, mock): + mock.return_value = API_Token_Multiple(user=self.user_internal, expiration_date=date(2020, 1, 1)) + + with self.assertRaises(AuthenticationFailed) as e: + request = HttpRequest() + request.META["HTTP_AUTHORIZATION"] = b"APIToken token" + api_token_authentication = APITokenAuthentication() + api_token_authentication.authenticate(request) + + self.assertEqual("API token has expired.", str(e.exception)) + + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication._validate_api_token") def test_authenticate_successful(self, mock): - mock.return_value = self.user_internal + mock.return_value = self.api_token_object request = HttpRequest() request.META["HTTP_AUTHORIZATION"] = b"APIToken token" diff --git a/backend/unittests/access_control/services/test_jwt_authentication.py b/backend/unittests/access_control/services/test_jwt_authentication.py index 19134aa58..f8cb7ad0e 100644 --- a/backend/unittests/access_control/services/test_jwt_authentication.py +++ b/backend/unittests/access_control/services/test_jwt_authentication.py @@ -77,9 +77,7 @@ def test_authenticate_invalid_header_1(self): jwt_authentication = JWTAuthentication() 
jwt_authentication.authenticate(request) - self.assertEqual( - "Invalid token header: No credentials provided.", str(e.exception) - ) + self.assertEqual("Invalid token header: No credentials provided.", str(e.exception)) def test_authenticate_invalid_header_spaces(self): request = HttpRequest() @@ -101,9 +99,7 @@ def test_authenticate_wrong_header(self): self.assertIsNone(user) - @patch( - "application.access_control.services.jwt_authentication.JWTAuthentication._validate_jwt" - ) + @patch("application.access_control.services.jwt_authentication.JWTAuthentication._validate_jwt") def test_authenticate_wrong_token(self, mock): mock.return_value = None @@ -115,9 +111,7 @@ def test_authenticate_wrong_token(self, mock): self.assertEqual("Invalid token.", str(e.exception)) - @patch( - "application.access_control.services.jwt_authentication.JWTAuthentication._validate_jwt" - ) + @patch("application.access_control.services.jwt_authentication.JWTAuthentication._validate_jwt") def test_authenticate_user_deactivated(self, mock): mock.return_value = self.user_internal self.user_internal.is_active = False @@ -132,9 +126,7 @@ def test_authenticate_user_deactivated(self, mock): self.user_internal.is_active = True - @patch( - "application.access_control.services.jwt_authentication.JWTAuthentication._validate_jwt" - ) + @patch("application.access_control.services.jwt_authentication.JWTAuthentication._validate_jwt") def test_authenticate_successful(self, mock): mock.return_value = self.user_internal @@ -154,9 +146,7 @@ def test_authenticate_header(self): # --- _validate_jwt --- @patch("jwt.decode") - @patch( - "application.access_control.services.jwt_authentication.get_user_by_username" - ) + @patch("application.access_control.services.jwt_authentication.get_user_by_username") @patch("application.access_control.models.JWT_Secret.load") def test_validate_jwt_user(self, secret_mock, get_user_mock, jwt_mock): jwt_secret = JWT_Secret(secret="secret") diff --git a/backend/unittests/access_control/services/test_oidc_authentication.py b/backend/unittests/access_control/services/test_oidc_authentication.py index d3cc8ca90..9bc767c7a 100644 --- a/backend/unittests/access_control/services/test_oidc_authentication.py +++ b/backend/unittests/access_control/services/test_oidc_authentication.py @@ -31,9 +31,7 @@ def test_authenticate_invalid_header_1(self): oidc_authentication = OIDCAuthentication() oidc_authentication.authenticate(request) - self.assertEqual( - "Invalid token header: No credentials provided.", str(e.exception) - ) + self.assertEqual("Invalid token header: No credentials provided.", str(e.exception)) def test_authenticate_invalid_header_spaces(self): request = HttpRequest() @@ -55,9 +53,7 @@ def test_authenticate_wrong_header(self): self.assertIsNone(user) - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._validate_jwt" - ) + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._validate_jwt") def test_authenticate_wrong_token(self, mock): mock.return_value = None @@ -69,9 +65,7 @@ def test_authenticate_wrong_token(self, mock): self.assertEqual("Invalid token.", str(e.exception)) - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._validate_jwt" - ) + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._validate_jwt") def test_authenticate_user_deactivated(self, mock): mock.return_value = self.user_internal self.user_internal.is_active = False @@ -86,9 +80,7 @@ def 
test_authenticate_user_deactivated(self, mock): self.user_internal.is_active = True - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._validate_jwt" - ) + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._validate_jwt") def test_authenticate_successful(self, mock): mock.return_value = self.user_internal @@ -108,20 +100,19 @@ def test_authenticate_header(self): # --- _validate_jwt --- @patch("jwt.decode") - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._get_jwks_uri" - ) + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._get_jwks_uri") @patch("jwt.PyJWKClient.__init__") @patch("jwt.PyJWKClient.get_signing_key_from_jwt") - def test_validate_jwt_message( - self, get_signing_key_mock, pyjwkclient_mock, jwks_uri_mock, jwt_mock - ): + def test_validate_jwt_message(self, get_signing_key_mock, pyjwkclient_mock, jwks_uri_mock, jwt_mock): jwks_uri_mock.return_value = "test_jwks_uri" pyjwkclient_mock.return_value = None mock_py_jwk = MockPyJWK("test_key") get_signing_key_mock.return_value = mock_py_jwk jwt_mock.side_effect = jwt.ExpiredSignatureError("Signature expired") + settings = Settings.load() + settings.oidc_clock_skew = 9 + with self.assertRaises(AuthenticationFailed) as e: oidc_authentication = OIDCAuthentication() oidc_authentication._validate_jwt("token") @@ -144,20 +135,15 @@ def test_validate_jwt_message( key="test_key", algorithms=["RS256", "RS384", "RS512", "ES256 ", "ES384", "ES512", "EdDSA"], audience="client_id", + leeway=9, ) @patch("jwt.decode") - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._get_jwks_uri" - ) + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._get_jwks_uri") @patch("jwt.PyJWKClient.__init__") @patch("jwt.PyJWKClient.get_signing_key_from_jwt") - @patch( - "application.access_control.services.oidc_authentication.get_user_by_username" - ) - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._create_user" - ) + @patch("application.access_control.services.oidc_authentication.get_user_by_username") + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._create_user") def test_validate_jwt_user_not_found( self, create_user_mock, @@ -176,6 +162,9 @@ def test_validate_jwt_user_not_found( expected_user = User(username="test_username") create_user_mock.return_value = expected_user + settings = Settings.load() + settings.oidc_clock_skew = 7 + oidc_authentication = OIDCAuthentication() user = oidc_authentication._validate_jwt("token") @@ -198,23 +187,16 @@ def test_validate_jwt_user_not_found( key="test_key", algorithms=["RS256", "RS384", "RS512", "ES256 ", "ES384", "ES512", "EdDSA"], audience="client_id", + leeway=7, ) - create_user_mock.assert_called_once_with( - "test_username", {"preferred_username": "test_username"} - ) + create_user_mock.assert_called_once_with("test_username", {"preferred_username": "test_username"}) @patch("jwt.decode") - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._get_jwks_uri" - ) + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._get_jwks_uri") @patch("jwt.PyJWKClient.__init__") @patch("jwt.PyJWKClient.get_signing_key_from_jwt") - @patch( - "application.access_control.services.oidc_authentication.get_user_by_username" - ) - @patch( - 
"application.access_control.services.oidc_authentication.OIDCAuthentication._check_user_change" - ) + @patch("application.access_control.services.oidc_authentication.get_user_by_username") + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._check_user_change") def test_validate_jwt_user_found( self, check_user_change_mock, @@ -232,6 +214,9 @@ def test_validate_jwt_user_found( get_user_mock.return_value = self.user_internal check_user_change_mock.return_value = self.user_internal + settings = Settings.load() + settings.oidc_clock_skew = 5 + oidc_authentication = OIDCAuthentication() user = oidc_authentication._validate_jwt("token") @@ -254,6 +239,7 @@ def test_validate_jwt_user_found( key="test_key", algorithms=["RS256", "RS384", "RS512", "ES256 ", "ES384", "ES512", "EdDSA"], audience="client_id", + leeway=5, ) check_user_change_mock.assert_called_once_with( self.user_internal, {"preferred_username": self.user_internal.username} @@ -273,9 +259,7 @@ def test_get_jwks_uri(self, requests_mock): ) @patch("application.access_control.services.oidc_authentication.User.save") - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups" - ) + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups") def test_create_user(self, synchronize_groups_mock, user_save_mock): oidc_authentication = OIDCAuthentication() payload = { @@ -304,9 +288,7 @@ def test_create_user(self, synchronize_groups_mock, user_save_mock): synchronize_groups_mock.assert_called_with(user, payload) @patch("application.access_control.services.oidc_authentication.User.save") - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups" - ) + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups") def test_create_user_internal(self, synchronize_groups_mock, user_save_mock): settings = Settings.load() settings.internal_users = ".*@example.com, .*@test.com" @@ -342,9 +324,7 @@ def test_create_user_internal(self, synchronize_groups_mock, user_save_mock): settings.save() @patch("application.access_control.services.oidc_authentication.User.save") - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups" - ) + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups") def test_create_user_external(self, synchronize_groups_mock, user_save_mock): settings = Settings.load() settings.internal_users = ".*@example.com, .*@test.com" @@ -380,12 +360,8 @@ def test_create_user_external(self, synchronize_groups_mock, user_save_mock): settings.save() @patch("application.access_control.services.oidc_authentication.User.save") - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups" - ) - def test_create_user_no_claim_mappings( - self, synchronize_groups_mock, user_save_mock - ): + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups") + def test_create_user_no_claim_mappings(self, synchronize_groups_mock, user_save_mock): oidc_authentication = OIDCAuthentication() payload = { "preferred_username": "test_username", @@ -418,9 +394,7 @@ def test_create_user_no_claim_mappings( synchronize_groups_mock.assert_called_with(user, payload) @patch("application.access_control.services.oidc_authentication.User.save") - @patch( - 
"application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups" - ) + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups") def test_check_user_change_no_change(self, synchronize_groups_mock, user_save_mock): old_user = User( username="test_username", @@ -454,12 +428,8 @@ def test_check_user_change_no_change(self, synchronize_groups_mock, user_save_mo synchronize_groups_mock.assert_not_called() @patch("application.access_control.services.oidc_authentication.User.save") - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups" - ) - def test_check_user_change_no_claim_mappings( - self, synchronize_groups_mock, user_save_mock - ): + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups") + def test_check_user_change_no_claim_mappings(self, synchronize_groups_mock, user_save_mock): old_user = User( username="test_username", first_name="test_first_name", @@ -504,12 +474,8 @@ def test_check_user_change_no_claim_mappings( synchronize_groups_mock.assert_not_called() @patch("application.access_control.services.oidc_authentication.User.save") - @patch( - "application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups" - ) - def test_check_user_change_with_changes( - self, synchronize_groups_mock, user_save_mock - ): + @patch("application.access_control.services.oidc_authentication.OIDCAuthentication._synchronize_groups") + def test_check_user_change_with_changes(self, synchronize_groups_mock, user_save_mock): old_user = User( username="test_username", first_name="test_first_name", diff --git a/backend/unittests/access_control/services/test_product_api_token.py b/backend/unittests/access_control/services/test_product_api_token.py deleted file mode 100644 index cff8fc100..000000000 --- a/backend/unittests/access_control/services/test_product_api_token.py +++ /dev/null @@ -1,152 +0,0 @@ -from itertools import chain -from unittest.mock import patch - -from rest_framework.exceptions import ValidationError - -from application.access_control.models import API_Token, User -from application.access_control.services.product_api_token import ( - create_product_api_token, - get_product_api_tokens, - revoke_product_api_token, -) -from application.access_control.services.roles_permissions import Roles -from application.core.models import Product_Member -from unittests.base_test_case import BaseTestCase - - -class TestProductApiToken(BaseTestCase): - @patch("application.access_control.services.product_api_token.get_user_by_username") - def test_create_product_api_token_exists(self, mock): - user = User(username="username", full_name="full_name") - api_token = API_Token(user=user, api_token_hash="hash") - mock.return_value = user - - with self.assertRaises(ValidationError) as e: - create_product_api_token(self.product_1, Roles.Upload) - mock.assert_called_with("-product-None-api_token-") - self.assertEqual("Only one API token per product is allowed.", str(e)) - - @patch("application.access_control.services.product_api_token.get_user_by_username") - @patch("application.access_control.models.API_Token.save") - @patch("application.access_control.models.User.save") - @patch("application.core.models.Product_Member.save") - @patch("application.access_control.models.User.set_unusable_password") - def test_create_product_api_token_with_user( - self, - set_unusable_password_mock, - product_member_save_mock, - 
user_save_mock, - api_token_save_mock, - user_mock, - ): - user_mock.return_value = User() - - api_token = create_product_api_token(self.product_1, Roles.Upload) - - self.assertEqual(42, len(api_token)) - - user_mock.assert_called_with("-product-None-api_token-") - api_token_save_mock.assert_called() - user_save_mock.assert_called() - product_member_save_mock.assert_called() - set_unusable_password_mock.assert_called() - - @patch("application.access_control.services.product_api_token.get_user_by_username") - @patch("application.access_control.models.API_Token.save") - @patch("application.access_control.models.User.save") - @patch("application.core.models.Product_Member.save") - @patch("application.access_control.models.User.set_unusable_password") - def test_create_product_api_token_without_user( - self, - set_unusable_password_mock, - product_member_save_mock, - user_save_mock, - api_token_save_mock, - user_mock, - ): - user_mock.return_value = None - - api_token = create_product_api_token(self.product_1, Roles.Upload) - - self.assertEqual(42, len(api_token)) - - user_mock.assert_called_with("-product-None-api_token-") - api_token_save_mock.assert_called() - user_save_mock.assert_called() - product_member_save_mock.assert_called() - set_unusable_password_mock.assert_called() - - @patch("application.access_control.services.product_api_token.get_user_by_username") - @patch("application.access_control.models.API_Token.objects.filter") - def test_revoke_product_api_token_not_exists(self, filter_mock, user_mock): - user_mock.return_value = None - revoke_product_api_token(self.product_1) - - user_mock.assert_called_with("-product-None-api_token-") - filter_mock.assert_not_called() - - @patch("application.access_control.services.product_api_token.get_user_by_username") - @patch("application.access_control.models.API_Token.delete") - @patch("application.access_control.models.User.save") - @patch("application.core.models.Product_Member.delete") - @patch("application.access_control.services.product_api_token.get_product_member") - def test_revoke_product_api_token( - self, - get_product_member_mock, - product_member_delete_mock, - user_save_mock, - api_token_delete_mock, - user_mock, - ): - user = User(username="username", full_name="full_name") - api_token = API_Token(user=user, api_token_hash="hash") - user_mock.return_value = user - - get_product_member_mock.return_value = Product_Member() - - revoke_product_api_token(self.product_1) - - user_mock.assert_called_with("-product-None-api_token-") - api_token_delete_mock.assert_called() - get_product_member_mock.assert_called_with(self.product_1, user) - product_member_delete_mock.assert_called() - user_save_mock.assert_called() - - @patch("application.access_control.services.product_api_token.get_user_by_username") - def test_get_product_api_tokens_no_user(self, user_mock): - user_mock.return_value = None - get_product_api_tokens(self.product_1) - - user_mock.assert_called_with("-product-None-api_token-") - - @patch("application.access_control.services.product_api_token.get_user_by_username") - @patch("application.access_control.services.product_api_token.get_product_member") - def test_get_product_api_tokens_no_product_member( - self, product_member_mock, user_mock - ): - user = User() - user_mock.return_value = user - - product_member_mock.return_value = None - - get_product_api_tokens(self.product_1) - - user_mock.assert_called_with("-product-None-api_token-") - product_member_mock.assert_called_with(self.product_1, user) - - 
@patch("application.access_control.services.product_api_token.get_user_by_username") - @patch("application.access_control.services.product_api_token.get_product_member") - def test_get_product_api_tokens_success(self, product_member_mock, user_mock): - user = User() - user_mock.return_value = user - - product_member_mock.return_value = Product_Member(role=Roles.Upload) - - product_api_tokens = get_product_api_tokens(self.product_1) - - self.assertEqual(1, len(product_api_tokens)) - self.assertEqual(self.product_1.pk, product_api_tokens[0].id) - self.assertEqual(Roles.Upload, product_api_tokens[0].role) - - user_mock.assert_called_with("-product-None-api_token-") - product_member_mock.assert_called_with(self.product_1, user) diff --git a/backend/unittests/access_control/services/test_roles_permissions.py b/backend/unittests/access_control/services/test_roles_permissions.py index 3fa677cd1..899aeb2fb 100644 --- a/backend/unittests/access_control/services/test_roles_permissions.py +++ b/backend/unittests/access_control/services/test_roles_permissions.py @@ -1,4 +1,4 @@ -from application.access_control.services.roles_permissions import ( +from application.authorization.services.roles_permissions import ( Permissions, Roles, get_permissions_for_role, @@ -22,5 +22,6 @@ def test_get_permissions_for_role_successful(self): Permissions.Api_Configuration_View, Permissions.Service_View, Permissions.VEX_View, + Permissions.Concluded_License_View, } self.assertEqual(permissions, get_permissions_for_role(Roles.Reader)) diff --git a/backend/unittests/access_control/services/test_user_api_token.py b/backend/unittests/access_control/services/test_user_api_token.py index 19d092371..7864deffc 100644 --- a/backend/unittests/access_control/services/test_user_api_token.py +++ b/backend/unittests/access_control/services/test_user_api_token.py @@ -1,9 +1,9 @@ -from itertools import chain +from datetime import date from unittest.mock import patch from rest_framework.exceptions import ValidationError -from application.access_control.models import API_Token +from application.access_control.models import API_Token_Multiple from application.access_control.services.user_api_token import ( create_user_api_token, revoke_user_api_token, @@ -12,35 +12,31 @@ class TestUserApiToken(BaseTestCase): - @patch("application.access_control.models.API_Token.objects.get") + @patch("application.access_control.models.API_Token_Multiple.objects.get") def test_create_api_token_exists(self, mock): - mock.return_value = API_Token() + mock.return_value = API_Token_Multiple() with self.assertRaises(ValidationError): - create_user_api_token(self.user_internal) - mock.assert_called_with(self.user_internal) + create_user_api_token(self.user_internal, "api_token_name", date.today()) + mock.assert_called_with(self.user_internal, name="api_token_name") - @patch("application.access_control.models.API_Token.objects.get") - @patch("application.access_control.models.API_Token.save") + @patch("application.access_control.models.API_Token_Multiple.objects.get") + @patch("application.access_control.models.API_Token_Multiple.save") def test_create_api_token_new(self, save_mock, get_mock): - get_mock.side_effect = API_Token.DoesNotExist() + get_mock.side_effect = API_Token_Multiple.DoesNotExist() - api_token = create_user_api_token(self.user_internal) + api_token = create_user_api_token(self.user_internal, "api_token_name", date.today()) self.assertEqual(42, len(api_token)) - get_mock.assert_called_with(user=self.user_internal) + 
get_mock.assert_called_with(user=self.user_internal, name="api_token_name") save_mock.assert_called() - @patch("application.access_control.models.API_Token.objects.filter") - @patch("application.access_control.models.API_Token.delete") - def test_revoke_api_token(self, delete_mock, filter_mock): - none_qs = API_Token.objects.none() - api_token_1 = API_Token() - api_token_2 = API_Token() - qs = list(chain(none_qs, [api_token_1, api_token_2])) - filter_mock.return_value = qs + @patch("application.access_control.models.API_Token_Multiple.objects.get") + @patch("application.access_control.models.API_Token_Multiple.delete") + def test_revoke_api_token(self, delete_mock, get_mock): + get_mock.return_value = API_Token_Multiple() - revoke_user_api_token(self.user_internal) + revoke_user_api_token(self.user_internal, "api_token_name") - filter_mock.assert_called_with(user=self.user_internal) - self.assertEqual(2, delete_mock.call_count) + get_mock.assert_called_with(user=self.user_internal, name="api_token_name") + self.assertEqual(1, delete_mock.call_count) diff --git a/backend/unittests/access_control/test_signals.py b/backend/unittests/access_control/test_signals.py index b3bb74b44..956d886e9 100644 --- a/backend/unittests/access_control/test_signals.py +++ b/backend/unittests/access_control/test_signals.py @@ -14,9 +14,7 @@ class TestSignals(BaseTestCase): def test_signal_user_logged_in(self, mock_format, mock_logging): signal_user_logged_in(None, user=self.user_internal) - mock_format.assert_called_with( - message="User logged in", user=self.user_internal - ) + mock_format.assert_called_with(message="User logged in", username="user_internal@example.com") mock_logging.assert_called_once() @patch("application.access_control.signals.logger.info") @@ -24,9 +22,7 @@ def test_signal_user_logged_in(self, mock_format, mock_logging): def test_signal_user_logged_out(self, mock_format, mock_logging): signal_user_logged_out(None, user=self.user_internal) - mock_format.assert_called_with( - message="User logged out", user=self.user_internal - ) + mock_format.assert_called_with(message="User logged out", username="user_internal@example.com") mock_logging.assert_called_once() @patch("application.access_control.signals.logger.info") @@ -35,5 +31,7 @@ def test_signal_user_login_failed(self, mock_format, mock_logging): credentials = {"user": "test_user", "password": "*****"} signal_user_login_failed(None, credentials=credentials) - mock_format.assert_called_with(message="User login failed: ", data=credentials) + mock_format.assert_called_with( + message="User login failed: ", data=credentials, username="user_admin@example.com" + ) mock_logging.assert_called_once() diff --git a/backend/unittests/authorization/__init__.py b/backend/unittests/authorization/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/authorization/api/__init__.py b/backend/unittests/authorization/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/access_control/api/test_authorization.py b/backend/unittests/authorization/api/test_authorization.py similarity index 83% rename from backend/unittests/access_control/api/test_authorization.py rename to backend/unittests/authorization/api/test_authorization.py index 284b10f00..7a228f1e2 100644 --- a/backend/unittests/access_control/api/test_authorization.py +++ b/backend/unittests/authorization/api/test_authorization.py @@ -37,41 +37,29 @@ def setUpClass(self, mock_user): call_command( "loaddata", [ - 
"application/licenses/fixtures/initial_data.json", + "unittests/fixtures/initial_license_data.json", "unittests/fixtures/unittests_fixtures.json", "unittests/fixtures/unittests_license_fixtures.json", ], ) product_metrics = Product_Metrics.objects.get(pk=1) - product_metrics.date = timezone.now().replace( - hour=0, minute=0, second=0, microsecond=0 - ) - timedelta(days=1) + product_metrics.date = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=1) product_metrics.save() product_metrics = Product_Metrics.objects.get(pk=2) - product_metrics.date = timezone.now().replace( - hour=0, minute=0, second=0, microsecond=0 - ) + product_metrics.date = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) product_metrics.save() product_metrics = Product_Metrics.objects.get(pk=3) - product_metrics.date = timezone.now().replace( - hour=0, minute=0, second=0, microsecond=0 - ) - timedelta(days=1) + product_metrics.date = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=1) product_metrics.save() product_metrics = Product_Metrics.objects.get(pk=4) - product_metrics.date = timezone.now().replace( - hour=0, minute=0, second=0, microsecond=0 - ) + product_metrics.date = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) product_metrics.save() super().setUpClass() - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) - @patch( - "application.core.api.serializers_product.calculate_risk_acceptance_expiry_date" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + @patch("application.core.api.serializers_product.calculate_risk_acceptance_expiry_date") def _test_api(self, data: APITest, mock_product_expiry_date, mock_authentication): user = User.objects.get(username=data.username) mock_authentication.return_value = user, None diff --git a/backend/unittests/access_control/api/test_authorization_api_configurations.py b/backend/unittests/authorization/api/test_authorization_api_configurations.py similarity index 57% rename from backend/unittests/access_control/api/test_authorization_api_configurations.py rename to backend/unittests/authorization/api/test_authorization_api_configurations.py index 5ac475a88..852e685e7 100644 --- a/backend/unittests/access_control/api/test_authorization_api_configurations.py +++ b/backend/unittests/authorization/api/test_authorization_api_configurations.py @@ -1,8 +1,8 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) -from unittests.access_control.services.test_authorization import ( +from unittests.authorization.services.test_authorization import ( prepare_authorization_groups, ) @@ -16,14 +16,10 @@ def _test_authorization_api_configurations_product_authorization_group_member(se self._test_authorization_api_configurations() def _test_authorization_api_configurations(self): - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 
'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': 3, 'repository_default_branch': 1}, 'name': 'db_api_configuration_internal', 'base_url': 'http://localhost:8080', 'project_key': 'secobserve', 'api_key': '__secret__', 'product': 1, 'parser': 2}, {'id': 2, 'product_data': {'id': 2, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_external', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': None, 'repository_default_branch': 3}, 'name': 'db_api_configuration_external', 'base_url': 'http://localhost:8080', 'project_key': 'secobserve', 'api_key': '__secret__', 'product': 2, 'parser': 2}]}" - self._test_api( - APITest( - "db_admin", "get", "/api/api_configurations/", None, 200, expected_data - ) - ) + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 
'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': 3, 'repository_default_branch': 1}, 'name': 'db_api_configuration_internal', 'base_url': 'http://localhost:8080', 'project_key': 'secobserve', 'api_key': '__secret__', 'product': 1, 'parser': 2}, {'id': 2, 'product_data': {'id': 2, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_external', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': None, 'repository_default_branch': 3}, 'name': 'db_api_configuration_external', 'base_url': 'http://localhost:8080', 'project_key': 'secobserve', 'api_key': '__secret__', 'product': 2, 'parser': 2}]}" + self._test_api(APITest("db_admin", "get", "/api/api_configurations/", None, 200, expected_data)) - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 
'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': 3, 'repository_default_branch': 1}, 'name': 'db_api_configuration_internal', 'base_url': 'http://localhost:8080', 'project_key': 'secobserve', 'api_key': '__secret__', 'product': 1, 'parser': 2}]}" + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': 3, 'repository_default_branch': 1}, 'name': 'db_api_configuration_internal', 'base_url': 'http://localhost:8080', 'project_key': 'secobserve', 'api_key': '__secret__', 'product': 1, 'parser': 2}]}" self._test_api( APITest( "db_internal_write", @@ -35,7 +31,7 @@ def _test_authorization_api_configurations(self): ) ) - expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 
'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': 3, 'repository_default_branch': 1}, 'name': 'db_api_configuration_internal', 'base_url': 'http://localhost:8080', 'project_key': 'secobserve', 'api_key': '__secret__', 'product': 1, 'parser': 2}" + expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': 3, 'repository_default_branch': 1}, 'name': 'db_api_configuration_internal', 'base_url': 'http://localhost:8080', 'project_key': 'secobserve', 'api_key': '__secret__', 'product': 1, 'parser': 2}" self._test_api( APITest( "db_internal_write", @@ -79,9 +75,7 @@ def _test_authorization_api_configurations(self): "product": 1, "parser": 2, } - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", @@ -93,7 +87,7 @@ def _test_authorization_api_configurations(self): ) ) - expected_data = "{'id': 3, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': 
None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': 3, 'repository_default_branch': 1}, 'name': 'string', 'base_url': 'string', 'project_key': 'string', 'api_key': 'string', 'product': 1, 'parser': 2}" + expected_data = "{'id': 3, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': 3, 'repository_default_branch': 1}, 'name': 'string', 'base_url': 'string', 'project_key': 'string', 'api_key': 'string', 'product': 1, 'parser': 2}" self._test_api( APITest( "db_internal_write", @@ -105,9 +99,7 @@ def _test_authorization_api_configurations(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", @@ -119,7 +111,7 @@ def _test_authorization_api_configurations(self): ) ) - expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': 
None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': 3, 'repository_default_branch': 1}, 'name': 'changed', 'base_url': 'http://localhost:8080', 'project_key': 'secobserve', 'api_key': '__secret__', 'product': 1, 'parser': 2}" + expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'product_group': 3, 'repository_default_branch': 1}, 'name': 'changed', 'base_url': 'http://localhost:8080', 'project_key': 'secobserve', 'api_key': '__secret__', 'product': 1, 'parser': 2}" self._test_api( APITest( "db_internal_write", @@ -131,9 +123,7 @@ def _test_authorization_api_configurations(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", diff --git a/backend/unittests/authorization/api/test_authorization_api_tokens.py b/backend/unittests/authorization/api/test_authorization_api_tokens.py new file mode 100644 index 000000000..f74f16afa --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_api_tokens.py @@ -0,0 +1,49 @@ +from application.access_control.models import API_Token_Multiple, User +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) + + +class TestAuthorizationApiTokens(TestAuthorizationBase): + def test_authorization_api_tokens(self): + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'name': 'default', 'user': 5, 'username': '-product-2-api_token-', 'product': 2, 'product_group': None, 'expiration_date': 
None}]}" + self._test_api( + APITest( + "db_admin", + "get", + "/api/api_tokens/", + None, + 200, + expected_data, + ) + ) + + db_internal_write = User.objects.get(username="db_internal_write") + API_Token_Multiple(user=db_internal_write, api_token_hash="hash").save() + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 2, 'name': 'default', 'user': 2, 'username': 'db_internal_write', 'product': None, 'product_group': None, 'expiration_date': None}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/api_tokens/", + None, + 200, + expected_data, + no_second_user=True, + ) + ) + + expected_data = "{'count': 0, 'next': None, 'previous': None, 'results': []}" + self._test_api( + APITest( + "db_internal_read", + "get", + "/api/api_tokens/", + None, + 200, + expected_data, + no_second_user=True, + ) + ) diff --git a/backend/unittests/access_control/api/test_authorization_authorization_group_members.py b/backend/unittests/authorization/api/test_authorization_authorization_group_members.py similarity index 80% rename from backend/unittests/access_control/api/test_authorization_authorization_group_members.py rename to backend/unittests/authorization/api/test_authorization_authorization_group_members.py index a0b036da1..e3fb19060 100644 --- a/backend/unittests/access_control/api/test_authorization_authorization_group_members.py +++ b/backend/unittests/authorization/api/test_authorization_authorization_group_members.py @@ -1,4 +1,4 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -6,7 +6,7 @@ class TestAuthorizationAuthorizationGroupMembers(TestAuthorizationBase): def test_authorization_authorization_group_members(self): - expected_data = "{'count': 4, 'next': None, 'previous': None, 'results': [{'id': 1, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'authorization_group': 3, 'user': 2}, {'id': 2, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'user_data': {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, 'is_manager': False, 'authorization_group': 3, 'user': 3}, {'id': 3, 'authorization_group_data': {'id': 1, 'name': 'oidc_group_1', 'oidc_group': 'oidc_1'}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 
'has_password': True}, 'is_manager': False, 'authorization_group': 1, 'user': 6}, {'id': 4, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}, 'is_manager': False, 'authorization_group': 2, 'user': 6}]}" + expected_data = "{'count': 4, 'next': None, 'previous': None, 'results': [{'id': 1, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'authorization_group': 3, 'user': 2}, {'id': 2, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'user_data': {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, 'is_manager': False, 'authorization_group': 3, 'user': 3}, {'id': 3, 'authorization_group_data': {'id': 1, 'name': 'oidc_group_1', 'oidc_group': 'oidc_1'}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}, 'is_manager': False, 'authorization_group': 1, 'user': 6}, {'id': 4, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}, 'is_manager': False, 'authorization_group': 2, 'user': 6}]}" self._test_api( APITest( "db_admin", @@ -18,7 +18,7 @@ def test_authorization_authorization_group_members(self): ) ) - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': 
[{'id': 1, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'authorization_group': 3, 'user': 2}, {'id': 2, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'user_data': {'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, 'is_manager': False, 'authorization_group': 3, 'user': 3}]}" + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'authorization_group': 3, 'user': 2}, {'id': 2, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'user_data': {'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, 'is_manager': False, 'authorization_group': 3, 'user': 3}]}" self._test_api( APITest( "db_internal_write", @@ -43,7 +43,7 @@ def test_authorization_authorization_group_members(self): no_second_user=True, ) ) - expected_data = "{'id': 1, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'authorization_group': 3, 'user': 2}" + expected_data = "{'id': 1, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'authorization_group': 3, 'user': 2}" self._test_api( APITest( username="db_internal_write", @@ -56,9 +56,7 @@ def test_authorization_authorization_group_members(self): ) ) - expected_data = ( - "{'message': 'No Authorization_Group_Member matches the given query.'}" - ) + expected_data = "{'message': 'No Authorization_Group_Member matches the given query.'}" self._test_api( APITest( 
"db_product_group_user", @@ -84,9 +82,7 @@ def test_authorization_authorization_group_members(self): ) post_data = {"authorization_group": 3, "user": 1, "is_manager": False} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", @@ -112,9 +108,7 @@ def test_authorization_authorization_group_members(self): ) post_data = {"is_manager": True} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", @@ -140,9 +134,7 @@ def test_authorization_authorization_group_members(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", diff --git a/backend/unittests/access_control/api/test_authorization_authorization_groups.py b/backend/unittests/authorization/api/test_authorization_authorization_groups.py similarity index 95% rename from backend/unittests/access_control/api/test_authorization_authorization_groups.py rename to backend/unittests/authorization/api/test_authorization_authorization_groups.py index 565519b43..6e988d5d6 100644 --- a/backend/unittests/access_control/api/test_authorization_authorization_groups.py +++ b/backend/unittests/authorization/api/test_authorization_authorization_groups.py @@ -3,11 +3,11 @@ Authorization_Group_Member, User, ) -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) -from unittests.access_control.services.test_authorization import ( +from unittests.authorization.services.test_authorization import ( prepare_authorization_groups, ) @@ -131,9 +131,7 @@ def test_authorization_authorization_groups(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_external", @@ -181,9 +179,7 @@ def test_authorization_authorization_groups(self): is_manager=False, ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", diff --git a/backend/unittests/authorization/api/test_authorization_branches.py b/backend/unittests/authorization/api/test_authorization_branches.py new file mode 100644 index 000000000..b8d7d2ea7 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_branches.py @@ -0,0 +1,141 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationBranches(TestAuthorizationBase): + def test_authorization_branches_product_member(self): + self._test_authorization_branches() + + def _test_authorization_branches_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_authorization_branches() + + def _test_authorization_branches(self): + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 
1, 'name_with_product': 'db_branch_internal_dev (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_branch_internal_dev', 'is_default_branch': True, 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'osv_linux_distribution': '', 'osv_linux_release': '', 'product': 1}, {'id': 2, 'name_with_product': 'db_branch_internal_main (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_branch_internal_main', 'is_default_branch': False, 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'osv_linux_distribution': '', 'osv_linux_release': '', 'product': 1}, {'id': 3, 'name_with_product': 'db_branch_external (db_product_external)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_branch_external', 'is_default_branch': True, 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'osv_linux_distribution': '', 'osv_linux_release': '', 'product': 2}]}" + self._test_api(APITest("db_admin", "get", "/api/branches/", None, 200, expected_data)) + + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'name_with_product': 'db_branch_internal_dev (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_branch_internal_dev', 'is_default_branch': True, 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'osv_linux_distribution': '', 'osv_linux_release': '', 'product': 1}, {'id': 2, 'name_with_product': 'db_branch_internal_main (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_branch_internal_main', 'is_default_branch': False, 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'osv_linux_distribution': '', 'osv_linux_release': '', 'product': 1}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + 
"/api/branches/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'id': 1, 'name_with_product': 'db_branch_internal_dev (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_branch_internal_dev', 'is_default_branch': True, 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'osv_linux_distribution': '', 'osv_linux_release': '', 'product': 1}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/branches/1/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'message': 'No Branch matches the given query.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/branches/3/", + None, + 404, + expected_data, + ) + ) + + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/branches/99999/", + None, + 404, + expected_data, + ) + ) + + post_data = {"name": "string", "product": 1} + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "post", + "/api/branches/", + post_data, + 403, + expected_data, + ) + ) + + expected_data = "{'id': 4, 'name_with_product': 'string (db_product_internal)', 'name': 'string', 'is_default_branch': False, 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'osv_linux_distribution': '', 'osv_linux_release': '', 'product': 1}" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/branches/", + post_data, + 201, + expected_data, + ) + ) + + post_data = {"name": "changed"} + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/branches/1/", + post_data, + 403, + expected_data, + ) + ) + + expected_data = "{'id': 1, 'name_with_product': 'changed (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'changed', 'is_default_branch': True, 'last_import': None, 'housekeeping_protect': False, 'purl': '', 'cpe23': '', 'osv_linux_distribution': '', 'osv_linux_release': '', 'product': 1}" + self._test_api( + APITest( + "db_internal_write", + "patch", + "/api/branches/1/", + post_data, + 200, + expected_data, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "delete", + "/api/branches/1/", + None, + 403, + expected_data, + ) + ) + + expected_data = "{'message': 'You cannot delete the default branch of a product.'}" + self._test_api( + APITest( + "db_internal_write", + "delete", + "/api/branches/1/", + None, + 400, + expected_data, + ) + ) diff --git a/backend/unittests/authorization/api/test_authorization_components.py b/backend/unittests/authorization/api/test_authorization_components.py new file mode 100644 index 000000000..d8ddb852f --- /dev/null +++ 
b/backend/unittests/authorization/api/test_authorization_components.py @@ -0,0 +1,64 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationComponents(TestAuthorizationBase): + def test_authorization_components_product_member(self): + self._test_authorization_components() + + def test_authorization_components_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_authorization_components() + + def _test_authorization_components(self): + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': '4dd012d1d2e18cf07666cc7fe0520da0', 'product_name': 'db_product_internal', 'product_group_name': 'db_product_group', 'branch_name': '', 'component_name_version_type': 'internal_component:1.0.0', 'origin_service_name': '', 'component_name': 'internal_component', 'component_version': '1.0.0', 'component_name_version': 'internal_component:1.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'component_dependencies': '', 'component_cyclonedx_bom_link': '', 'has_observations': False, 'product': 1, 'branch': None, 'origin_service': None}, {'id': '25a348829ab6ef7330b97069dab60a40', 'product_name': 'db_product_external', 'product_group_name': '', 'branch_name': '', 'component_name_version_type': 'external_component:2.0.0', 'origin_service_name': '', 'component_name': 'external_component', 'component_version': '2.0.0', 'component_name_version': 'external_component:2.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'component_dependencies': '', 'component_cyclonedx_bom_link': '', 'has_observations': False, 'product': 2, 'branch': None, 'origin_service': None}]}" + self._test_api(APITest("db_admin", "get", "/api/components/", None, 200, expected_data)) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': '4dd012d1d2e18cf07666cc7fe0520da0', 'product_name': 'db_product_internal', 'product_group_name': 'db_product_group', 'branch_name': '', 'component_name_version_type': 'internal_component:1.0.0', 'origin_service_name': '', 'component_name': 'internal_component', 'component_version': '1.0.0', 'component_name_version': 'internal_component:1.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'component_dependencies': '', 'component_cyclonedx_bom_link': '', 'has_observations': False, 'product': 1, 'branch': None, 'origin_service': None}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/components/", + None, + 200, + expected_data, + ) + ) + expected_data = "{'id': '4dd012d1d2e18cf07666cc7fe0520da0', 'product_name': 'db_product_internal', 'product_group_name': 'db_product_group', 'branch_name': '', 'component_name_version_type': 'internal_component:1.0.0', 'origin_service_name': '', 'component_name': 'internal_component', 'component_version': '1.0.0', 'component_name_version': 'internal_component:1.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'component_dependencies': '', 'component_cyclonedx_bom_link': '', 'has_observations': False, 'product': 1, 'branch': None, 'origin_service': None}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/components/4dd012d1d2e18cf07666cc7fe0520da0/", + None, + 200, + expected_data, + ) + ) + expected_data = "{'message': 'No Component matches the given query.'}" + 
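The new authorization tests in this patch all follow the same data-driven pattern: an APITest record names the acting user, HTTP method, URL, optional payload, expected status code and expected response body, and TestAuthorizationBase._test_api() executes it. That helper lives in unittests/authorization/api/test_authorization.py and is not shown in this excerpt; the snippet below is only a rough sketch of the idea, assuming Django REST framework's APIClient and using hypothetical names (APITestCase, run_api_test), not the project's actual implementation.

from dataclasses import dataclass
from typing import Optional

from rest_framework.test import APIClient


@dataclass
class APITestCase:
    # Hypothetical stand-in for the APITest record used throughout this patch.
    username: str
    method: str          # "get", "post", "patch" or "delete"
    url: str
    post_data: Optional[dict]
    expected_status_code: int
    expected_data: Optional[str]


def run_api_test(case: APITestCase, user) -> None:
    # Authenticate as the given Django user and issue the request described by the case.
    client = APIClient()
    client.force_authenticate(user=user)
    call = getattr(client, case.method)
    response = call(case.url, data=case.post_data, format="json")

    # Assert status and, when given, the textual repr of the response body.
    assert response.status_code == case.expected_status_code
    if case.expected_data is not None:
        assert str(response.data) == case.expected_data

Comparing str(response.data) against a single expected string keeps each permission check down to one call per user and endpoint, which is why the fixtures in these files are long repr-style strings rather than parsed JSON assertions.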
self._test_api( + APITest( + "db_internal_write", + "get", + "/api/components/25a348829ab6ef7330b97069dab60a40/", + None, + 404, + expected_data, + ) + ) + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/components/99999/", + None, + 404, + expected_data, + ) + ) diff --git a/backend/unittests/authorization/api/test_authorization_concluded_licenses.py b/backend/unittests/authorization/api/test_authorization_concluded_licenses.py new file mode 100644 index 000000000..9774d4c33 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_concluded_licenses.py @@ -0,0 +1,88 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationConcluded(TestAuthorizationBase): + def test_authorization_concluded_licenses_product_member(self): + self._test_authorization_concluded_licenses() + + def test_authorization_concluded_licenses_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_authorization_concluded_licenses() + + def _test_authorization_concluded_licenses(self): + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'user_data': {'id': 1, 'username': 'db_admin', 'first_name': '', 'last_name': '', 'full_name': 'db_admin', 'email': '', 'is_active': True, 'is_superuser': True, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}, 'component_name_version': 'internal_component:1.0.0 (npm)', 'manual_concluded_spdx_license_id': '', 'component_purl_type': 'npm', 'component_name': 'internal_component', 'component_version': '1.0.0', 'manual_concluded_license_expression': 'expression', 'manual_concluded_non_spdx_license': '', 'last_updated': '2022-12-15T17:10:35.513000+01:00', 'product': 1, 'manual_concluded_spdx_license': None, 'user': 1}, {'id': 2, 'product_data': {'id': 2, 'name': 'db_product_external', 'is_product_group': False}, 'user_data': {'id': 1, 'username': 'db_admin', 'first_name': '', 'last_name': '', 'full_name': 'db_admin', 'email': '', 'is_active': True, 'is_superuser': True, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}, 'component_name_version': 'external_component:2.0.0 (pypi)', 'manual_concluded_spdx_license_id': '', 'component_purl_type': 'pypi', 'component_name': 'external_component', 'component_version': '2.0.0', 'manual_concluded_license_expression': '', 'manual_concluded_non_spdx_license': 'non spdx', 'last_updated': '2022-12-15T17:10:35.513000+01:00', 'product': 2, 'manual_concluded_spdx_license': None, 'user': 1}]}" + self._test_api(APITest("db_admin", "get", "/api/concluded_licenses/", None, 200, expected_data)) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'user_data': {'id': 1, 
'username': 'db_admin', 'full_name': 'db_admin'}, 'component_name_version': 'internal_component:1.0.0 (npm)', 'manual_concluded_spdx_license_id': '', 'component_purl_type': 'npm', 'component_name': 'internal_component', 'component_version': '1.0.0', 'manual_concluded_license_expression': 'expression', 'manual_concluded_non_spdx_license': '', 'last_updated': '2022-12-15T17:10:35.513000+01:00', 'product': 1, 'manual_concluded_spdx_license': None, 'user': 1}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/concluded_licenses/", + None, + 200, + expected_data, + ) + ) + expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'user_data': {'id': 1, 'username': 'db_admin', 'full_name': 'db_admin'}, 'component_name_version': 'internal_component:1.0.0 (npm)', 'manual_concluded_spdx_license_id': '', 'component_purl_type': 'npm', 'component_name': 'internal_component', 'component_version': '1.0.0', 'manual_concluded_license_expression': 'expression', 'manual_concluded_non_spdx_license': '', 'last_updated': '2022-12-15T17:10:35.513000+01:00', 'product': 1, 'manual_concluded_spdx_license': None, 'user': 1}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/concluded_licenses/1/", + None, + 200, + expected_data, + ) + ) + expected_data = "{'message': 'No Concluded_License matches the given query.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/concluded_licenses/2/", + None, + 404, + expected_data, + ) + ) + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/concluded_licenses/99999/", + None, + 404, + 
expected_data, + ) + ) + + self._test_api( + APITest( + "db_internal_read", + "delete", + "/api/concluded_licenses/1/", + None, + 403, + None, + no_second_user=True, + ) + ) + + self._test_api( + APITest( + "db_internal_write", + "delete", + "/api/concluded_licenses/1/", + None, + 204, + None, + no_second_user=True, + ) + ) diff --git a/backend/unittests/access_control/api/test_authorization_evidences.py b/backend/unittests/authorization/api/test_authorization_evidences.py similarity index 91% rename from backend/unittests/access_control/api/test_authorization_evidences.py rename to backend/unittests/authorization/api/test_authorization_evidences.py index 48e4f132f..e8b75caed 100644 --- a/backend/unittests/access_control/api/test_authorization_evidences.py +++ b/backend/unittests/authorization/api/test_authorization_evidences.py @@ -1,8 +1,8 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) -from unittests.access_control.services.test_authorization import ( +from unittests.authorization.services.test_authorization import ( prepare_authorization_groups, ) diff --git a/backend/unittests/access_control/api/test_authorization_general_rules.py b/backend/unittests/authorization/api/test_authorization_general_rules.py similarity index 51% rename from backend/unittests/access_control/api/test_authorization_general_rules.py rename to backend/unittests/authorization/api/test_authorization_general_rules.py index a297b569a..a7ac1793d 100644 --- a/backend/unittests/access_control/api/test_authorization_general_rules.py +++ b/backend/unittests/authorization/api/test_authorization_general_rules.py @@ -1,4 +1,4 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -8,7 +8,7 @@ class TestAuthorizationGeneralRules(TestAuthorizationBase): def test_authorization_general_rules(self): # --- general_rules --- - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 3, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_general_rule', 'description': '', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': '', 'new_vex_justification': '', 'enabled': True, 'parser': 1}]}" + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 3, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_general_rule', 'description': '', 'type': 'Fields', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': '', 'new_vex_justification': '', 'rego_module': '', 'enabled': True, 'parser': 1}]}" self._test_api( APITest( "db_internal_write", @@ 
-32,7 +32,7 @@ def test_authorization_general_rules(self): ) ) - expected_data = "{'id': 3, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_general_rule', 'description': '', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': '', 'new_vex_justification': '', 'enabled': True, 'parser': 1}" + expected_data = "{'id': 3, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_general_rule', 'description': '', 'type': 'Fields', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': '', 'new_vex_justification': '', 'rego_module': '', 'enabled': True, 'parser': 1}" self._test_api( APITest( "db_internal_write", @@ -45,9 +45,7 @@ def test_authorization_general_rules(self): ) post_data = {"name": "string", "parser": 1} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_write", @@ -59,16 +57,10 @@ def test_authorization_general_rules(self): ) ) - expected_data = "{'id': 4, 'user': 'db_admin', 'approval_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': 'db_admin', 'approval_user_full_name': None, 'name': 'string', 'description': '', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': '', 'new_vex_justification': '', 'enabled': True, 'parser': 1}" - self._test_api( - APITest( - "db_admin", "post", "/api/general_rules/", post_data, 201, expected_data - ) - ) + expected_data = "{'id': 4, 'user': 'db_admin', 'approval_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': 'db_admin', 'approval_user_full_name': None, 'name': 'string', 'description': '', 'type': 'Fields', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': '', 'new_vex_justification': '', 'rego_module': '', 'enabled': True, 'parser': 1}" + self._test_api(APITest("db_admin", "post", "/api/general_rules/", post_data, 201, expected_data)) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = 
"{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_write", @@ -80,7 +72,7 @@ def test_authorization_general_rules(self): ) ) - expected_data = "{'id': 3, 'user': 'db_admin', 'approval_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': 'db_admin', 'approval_user_full_name': None, 'name': 'changed', 'description': '', 'scanner_prefix': 'also_changed', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': '', 'new_vex_justification': '', 'enabled': True, 'parser': 1}" + expected_data = "{'id': 3, 'user': 'db_admin', 'approval_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': 'db_admin', 'approval_user_full_name': None, 'name': 'changed', 'description': '', 'type': 'Fields', 'scanner_prefix': 'also_changed', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': '', 'new_vex_justification': '', 'rego_module': '', 'enabled': True, 'parser': 1}" self._test_api( APITest( "db_admin", @@ -92,9 +84,13 @@ def test_authorization_general_rules(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'count': 0, 'results': []}" + self._test_api(APITest("db_admin", "post", "/api/general_rules/3/simulate/", None, 200, expected_data)) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api(APITest("db_internal_write", "post", "/api/general_rules/3/simulate/", None, 403, expected_data)) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_write", @@ -107,8 +103,4 @@ def test_authorization_general_rules(self): ) expected_data = "None" - self._test_api( - APITest( - "db_admin", "delete", "/api/general_rules/3/", None, 204, expected_data - ) - ) + self._test_api(APITest("db_admin", "delete", "/api/general_rules/3/", None, 204, expected_data)) diff --git a/backend/unittests/access_control/api/test_authorization_jwt_secret.py b/backend/unittests/authorization/api/test_authorization_jwt_secret.py similarity index 54% rename from backend/unittests/access_control/api/test_authorization_jwt_secret.py rename to backend/unittests/authorization/api/test_authorization_jwt_secret.py index fe7252287..e9f044cbb 100644 --- a/backend/unittests/access_control/api/test_authorization_jwt_secret.py +++ b/backend/unittests/authorization/api/test_authorization_jwt_secret.py @@ -1,5 +1,5 @@ from application.access_control.models import JWT_Secret -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -9,15 +9,9 @@ class TestAuthorizationJWTSecret(TestAuthorizationBase): def test_authorization_jwt_secret(self): secret_old = JWT_Secret.load().secret - self._test_api( - APITest("db_admin", "post", "/api/jwt_secret/reset/", None, 
204, None) - ) + self._test_api(APITest("db_admin", "post", "/api/jwt_secret/reset/", None, 204, None)) secret_new = JWT_Secret.load().secret self.assertNotEqual(secret_old, secret_new) self.assertEqual(32, len(secret_new)) - self._test_api( - APITest( - "db_internal_write", "post", "/api/jwt_secret/reset/", None, 403, None - ) - ) + self._test_api(APITest("db_internal_write", "post", "/api/jwt_secret/reset/", None, 403, None)) diff --git a/backend/unittests/access_control/api/test_authorization_license_component_evidences.py b/backend/unittests/authorization/api/test_authorization_license_component_evidences.py similarity index 93% rename from backend/unittests/access_control/api/test_authorization_license_component_evidences.py rename to backend/unittests/authorization/api/test_authorization_license_component_evidences.py index 9dc0a9e79..2fa142707 100644 --- a/backend/unittests/access_control/api/test_authorization_license_component_evidences.py +++ b/backend/unittests/authorization/api/test_authorization_license_component_evidences.py @@ -1,8 +1,8 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) -from unittests.access_control.services.test_authorization import ( +from unittests.authorization.services.test_authorization import ( prepare_authorization_groups, ) @@ -52,9 +52,7 @@ def _test_authorization_license_component_evidences(self): expected_data, ) ) - expected_data = ( - "{'message': 'No License_Component_Evidence matches the given query.'}" - ) + expected_data = "{'message': 'No License_Component_Evidence matches the given query.'}" self._test_api( APITest( "db_internal_write", diff --git a/backend/unittests/authorization/api/test_authorization_license_components.py b/backend/unittests/authorization/api/test_authorization_license_components.py new file mode 100644 index 000000000..a379aa5fe --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_license_components.py @@ -0,0 +1,169 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationLicense_Components(TestAuthorizationBase): + def test_authorization_license_components_product_member(self): + self._test_authorization_license_components() + + def test_authorization_license_components_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_authorization_license_components() + + def _test_authorization_license_components(self): + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'component_name_version_type': 'internal_component:1.0.0', 'branch_name': '', 'origin_service_name': '', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'upload_filename': '', 'component_name': 'internal_component', 'component_version': '1.0.0', 'component_name_version': 'internal_component:1.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'component_cyclonedx_bom_link': '', 'imported_declared_license_name': 'internal license', 'imported_declared_license_expression': '', 'imported_declared_non_spdx_license': 'internal license', 'imported_declared_multiple_licenses': '', 'imported_concluded_license_name': 'No license information', 'imported_concluded_license_expression': '', 'imported_concluded_non_spdx_license': '', 
'imported_concluded_multiple_licenses': '', 'manual_concluded_license_name': 'No license information', 'manual_concluded_license_expression': '', 'manual_concluded_non_spdx_license': '', 'manual_concluded_comment': '', 'effective_license_name': 'No license information', 'effective_license_expression': '', 'effective_non_spdx_license': '', 'effective_multiple_licenses': '', 'evaluation_result': 'Allowed', 'numerical_evaluation_result': 1, 'created': '2022-12-15T17:10:35.513000+01:00', 'import_last_seen': '2022-12-15T17:10:35.513000+01:00', 'last_change': '2022-12-15T17:10:35.513000+01:00', 'product': 1, 'branch': None, 'imported_declared_spdx_license': None, 'imported_concluded_spdx_license': None, 'manual_concluded_spdx_license': None, 'effective_spdx_license': None, 'origin_service': None}, {'id': 2, 'component_name_version_type': 'external_component:2.0.0', 'branch_name': '', 'origin_service_name': '', 'identity_hash': 'da3a81cebbfa79d18f0c0ba0046edacb2428d23a93f4b561ddd54b0478d16cb9', 'upload_filename': '', 'component_name': 'external_component', 'component_version': '2.0.0', 'component_name_version': 'external_component:2.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'component_cyclonedx_bom_link': '', 'imported_declared_license_name': 'external license', 'imported_declared_license_expression': '', 'imported_declared_non_spdx_license': 'external license', 'imported_declared_multiple_licenses': '', 'imported_concluded_license_name': 'No license information', 'imported_concluded_license_expression': '', 'imported_concluded_non_spdx_license': '', 'imported_concluded_multiple_licenses': '', 'manual_concluded_license_name': 'No license information', 'manual_concluded_license_expression': '', 'manual_concluded_non_spdx_license': '', 'manual_concluded_comment': '', 'effective_license_name': 'No license information', 'effective_license_expression': '', 'effective_non_spdx_license': '', 'effective_multiple_licenses': '', 'evaluation_result': 'Review required', 'numerical_evaluation_result': 2, 'created': '2022-12-15T17:10:35.513000+01:00', 'import_last_seen': '2022-12-15T17:10:35.513000+01:00', 'last_change': '2022-12-15T17:10:35.513000+01:00', 'product': 2, 'branch': None, 'imported_declared_spdx_license': None, 'imported_concluded_spdx_license': None, 'manual_concluded_spdx_license': None, 'effective_spdx_license': None, 'origin_service': None}]}" + self._test_api(APITest("db_admin", "get", "/api/license_components/", None, 200, expected_data)) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'component_name_version_type': 'internal_component:1.0.0', 'branch_name': '', 'origin_service_name': '', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'upload_filename': '', 'component_name': 'internal_component', 'component_version': '1.0.0', 'component_name_version': 'internal_component:1.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'component_cyclonedx_bom_link': '', 'imported_declared_license_name': 'internal license', 'imported_declared_license_expression': '', 'imported_declared_non_spdx_license': 'internal license', 'imported_declared_multiple_licenses': '', 'imported_concluded_license_name': 'No license information', 'imported_concluded_license_expression': '', 'imported_concluded_non_spdx_license': '', 'imported_concluded_multiple_licenses': '', 'manual_concluded_license_name': 'No license information', 'manual_concluded_license_expression': '', 
'manual_concluded_non_spdx_license': '', 'manual_concluded_comment': '', 'effective_license_name': 'No license information', 'effective_license_expression': '', 'effective_non_spdx_license': '', 'effective_multiple_licenses': '', 'evaluation_result': 'Allowed', 'numerical_evaluation_result': 1, 'created': '2022-12-15T17:10:35.513000+01:00', 'import_last_seen': '2022-12-15T17:10:35.513000+01:00', 'last_change': '2022-12-15T17:10:35.513000+01:00', 'product': 1, 'branch': None, 'imported_declared_spdx_license': None, 'imported_concluded_spdx_license': None, 'manual_concluded_spdx_license': None, 'effective_spdx_license': None, 'origin_service': None}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_components/", + None, + 200, + expected_data, + ) + ) + expected_data = "{'id': 1, 'component_name_version_type': 'internal_component:1.0.0', 'branch_name': '', 'origin_service_name': '', 'license_policy_name': '', 'license_policy_id': 0, 'evidences': [{'id': 1, 'name': 'internal_license_evidence_name'}], 'effective_license_type': '', 'title': 'No license information / internal_component:1.0.0', 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'upload_filename': '', 'component_name': 'internal_component', 'component_version': '1.0.0', 'component_name_version': 'internal_component:1.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'component_dependencies': '', 'component_cyclonedx_bom_link': '', 'imported_declared_license_name': 'internal license', 'imported_declared_license_expression': '', 'imported_declared_non_spdx_license': 'internal license', 'imported_declared_multiple_licenses': '', 'imported_concluded_license_name': 'No license information', 'imported_concluded_license_expression': '', 'imported_concluded_non_spdx_license': '', 'imported_concluded_multiple_licenses': '', 'manual_concluded_license_name': 'No license information', 'manual_concluded_license_expression': '', 'manual_concluded_non_spdx_license': '', 'manual_concluded_comment': '', 'effective_license_name': 'No license information', 'effective_license_expression': '', 'effective_non_spdx_license': '', 'effective_multiple_licenses': '', 'evaluation_result': 'Allowed', 'numerical_evaluation_result': 1, 'created': '2022-12-15T17:10:35.513000+01:00', 'import_last_seen': '2022-12-15T17:10:35.513000+01:00', 'last_change': '2022-12-15T17:10:35.513000+01:00', 'product': 1, 'branch': None, 'imported_declared_spdx_license': None, 'imported_concluded_spdx_license': None, 'manual_concluded_spdx_license': None, 'effective_spdx_license': None, 'origin_service': None}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_components/1/", + None, + 200, + expected_data, + ) + ) + expected_data = "{'message': 'No License_Component matches the given query.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_components/2/", + None, + 404, + expected_data, + ) + ) + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_components/99999/", + None, + 404, + expected_data, + ) + ) + + expected_data = "{'message': 'No component id provided'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_components/for_component/", + None, + 400, + expected_data, + ) + ) + + expected_data = "{'id': 1, 'component_name_version_type': 
'internal_component:1.0.0', 'branch_name': '', 'origin_service_name': '', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'upload_filename': '', 'component_name': 'internal_component', 'component_version': '1.0.0', 'component_name_version': 'internal_component:1.0.0', 'component_purl': '', 'component_purl_type': '', 'component_cpe': '', 'component_cyclonedx_bom_link': '', 'imported_declared_license_name': 'internal license', 'imported_declared_license_expression': '', 'imported_declared_non_spdx_license': 'internal license', 'imported_declared_multiple_licenses': '', 'imported_concluded_license_name': 'No license information', 'imported_concluded_license_expression': '', 'imported_concluded_non_spdx_license': '', 'imported_concluded_multiple_licenses': '', 'manual_concluded_license_name': 'No license information', 'manual_concluded_license_expression': '', 'manual_concluded_non_spdx_license': '', 'manual_concluded_comment': '', 'effective_license_name': 'No license information', 'effective_license_expression': '', 'effective_non_spdx_license': '', 'effective_multiple_licenses': '', 'evaluation_result': 'Allowed', 'numerical_evaluation_result': 1, 'created': '2022-12-15T17:10:35.513000+01:00', 'import_last_seen': '2022-12-15T17:10:35.513000+01:00', 'last_change': '2022-12-15T17:10:35.513000+01:00', 'product': 1, 'branch': None, 'imported_declared_spdx_license': None, 'imported_concluded_spdx_license': None, 'manual_concluded_spdx_license': None, 'effective_spdx_license': None, 'origin_service': None}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_components/for_component/?component=4dd012d1d2e18cf07666cc7fe0520da0", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'message': 'No Component matches the given query.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_components/for_component/?component=25a348829ab6ef7330b97069dab60a40", + None, + 404, + expected_data, + ) + ) + + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_components/for_component/?component=abc", + None, + 404, + expected_data, + ) + ) + + post_data = {"product": 1} + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/license_components/", + post_data, + 405, + None, + ) + ) + + self._test_api( + APITest( + "db_internal_write", + "patch", + "/api/license_components/1/", + {"title": "changed"}, + 405, + None, + ) + ) + + self._test_api( + APITest( + "db_internal_write", + "delete", + "/api/license_components/1/", + None, + 405, + None, + ) + ) + + patch_data = {"concluded_spdx_license": 1} + self._test_api( + APITest( + "db_internal_write", + "patch", + "/api/license_components/1/concluded_license/", + patch_data, + 200, + None, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/license_components/1/concluded_license/", + patch_data, + 403, + expected_data, + ) + ) diff --git a/backend/unittests/access_control/api/test_authorization_license_group_authorization_group_members.py b/backend/unittests/authorization/api/test_authorization_license_group_authorization_group_members.py similarity index 65% rename from backend/unittests/access_control/api/test_authorization_license_group_authorization_group_members.py rename to backend/unittests/authorization/api/test_authorization_license_group_authorization_group_members.py index a63309c0c..03fd4bf94 100644 --- 
a/backend/unittests/access_control/api/test_authorization_license_group_authorization_group_members.py +++ b/backend/unittests/authorization/api/test_authorization_license_group_authorization_group_members.py @@ -1,5 +1,5 @@ from application.licenses.models import License_Group, License_Policy -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -10,7 +10,7 @@ def test_authorization_license_group_authorization_group_members(self): License_Policy.objects.all().delete() License_Group.objects.filter(pk__lt=1000).delete() - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_group_data': {'id': 1003, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_group': 1003, 'authorization_group': 2}, {'id': 1001, 'license_group_data': {'id': 1004, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': True, 'license_group': 1004, 'authorization_group': 2}, {'id': 1002, 'license_group_data': {'id': 1003, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'is_manager': True, 'license_group': 1003, 'authorization_group': 3}]}" + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_group_data': {'id': 1003, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_group': 1003, 'authorization_group': 2}, {'id': 1001, 'license_group_data': {'id': 1004, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': True, 'license_group': 1004, 'authorization_group': 2}, {'id': 1002, 'license_group_data': {'id': 1003, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'is_manager': True, 'license_group': 1003, 'authorization_group': 3}]}" self._test_api( APITest( "db_admin", @@ -22,7 +22,7 @@ def test_authorization_license_group_authorization_group_members(self): ) ) - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_group_data': {'id': 1003, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_group': 1003, 'authorization_group': 2}, {'id': 1001, 'license_group_data': 
{'id': 1004, 'is_manager': True, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': True, 'license_group': 1004, 'authorization_group': 2}]}" + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_group_data': {'id': 1003, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_group': 1003, 'authorization_group': 2}, {'id': 1001, 'license_group_data': {'id': 1004, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': True, 'license_group': 1004, 'authorization_group': 2}]}" self._test_api( APITest( "db_product_group_user", @@ -35,7 +35,7 @@ def test_authorization_license_group_authorization_group_members(self): ) ) - expected_data = "{'id': 1000, 'license_group_data': {'id': 1003, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_group': 1003, 'authorization_group': 2}" + expected_data = "{'id': 1000, 'license_group_data': {'id': 1003, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_group': 1003, 'authorization_group': 2}" self._test_api( APITest( "db_product_group_user", @@ -66,7 +66,7 @@ def test_authorization_license_group_authorization_group_members(self): "authorization_group": 1, "is_manager": False, } - expected_data = "{'id': 1003, 'license_group_data': {'id': 1004, 'is_manager': True, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 1, 'name': 'oidc_group_1', 'oidc_group': 'oidc_1'}, 'is_manager': False, 'license_group': 1004, 'authorization_group': 1}" + expected_data = "{'id': 1003, 'license_group_data': {'id': 1004, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 1, 'name': 'oidc_group_1', 'oidc_group': 'oidc_1'}, 'is_manager': False, 'license_group': 1004, 'authorization_group': 1}" self._test_api( APITest( "db_product_group_user", @@ -102,9 +102,7 @@ def test_authorization_license_group_authorization_group_members(self): "authorization_group": 2, "is_manager": False, } - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_product_group_user", @@ -134,7 +132,7 @@ def test_authorization_license_group_authorization_group_members(self): ) ) - expected_data = "{'id': 1003, 'license_group_data': {'id': 1004, 'is_manager': True, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': 
True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 1, 'name': 'oidc_group_1', 'oidc_group': 'oidc_1'}, 'is_manager': True, 'license_group': 1004, 'authorization_group': 1}" + expected_data = "{'id': 1003, 'license_group_data': {'id': 1004, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}, 'authorization_group_data': {'id': 1, 'name': 'oidc_group_1', 'oidc_group': 'oidc_1'}, 'is_manager': True, 'license_group': 1004, 'authorization_group': 1}" self._test_api( APITest( "db_product_group_user", @@ -147,9 +145,7 @@ def test_authorization_license_group_authorization_group_members(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_product_group_user", diff --git a/backend/unittests/access_control/api/test_authorization_license_group_members.py b/backend/unittests/authorization/api/test_authorization_license_group_members.py similarity index 61% rename from backend/unittests/access_control/api/test_authorization_license_group_members.py rename to backend/unittests/authorization/api/test_authorization_license_group_members.py index 6445c151b..276761ea6 100644 --- a/backend/unittests/access_control/api/test_authorization_license_group_members.py +++ b/backend/unittests/authorization/api/test_authorization_license_group_members.py @@ -1,5 +1,5 @@ from application.licenses.models import License_Group, License_Policy -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -10,7 +10,7 @@ def test_authorization_license_group_members(self): License_Policy.objects.all().delete() License_Group.objects.filter(pk__lt=1000).delete() - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1001, 'license_group_data': {'id': 1001, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_read_not_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, 'is_manager': False, 'license_group': 1001, 'user': 3}, {'id': 1002, 'license_group_data': {'id': 1002, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'license_group': 1002, 'user': 2}, {'id': 1003, 'license_group_data': {'id': 1001, 'is_manager': False, 'is_in_license_policy': False, 
'has_licenses': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_read_not_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False}, 'is_manager': False, 'license_group': 1001, 'user': 4}]}" + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1001, 'license_group_data': {'id': 1001, 'name': 'internal_read_not_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, 'is_manager': False, 'license_group': 1001, 'user': 3}, {'id': 1002, 'license_group_data': {'id': 1002, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'license_group': 1002, 'user': 2}, {'id': 1003, 'license_group_data': {'id': 1001, 'name': 'internal_read_not_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False}, 'is_manager': False, 'license_group': 1001, 'user': 4}]}" self._test_api( APITest( "db_admin", @@ -22,7 +22,7 @@ def test_authorization_license_group_members(self): ) ) - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1002, 'license_group_data': {'id': 1002, 'is_manager': True, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 
'has_password': False}, 'is_manager': True, 'license_group': 1002, 'user': 2}]}" + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1002, 'license_group_data': {'id': 1002, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'license_group': 1002, 'user': 2}]}" self._test_api( APITest( "db_internal_write", @@ -35,7 +35,7 @@ def test_authorization_license_group_members(self): ) ) - expected_data = "{'id': 1002, 'license_group_data': {'id': 1002, 'is_manager': True, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'license_group': 1002, 'user': 2}" + expected_data = "{'id': 1002, 'license_group_data': {'id': 1002, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'license_group': 1002, 'user': 2}" self._test_api( APITest( "db_internal_write", @@ -47,9 +47,7 @@ def test_authorization_license_group_members(self): no_second_user=True, ) ) - expected_data = ( - "{'message': 'No License_Group_Member matches the given query.'}" - ) + expected_data = "{'message': 'No License_Group_Member matches the given query.'}" self._test_api( APITest( "db_internal_write", @@ -74,7 +72,7 @@ def test_authorization_license_group_members(self): ) post_data = {"license_group": 1002, "user": 6, "is_manager": False} - expected_data = "{'id': 1004, 'license_group_data': {'id': 1002, 'is_manager': True, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'full_name': 'db_product_group_user'}, 'is_manager': False, 'license_group': 1002, 'user': 6}" + expected_data = "{'id': 1004, 'license_group_data': {'id': 1002, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'full_name': 'db_product_group_user'}, 
'is_manager': False, 'license_group': 1002, 'user': 6}" self._test_api( APITest( "db_internal_write", @@ -88,9 +86,7 @@ def test_authorization_license_group_members(self): ) post_data = {"license_group": 1000, "user": 6, "is_manager": False} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_write", @@ -116,7 +112,7 @@ def test_authorization_license_group_members(self): ) ) - expected_data = "{'id': 1004, 'license_group_data': {'id': 1002, 'is_manager': True, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'full_name': 'db_product_group_user'}, 'is_manager': True, 'license_group': 1002, 'user': 6}" + expected_data = "{'id': 1004, 'license_group_data': {'id': 1002, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'full_name': 'db_product_group_user'}, 'is_manager': True, 'license_group': 1002, 'user': 6}" self._test_api( APITest( "db_internal_write", @@ -129,9 +125,7 @@ def test_authorization_license_group_members(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", diff --git a/backend/unittests/access_control/api/test_authorization_license_groups.py b/backend/unittests/authorization/api/test_authorization_license_groups.py similarity index 73% rename from backend/unittests/access_control/api/test_authorization_license_groups.py rename to backend/unittests/authorization/api/test_authorization_license_groups.py index 987154e82..9bc5b6f99 100644 --- a/backend/unittests/access_control/api/test_authorization_license_groups.py +++ b/backend/unittests/authorization/api/test_authorization_license_groups.py @@ -1,5 +1,7 @@ +from unittest.mock import patch + from application.licenses.models import License_Group, License_Policy -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -10,12 +12,10 @@ def test_authorization_license_groups(self): License_Policy.objects.all().delete() License_Group.objects.filter(pk__lt=1000).delete() - expected_data = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1000, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': False, 'has_authorization_groups': False, 'name': 'public', 'description': '', 'is_public': True}, {'id': 1001, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_read_not_manager', 'description': '', 'is_public': False}, {'id': 1002, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, {'id': 1003, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, {'id': 1004, 'is_manager': False, 
'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}]}" - self._test_api( - APITest("db_admin", "get", "/api/license_groups/", None, 200, expected_data) - ) + expected_data = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1000, 'name': 'public', 'description': '', 'is_public': True}, {'id': 1001, 'name': 'internal_read_not_manager', 'description': '', 'is_public': False}, {'id': 1002, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, {'id': 1003, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, {'id': 1004, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}]}" + self._test_api(APITest("db_admin", "get", "/api/license_groups/", None, 200, expected_data)) - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': False, 'has_authorization_groups': False, 'name': 'public', 'description': '', 'is_public': True}, {'id': 1002, 'is_manager': True, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, {'id': 1003, 'is_manager': True, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}]}" + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'name': 'public', 'description': '', 'is_public': True}, {'id': 1002, 'name': 'internal_write_manager', 'description': '', 'is_public': False}, {'id': 1003, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}]}" self._test_api( APITest( "db_internal_write", @@ -28,7 +28,7 @@ def test_authorization_license_groups(self): ) ) - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': True, 'has_users': False, 'has_authorization_groups': False, 'name': 'public', 'description': '', 'is_public': True}, {'id': 1003, 'is_manager': False, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, {'id': 1004, 'is_manager': True, 'is_in_license_policy': False, 'has_licenses': False, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}]}" + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'name': 'public', 'description': '', 'is_public': True}, {'id': 1003, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False}, {'id': 1004, 'name': 'authorization_group_manager', 'description': '', 'is_public': False}]}" self._test_api( APITest( "db_product_group_user", @@ -92,9 +92,7 @@ def test_authorization_license_groups(self): ) post_data = {"name": "new_license_group_external"} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_external", @@ -120,9 +118,7 @@ def 
test_authorization_license_groups(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_write", @@ -148,9 +144,7 @@ def test_authorization_license_groups(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_product_group_user", @@ -202,9 +196,7 @@ def test_authorization_license_groups(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_external", @@ -354,28 +346,3 @@ def test_authorization_license_groups(self): no_second_user=True, ) ) - - expected_data = "{'message': 'User is not allowed to import license groups from ScanCode LicenseDB'}" - self._test_api( - APITest( - "db_internal_write", - "post", - "/api/license_groups/import_scancode_licensedb/", - post_data, - 403, - expected_data, - no_second_user=True, - ) - ) - - self._test_api( - APITest( - "db_admin", - "post", - "/api/license_groups/import_scancode_licensedb/", - post_data, - 204, - None, - no_second_user=True, - ) - ) diff --git a/backend/unittests/access_control/api/test_authorization_license_policies.py b/backend/unittests/authorization/api/test_authorization_license_policies.py similarity index 74% rename from backend/unittests/access_control/api/test_authorization_license_policies.py rename to backend/unittests/authorization/api/test_authorization_license_policies.py index e8aca6f72..ed736d567 100644 --- a/backend/unittests/access_control/api/test_authorization_license_policies.py +++ b/backend/unittests/authorization/api/test_authorization_license_policies.py @@ -1,6 +1,6 @@ from application.core.models import Product from application.licenses.models import License_Policy -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -10,14 +10,10 @@ class TestAuthorizationLicensePolicies(TestAuthorizationBase): def test_authorization_license_policies(self): License_Policy.objects.filter(pk__lt=1000).delete() - expected_data = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1000, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': False, 'name': 'public', 'description': '', 'is_public': True, 'ignore_component_types': '', 'parent': None}, {'id': 1001, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_read_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, {'id': 1003, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': 
False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, {'id': 1004, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}]}" - self._test_api( - APITest( - "db_admin", "get", "/api/license_policies/", None, 200, expected_data - ) - ) + expected_data = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1000, 'parent_name': '', 'name': 'public', 'description': '', 'is_public': True, 'ignore_component_types': '', 'parent': None}, {'id': 1001, 'parent_name': '', 'name': 'internal_read_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, {'id': 1003, 'parent_name': '', 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, {'id': 1004, 'parent_name': '', 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}]}" + self._test_api(APITest("db_admin", "get", "/api/license_policies/", None, 200, expected_data)) - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': False, 'name': 'public', 'description': '', 'is_public': True, 'ignore_component_types': '', 'parent': None}, {'id': 1002, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': True, 'has_authorization_groups': False, 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, {'id': 1003, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}]}" + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'parent_name': '', 'name': 'public', 'description': '', 'is_public': True, 'ignore_component_types': '', 'parent': None}, {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, {'id': 1003, 'parent_name': '', 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}]}" self._test_api( APITest( "db_internal_write", @@ -30,7 +26,7 @@ def test_authorization_license_policies(self): ) ) - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': False, 'name': 'public', 'description': '', 
'is_public': True, 'ignore_component_types': '', 'parent': None}, {'id': 1003, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, {'id': 1004, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}]}" + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'parent_name': '', 'name': 'public', 'description': '', 'is_public': True, 'ignore_component_types': '', 'parent': None}, {'id': 1003, 'parent_name': '', 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, {'id': 1004, 'parent_name': '', 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}]}" self._test_api( APITest( "db_product_group_user", @@ -165,9 +161,7 @@ def test_authorization_license_policies(self): ) post_data = {"name": "new_license_policy_external"} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_external", @@ -193,9 +187,7 @@ def test_authorization_license_policies(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_write", @@ -221,9 +213,7 @@ def test_authorization_license_policies(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_product_group_user", @@ -275,9 +265,7 @@ def test_authorization_license_policies(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_external", @@ -315,9 +303,7 @@ def test_authorization_license_policies(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", diff --git a/backend/unittests/access_control/api/test_authorization_license_policy_authorization_group_members.py b/backend/unittests/authorization/api/test_authorization_license_policy_authorization_group_members.py similarity index 55% rename from backend/unittests/access_control/api/test_authorization_license_policy_authorization_group_members.py rename to backend/unittests/authorization/api/test_authorization_license_policy_authorization_group_members.py index 95e696c0c..60ffce6d8 100644 --- a/backend/unittests/access_control/api/test_authorization_license_policy_authorization_group_members.py +++ b/backend/unittests/authorization/api/test_authorization_license_policy_authorization_group_members.py @@ -1,5 +1,5 @@ 
from application.licenses.models import License_Policy -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -9,7 +9,7 @@ class TestAuthorizationLicensePolicyAuthorizationGroupMembers(TestAuthorizationB def test_authorization_license_policy_authorization_group_members(self): License_Policy.objects.filter(pk__lt=1000).delete() - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_policy_data': {'id': 1003, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_policy': 1003, 'authorization_group': 2}, {'id': 1001, 'license_policy_data': {'id': 1004, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': True, 'license_policy': 1004, 'authorization_group': 2}, {'id': 1002, 'license_policy_data': {'id': 1003, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'is_manager': True, 'license_policy': 1003, 'authorization_group': 3}]}" + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_policy_data': {'id': 1003, 'parent_name': '', 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_policy': 1003, 'authorization_group': 2}, {'id': 1001, 'license_policy_data': {'id': 1004, 'parent_name': '', 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': True, 'license_policy': 1004, 'authorization_group': 2}, {'id': 1002, 'license_policy_data': {'id': 1003, 'parent_name': '', 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 3, 'name': 'non_oidc_group', 'oidc_group': ''}, 'is_manager': True, 'license_policy': 1003, 'authorization_group': 3}]}" self._test_api( APITest( "db_admin", @@ -21,7 +21,7 @@ def test_authorization_license_policy_authorization_group_members(self): ) ) - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_policy_data': {'id': 1003, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 
'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_policy': 1003, 'authorization_group': 2}, {'id': 1001, 'license_policy_data': {'id': 1004, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': True, 'license_policy': 1004, 'authorization_group': 2}]}" + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_policy_data': {'id': 1003, 'parent_name': '', 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_policy': 1003, 'authorization_group': 2}, {'id': 1001, 'license_policy_data': {'id': 1004, 'parent_name': '', 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': True, 'license_policy': 1004, 'authorization_group': 2}]}" self._test_api( APITest( "db_product_group_user", @@ -34,7 +34,7 @@ def test_authorization_license_policy_authorization_group_members(self): ) ) - expected_data = "{'id': 1000, 'license_policy_data': {'id': 1003, 'parent_name': '', 'is_parent': False, 'is_manager': False, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_policy': 1003, 'authorization_group': 2}" + expected_data = "{'id': 1000, 'license_policy_data': {'id': 1003, 'parent_name': '', 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 2, 'name': 'oidc_group_2', 'oidc_group': 'oidc_2'}, 'is_manager': False, 'license_policy': 1003, 'authorization_group': 2}" self._test_api( APITest( "db_product_group_user", @@ -65,7 +65,7 @@ def test_authorization_license_policy_authorization_group_members(self): "authorization_group": 1, "is_manager": False, } - expected_data = "{'id': 1003, 'license_policy_data': {'id': 1004, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 1, 'name': 'oidc_group_1', 'oidc_group': 'oidc_1'}, 'is_manager': False, 'license_policy': 1004, 'authorization_group': 1}" + expected_data = "{'id': 1003, 'license_policy_data': {'id': 1004, 
'parent_name': '', 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 1, 'name': 'oidc_group_1', 'oidc_group': 'oidc_1'}, 'is_manager': False, 'license_policy': 1004, 'authorization_group': 1}" self._test_api( APITest( "db_product_group_user", @@ -101,9 +101,7 @@ def test_authorization_license_policy_authorization_group_members(self): "authorization_group": 2, "is_manager": False, } - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_product_group_user", @@ -133,7 +131,7 @@ def test_authorization_license_policy_authorization_group_members(self): ) ) - expected_data = "{'id': 1003, 'license_policy_data': {'id': 1004, 'parent_name': '', 'is_parent': False, 'is_manager': True, 'has_products': False, 'has_product_groups': False, 'has_items': True, 'has_users': False, 'has_authorization_groups': True, 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 1, 'name': 'oidc_group_1', 'oidc_group': 'oidc_1'}, 'is_manager': True, 'license_policy': 1004, 'authorization_group': 1}" + expected_data = "{'id': 1003, 'license_policy_data': {'id': 1004, 'parent_name': '', 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'authorization_group_data': {'id': 1, 'name': 'oidc_group_1', 'oidc_group': 'oidc_1'}, 'is_manager': True, 'license_policy': 1004, 'authorization_group': 1}" self._test_api( APITest( "db_product_group_user", @@ -146,9 +144,7 @@ def test_authorization_license_policy_authorization_group_members(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_product_group_user", diff --git a/backend/unittests/authorization/api/test_authorization_license_policy_items.py b/backend/unittests/authorization/api/test_authorization_license_policy_items.py new file mode 100644 index 000000000..9df85576a --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_license_policy_items.py @@ -0,0 +1,284 @@ +from application.licenses.models import License_Policy +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) + + +class TestAuthorizationLicensePolicyItems(TestAuthorizationBase): + def test_authorization_license_policy_items(self): + License_Policy.objects.filter(pk__lt=1000).delete() + + expected_data = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_spdx_id': '', 'license_group_name': 'Permissive Model (Blue Oak Council)', 'license_policy_data': {'id': 1000, 'parent_name': '', 'name': 'public', 'description': '', 'is_public': True, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': '', 'evaluation_result': 'Allowed', 'comment': '', 'license_policy': 1000, 'license_group': 1, 'license': None}, {'id': 1001, 'license_spdx_id': '0BSD', 'license_group_name': '', 'license_policy_data': {'id': 1001, 'parent_name': '', 'name': 'internal_read_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 
'non_spdx_license': '', 'evaluation_result': 'Forbidden', 'comment': '', 'license_policy': 1001, 'license_group': None, 'license': 1}, {'id': 1002, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Two non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1002, 'license_group': None, 'license': None}, {'id': 1003, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1003, 'parent_name': '', 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Three non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1003, 'license_group': None, 'license': None}, {'id': 1004, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1004, 'parent_name': '', 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Four non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1004, 'license_group': None, 'license': None}]}" + self._test_api( + APITest( + "db_admin", + "get", + "/api/license_policy_items/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_spdx_id': '', 'license_group_name': 'Permissive Model (Blue Oak Council)', 'license_policy_data': {'id': 1000, 'parent_name': '', 'name': 'public', 'description': '', 'is_public': True, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': '', 'evaluation_result': 'Allowed', 'comment': '', 'license_policy': 1000, 'license_group': 1, 'license': None}, {'id': 1002, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Two non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1002, 'license_group': None, 'license': None}, {'id': 1003, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1003, 'parent_name': '', 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Three non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1003, 'license_group': None, 'license': None}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_policy_items/", + None, + 200, + expected_data, + no_second_user=True, + ) + ) + + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1000, 'license_spdx_id': '', 'license_group_name': 'Permissive Model (Blue Oak Council)', 'license_policy_data': {'id': 1000, 'parent_name': '', 'name': 'public', 'description': '', 'is_public': True, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': '', 'evaluation_result': 'Allowed', 'comment': '', 'license_policy': 1000, 'license_group': 1, 'license': None}, {'id': 1003, 
'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1003, 'parent_name': '', 'name': 'authorization_group_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Three non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1003, 'license_group': None, 'license': None}, {'id': 1004, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1004, 'parent_name': '', 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Four non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1004, 'license_group': None, 'license': None}]}" + self._test_api( + APITest( + "db_product_group_user", + "get", + "/api/license_policy_items/", + None, + 200, + expected_data, + no_second_user=True, + ) + ) + + expected_data = "{'id': 1002, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Two non-spdx licenses', 'evaluation_result': 'Unknown', 'comment': '', 'license_policy': 1002, 'license_group': None, 'license': None}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_policy_items/1002/", + None, + 200, + expected_data, + no_second_user=True, + ) + ) + expected_data = "{'message': 'No License_Policy_Item matches the given query.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_policy_items/1001/", + None, + 404, + expected_data, + no_second_user=True, + ) + ) + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_policy_items/99999/", + None, + 404, + expected_data, + no_second_user=True, + ) + ) + + post_data = { + "license_policy": 1002, + "license_group": 2, + "non_spdx_license": "", + "evaluation_result": "Allowed", + } + expected_data = "{'id': 1005, 'license_spdx_id': '', 'license_group_name': 'Permissive Gold (Blue Oak Council)', 'license_policy_data': {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': '', 'evaluation_result': 'Allowed', 'comment': '', 'license_policy': 1002, 'license_group': 2, 'license': None}" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/license_policy_items/", + post_data, + 201, + expected_data, + no_second_user=True, + ) + ) + + post_data = { + "license_policy": 1000, + "license_group": 2, + "non_spdx_license": "", + "evaluation_result": "Allowed", + } + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/license_policy_items/", + post_data, + 403, + expected_data, + no_second_user=True, + ) + ) + + post_data = { + "license_policy": 1004, + "license_group": 2, + "non_spdx_license": "", + "evaluation_result": "Allowed", + } + expected_data = "{'id': 1006, 'license_spdx_id': '', 'license_group_name': 'Permissive Gold (Blue Oak Council)', 'license_policy_data': {'id': 1004, 'parent_name': '', 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': 
None}, 'license_expression': '', 'non_spdx_license': '', 'evaluation_result': 'Allowed', 'comment': '', 'license_policy': 1004, 'license_group': 2, 'license': None}" + self._test_api( + APITest( + "db_product_group_user", + "post", + "/api/license_policy_items/", + post_data, + 201, + expected_data, + no_second_user=True, + ) + ) + + post_data = { + "license_policy": 1003, + "license_group": 2, + "non_spdx_license": "", + "evaluation_result": "Allowed", + } + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_product_group_user", + "post", + "/api/license_policy_items/", + post_data, + 403, + expected_data, + no_second_user=True, + ) + ) + + post_data = { + "license_policy": 1001, + "license_group": 2, + "non_spdx_license": "", + "evaluation_result": "Allowed", + } + self._test_api( + APITest( + "db_internal_read", + "post", + "/api/license_policy_items/", + post_data, + 403, + expected_data, + no_second_user=True, + ) + ) + + expected_data = "{'id': 1002, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Two non-spdx licenses', 'evaluation_result': 'Review required', 'comment': '', 'license_policy': 1002, 'license_group': None, 'license': None}" + self._test_api( + APITest( + "db_internal_write", + "patch", + "/api/license_policy_items/1002/", + { + "non_spdx_license": "Two non-spdx licenses", + "evaluation_result": "Review required", + }, + 200, + expected_data, + no_second_user=True, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/license_policy_items/1001/", + {"is_manager": "True"}, + 403, + expected_data, + no_second_user=True, + ) + ) + + expected_data = "{'id': 1004, 'license_spdx_id': '', 'license_group_name': '', 'license_policy_data': {'id': 1004, 'parent_name': '', 'name': 'authorization_group_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'license_expression': '', 'non_spdx_license': 'Four non-spdx licenses', 'evaluation_result': 'Review required', 'comment': '', 'license_policy': 1004, 'license_group': None, 'license': None}" + self._test_api( + APITest( + "db_product_group_user", + "patch", + "/api/license_policy_items/1004/", + { + "non_spdx_license": "Four non-spdx licenses", + "evaluation_result": "Review required", + }, + 200, + expected_data, + no_second_user=True, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_product_group_user", + "patch", + "/api/license_policy_items/1003/", + {"is_manager": "True"}, + 403, + expected_data, + no_second_user=True, + ) + ) + + self._test_api( + APITest( + "db_internal_write", + "delete", + "/api/license_policy_items/1002/", + None, + 204, + None, + no_second_user=True, + ) + ) + + self._test_api( + APITest( + "db_internal_read", + "delete", + "/api/license_policy_items/1001/", + None, + 403, + None, + no_second_user=True, + ) + ) + + self._test_api( + APITest( + "db_product_group_user", + "delete", + "/api/license_policy_items/1004/", + None, + 204, + None, + no_second_user=True, + ) + ) + + self._test_api( + APITest( + "db_product_group_user", + "delete", + "/api/license_policy_items/1003/", + None, + 
403, + None, + no_second_user=True, + ) + ) diff --git a/backend/unittests/authorization/api/test_authorization_license_policy_members.py b/backend/unittests/authorization/api/test_authorization_license_policy_members.py new file mode 100644 index 000000000..df242ca07 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_license_policy_members.py @@ -0,0 +1,162 @@ +from application.licenses.models import License_Policy +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) + + +class TestAuthorizationLicensePolicyMembers(TestAuthorizationBase): + def test_authorization_license_policy_members(self): + License_Policy.objects.filter(pk__lt=1000).delete() + + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1001, 'license_policy_data': {'id': 1001, 'parent_name': '', 'name': 'internal_read_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, 'is_manager': False, 'license_policy': 1001, 'user': 3}, {'id': 1002, 'license_policy_data': {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'license_policy': 1002, 'user': 2}, {'id': 1003, 'license_policy_data': {'id': 1001, 'parent_name': '', 'name': 'internal_read_not_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False}, 'is_manager': False, 'license_policy': 1001, 'user': 4}]}" + self._test_api( + APITest( + "db_admin", + "get", + "/api/license_policy_members/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1002, 'license_policy_data': {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': 
True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'license_policy': 1002, 'user': 2}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_policy_members/", + None, + 200, + expected_data, + no_second_user=True, + ) + ) + + expected_data = "{'id': 1002, 'license_policy_data': {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'is_manager': True, 'license_policy': 1002, 'user': 2}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_policy_members/1002/", + None, + 200, + expected_data, + no_second_user=True, + ) + ) + expected_data = "{'message': 'No License_Policy_Member matches the given query.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_policy_members/1001/", + None, + 404, + expected_data, + no_second_user=True, + ) + ) + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/license_policy_members/99999/", + None, + 404, + expected_data, + no_second_user=True, + ) + ) + + post_data = {"license_policy": 1002, "user": 6, "is_manager": False} + expected_data = "{'id': 1004, 'license_policy_data': {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'full_name': 'db_product_group_user'}, 'is_manager': False, 'license_policy': 1002, 'user': 6}" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/license_policy_members/", + post_data, + 201, + expected_data, + no_second_user=True, + ) + ) + + post_data = {"license_policy": 1000, "user": 6, "is_manager": False} + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/license_policy_members/", + post_data, + 403, + expected_data, + no_second_user=True, + ) + ) + + post_data = {"license_policy": 1001, "user": 6, "is_manager": False} + self._test_api( + APITest( + "db_internal_read", + "post", + "/api/license_policy_members/", + post_data, + 403, + expected_data, + no_second_user=True, + ) + ) + + expected_data = "{'id': 1004, 'license_policy_data': {'id': 1002, 'parent_name': '', 'name': 'internal_write_manager', 'description': '', 'is_public': False, 'ignore_component_types': '', 'parent': None}, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'full_name': 'db_product_group_user'}, 'is_manager': True, 'license_policy': 1002, 'user': 6}" + self._test_api( + APITest( + "db_internal_write", + "patch", + "/api/license_policy_members/1004/", + {"is_manager": "True"}, + 200, + 
expected_data, + no_second_user=True, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/license_policy_members/1001/", + {"is_manager": "True"}, + 403, + expected_data, + no_second_user=True, + ) + ) + + self._test_api( + APITest( + "db_internal_write", + "delete", + "/api/license_policy_members/1004/", + None, + 204, + None, + no_second_user=True, + ) + ) + + self._test_api( + APITest( + "db_internal_read", + "delete", + "/api/license_policy_members/1001/", + None, + 403, + None, + no_second_user=True, + ) + ) diff --git a/backend/unittests/authorization/api/test_authorization_notifications.py b/backend/unittests/authorization/api/test_authorization_notifications.py new file mode 100644 index 000000000..9e151fd57 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_notifications.py @@ -0,0 +1,49 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationNotifications(TestAuthorizationBase): + def test_authorization_notifications_product_member(self): + self._test_authorization_notifications() + + def test_authorization_notifications_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_authorization_notifications() + + def _test_authorization_notifications(self): + expected_data = "{'count': 6, 'next': None, 'previous': None, 'results': [{'id': 1, 'message': 'message_exception_internal', 'product_name': 'db_product_internal', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_internal_write', 'new_viewed': 'New', 'name': 'exception_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Exception', 'function': '', 'arguments': '', 'user': 2, 'product': 1, 'observation': 1}, {'id': 2, 'message': 'message_exception_external', 'product_name': 'db_product_external', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_external', 'new_viewed': 'New', 'name': 'exception_external', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Exception', 'function': '', 'arguments': '', 'user': 4, 'product': 2, 'observation': 2}, {'id': 3, 'message': '', 'product_name': 'db_product_internal', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_internal_write', 'new_viewed': 'New', 'name': 'security_gate_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Security gate', 'function': '', 'arguments': '', 'user': 2, 'product': 1, 'observation': 1}, {'id': 4, 'message': '', 'product_name': 'db_product_external', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_external', 'new_viewed': 'New', 'name': 'security_gate_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Security gate', 'function': '', 'arguments': '', 'user': 4, 'product': 2, 'observation': 2}, {'id': 5, 'message': 'message_task_internal', 'product_name': 'db_product_internal', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_internal_write', 'new_viewed': 'New', 'name': 'task_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Task', 'function': 'function_task_internal', 'arguments': 'arguments_task_internal', 'user': 2, 'product': 1, 'observation': 1}, {'id': 6, 'message': 'message_task_external', 'product_name': 'db_product_external', 
'observation_title': 'db_observation_internal', 'user_full_name': 'db_external', 'new_viewed': 'New', 'name': 'task_external', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Task', 'function': 'function_task_external', 'arguments': 'arguments_task_external', 'user': 4, 'product': 2, 'observation': 2}]}" + self._test_api(APITest("db_admin", "get", "/api/notifications/", None, 200, expected_data)) + + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 3, 'message': '', 'product_name': 'db_product_internal', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_internal_write', 'new_viewed': 'New', 'name': 'security_gate_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Security gate', 'function': '', 'arguments': '', 'user': 2, 'product': 1, 'observation': 1}, {'id': 5, 'message': '...', 'product_name': 'db_product_internal', 'observation_title': 'db_observation_internal', 'user_full_name': 'db_internal_write', 'new_viewed': 'New', 'name': 'task_internal', 'created': '2022-12-15T17:10:35.518000+01:00', 'type': 'Task', 'function': 'function_task_internal', 'arguments': 'arguments_task_internal', 'user': 2, 'product': 1, 'observation': 1}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/notifications/", + None, + 200, + expected_data, + ) + ) + + self._test_api(APITest("db_internal_write", "get", "/api/notifications/1/", None, 404, None)) + + self._test_api(APITest("db_internal_write", "get", "/api/notifications/3/", None, 200, None)) + + post_data = {"notifications": [1, 3, 5]} + expected_data = "{'message': 'Some notifications do not exist'}" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/notifications/bulk_mark_as_viewed/", + post_data, + 400, + expected_data, + ) + ) diff --git a/backend/unittests/authorization/api/test_authorization_observation_logs.py b/backend/unittests/authorization/api/test_authorization_observation_logs.py new file mode 100644 index 000000000..697adb388 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_observation_logs.py @@ -0,0 +1,46 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationObservationLogs(TestAuthorizationBase): + def test_authorization_observation_logs_product_member(self): + self._test_authorization_observation_logs() + + def test_authorization_observation_logs_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_authorization_observation_logs() + + def _test_authorization_observation_logs(self): + expected_data = "{'count': 4, 'next': None, 'previous': None, 'results': [{'id': 2, 'observation_data': {'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 
'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'origin_source_file_short': '', 'origin_source_file_url': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'issue_tracker_issue_id': '', 
'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'severity': '', 'status': 'Duplicate', 'priority': None, 'comment': 'Set by product rule', 'created': '2022-12-15T17:10:35.524000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 1, 'user': 2, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}, {'id': 1, 'observation_data': {'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'origin_source_file_short': '', 'origin_source_file_url': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 
'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'severity': 'Medium', 'status': 'Open', 'priority': None, 'comment': 'Set by parser', 'created': '2022-12-15T17:10:35.518000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 1, 'user': 2, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}, {'id': 4, 'observation_data': {'id': 2, 'product_data': {'id': 2, 'product_group_name': '', 'name': 'db_product_external', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 
'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': 3, 'license_policy': None}, 'branch_name': '', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'origin_source_file_short': '', 'origin_source_file_url': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'False positive', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'False positive', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.876000+01:00', 'created': '2022-12-15T17:10:35.521000+01:00', 'modified': '2022-12-16T17:13:18.283000+01:00', 'last_observation_log': '2022-12-16T17:13:18.283000+01:00', 'identity_hash': 'da3a81cebbfa79d18f0c0ba0046edacb2428d23a93f4b561ddd54b0478d16cb9', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 
'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 2, 'branch': None, 'parser': 1, 'origin_service': None, 'general_rule': None, 'product_rule': 2, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}, 'user_full_name': 'db_external', 'approval_user_full_name': None, 'severity': '', 'status': 'False positive', 'priority': None, 'comment': 'Set by product rule', 'created': '2022-12-15T17:12:23.196000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 2, 'user': 4, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}, {'id': 3, 'observation_data': {'id': 2, 'product_data': {'id': 2, 'product_group_name': '', 'name': 'db_product_external', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': 3, 'license_policy': None}, 'branch_name': '', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'origin_source_file_short': '', 'origin_source_file_url': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'False positive', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'False positive', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 
'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.876000+01:00', 'created': '2022-12-15T17:10:35.521000+01:00', 'modified': '2022-12-16T17:13:18.283000+01:00', 'last_observation_log': '2022-12-16T17:13:18.283000+01:00', 'identity_hash': 'da3a81cebbfa79d18f0c0ba0046edacb2428d23a93f4b561ddd54b0478d16cb9', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 2, 'branch': None, 'parser': 1, 'origin_service': None, 'general_rule': None, 'product_rule': 2, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}, 'user_full_name': 'db_external', 'approval_user_full_name': None, 'severity': 'Medium', 'status': 'Open', 'priority': None, 'comment': 'Set by parser', 'created': '2022-12-15T17:11:28.326000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 2, 'user': 4, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}]}" + self._test_api(APITest("db_admin", "get", "/api/observation_logs/", None, 200, expected_data)) + + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 2, 'observation_data': {'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 
'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'origin_source_file_short': '', 'origin_source_file_url': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 
'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'severity': '', 'status': 'Duplicate', 'priority': None, 'comment': 'Set by product rule', 'created': '2022-12-15T17:10:35.524000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 1, 'user': 2, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}, {'id': 1, 'observation_data': {'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'origin_source_file_short': '', 'origin_source_file_url': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 
'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'severity': 'Medium', 'status': 'Open', 'priority': None, 'comment': 'Set by parser', 'created': '2022-12-15T17:10:35.518000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 1, 'user': 2, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/observation_logs/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'id': 1, 'observation_data': {'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 
'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'references': [], 'evidences': [{'id': 1, 'name': 'db_evidence_internal'}], 'origin_source_file_url': None, 'issue_tracker_issue_url': None, 'assessment_needs_approval': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_dependencies': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 
'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'severity': 'Medium', 'status': 'Open', 'priority': None, 'comment': 'Set by parser', 'created': '2022-12-15T17:10:35.518000+01:00', 'vex_justification': '', 'assessment_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'risk_acceptance_expiry_date': None, 'observation': 1, 'user': 2, 'approval_user': None, 'general_rule': None, 'product_rule': None, 'vex_statement': None}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/observation_logs/1/", + None, + 200, + expected_data, + ) + ) + + self._test_api(APITest("db_internal_write", "get", "/api/observation_logs/3/", None, 404, None)) diff --git a/backend/unittests/authorization/api/test_authorization_observations.py b/backend/unittests/authorization/api/test_authorization_observations.py new file mode 100644 index 000000000..e7d69e4bb --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_observations.py @@ -0,0 +1,181 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationObservations(TestAuthorizationBase): + def test_authorization_observations_product_member(self): + self._test_authorization_observations() + + def test_authorization_observations_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_authorization_observations() + + def _test_authorization_observations(self): + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 
'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'origin_source_file_short': '', 'origin_source_file_url': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 
'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}, {'id': 2, 'product_data': {'id': 2, 'product_group_name': '', 'name': 'db_product_external', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': 3, 'license_policy': None}, 'branch_name': '', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'origin_source_file_short': '', 'origin_source_file_url': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'False positive', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'False positive', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 
'origin_service_name': '', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.876000+01:00', 'created': '2022-12-15T17:10:35.521000+01:00', 'modified': '2022-12-16T17:13:18.283000+01:00', 'last_observation_log': '2022-12-16T17:13:18.283000+01:00', 'identity_hash': 'da3a81cebbfa79d18f0c0ba0046edacb2428d23a93f4b561ddd54b0478d16cb9', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 2, 'branch': None, 'parser': 1, 'origin_service': None, 'general_rule': None, 'product_rule': 2, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}]}" + self._test_api(APITest("db_admin", "get", "/api/observations/", None, 200, expected_data)) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 
'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'origin_source_file_short': '', 'origin_source_file_url': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/observations/", + None, + 200, + expected_data, + ) + ) + expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 
'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'references': [], 'evidences': [{'id': 1, 'name': 'db_evidence_internal'}], 'origin_source_file_url': None, 'issue_tracker_issue_url': None, 'assessment_needs_approval': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_dependencies': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 
'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/observations/1/", + None, + 200, + expected_data, + ) + ) + expected_data = "{'message': 'No Observation matches the given query.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/observations/2/", + None, + 404, + expected_data, + ) + ) + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/observations/99999/", + None, + 404, + expected_data, + ) + ) + + post_data = {"product": 1} + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "post", + "/api/observations/", + post_data, + 403, + expected_data, + ) + ) + expected_data = "{'message': 'Title: This field is required.'}" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/observations/", + post_data, + 400, + expected_data, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/observations/1/", + {"title": "changed"}, + 403, + expected_data, + ) + ) + expected_data = "{'message': 'Non field errors: Only manual observations can be updated'}" + self._test_api( + APITest( + "db_internal_write", + "patch", + "/api/observations/1/", + {"title": "changed"}, + 400, + expected_data, + ) + ) + + post_data = {"comment": "reason for assessment"} + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/observations/1/assessment/", + post_data, + 403, + expected_data, + ) + ) + expected_data = "{'message': 'Observation 99999 not found'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/observations/99999/assessment/", + post_data, + 404, + expected_data, + ) + ) + expected_data = "None" + self._test_api( + APITest( + "db_internal_write", + "patch", + "/api/observations/1/assessment/", + post_data, + 200, + expected_data, + ) + ) + + post_data = {"comment": "reason for assessment removal"} + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/observations/1/remove_assessment/", + 
post_data, + 403, + expected_data, + ) + ) + expected_data = "{'message': 'Observation 99999 not found'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/observations/99999/remove_assessment/", + post_data, + 404, + expected_data, + ) + ) + expected_data = "None" + self._test_api( + APITest( + "db_internal_write", + "patch", + "/api/observations/1/remove_assessment/", + post_data, + 200, + expected_data, + ) + ) diff --git a/backend/unittests/authorization/api/test_authorization_periodic_tasks.py b/backend/unittests/authorization/api/test_authorization_periodic_tasks.py new file mode 100644 index 000000000..c7fdd7c10 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_periodic_tasks.py @@ -0,0 +1,18 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) + + +class TestAuthorizationPeriodicTasks(TestAuthorizationBase): + + def test_authorization_periodic_tasks(self): + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'task': 'Calculate product metrics', 'start_time': '2022-12-15T17:10:35.513000+01:00', 'duration': 1234, 'status': 'Success', 'message': 'Task completed successfully'}, {'id': 2, 'task': 'Calculate product metrics', 'start_time': '2022-12-16T17:10:35.513000+01:00', 'duration': 5678, 'status': 'Failure', 'message': 'Exception has occurred'}]}" + self._test_api(APITest("db_admin", "get", "/api/periodic_tasks/", None, 200, expected_data)) + + expected_data = "{'id': 1, 'task': 'Calculate product metrics', 'start_time': '2022-12-15T17:10:35.513000+01:00', 'duration': 1234, 'status': 'Success', 'message': 'Task completed successfully'}" + self._test_api(APITest("db_admin", "get", "/api/periodic_tasks/1/", None, 200, expected_data)) + + self._test_api(APITest("db_internal_write", "get", "/api/periodic_tasks/", None, 403, None)) + + self._test_api(APITest("db_internal_write", "get", "/api/periodic_tasks/1/", None, 403, None)) diff --git a/backend/unittests/authorization/api/test_authorization_potential_duplicates.py b/backend/unittests/authorization/api/test_authorization_potential_duplicates.py new file mode 100644 index 000000000..6ad64f91f --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_potential_duplicates.py @@ -0,0 +1,46 @@ +from django.core.management import call_command + +from application.core.models import Observation +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationPotentialDuplicates(TestAuthorizationBase): + def test_authorization_potential_duplicates_product_member(self): + self._test_authorization_potential_duplicates() + + def test_authorization_potential_duplicates_product_authorization_group_member( + self, + ): + prepare_authorization_groups() + self._test_authorization_potential_duplicates() + + def _test_authorization_potential_duplicates(self): + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'potential_duplicate_observation': {'id': 1, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': 
'', 'rule_status': 'Duplicate', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'vulnerability_id_aliases': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_dependencies': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}, 'type': 'Component', 'observation': 1}, {'id': 2, 'potential_duplicate_observation': {'id': 2, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'False positive', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'False positive', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'vulnerability_id_aliases': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_dependencies': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 
'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.876000+01:00', 'created': '2022-12-15T17:10:35.521000+01:00', 'modified': '2022-12-16T17:13:18.283000+01:00', 'last_observation_log': '2022-12-16T17:13:18.283000+01:00', 'identity_hash': 'da3a81cebbfa79d18f0c0ba0046edacb2428d23a93f4b561ddd54b0478d16cb9', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 2, 'branch': None, 'parser': 1, 'origin_service': None, 'general_rule': None, 'product_rule': 2, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}, 'type': 'Source', 'observation': 2}]}" + self._test_api( + APITest( + "db_admin", + "get", + "/api/potential_duplicates/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'potential_duplicate_observation': {'id': 1, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'vulnerability_id_aliases': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_dependencies': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 
'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}, 'type': 'Component', 'observation': 1}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/potential_duplicates/", + None, + 200, + expected_data, + ) + ) diff --git a/backend/unittests/access_control/api/test_authorization_product_api_tokens.py b/backend/unittests/authorization/api/test_authorization_product_api_tokens.py similarity index 70% rename from backend/unittests/access_control/api/test_authorization_product_api_tokens.py rename to backend/unittests/authorization/api/test_authorization_product_api_tokens.py index b5db86cec..a33595727 100644 --- a/backend/unittests/access_control/api/test_authorization_product_api_tokens.py +++ b/backend/unittests/authorization/api/test_authorization_product_api_tokens.py @@ -1,4 +1,4 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -6,7 +6,7 @@ class TestAuthorizationProductApiTokens(TestAuthorizationBase): def test_authorization_product_api_tokens(self): - expected_data = "{'results': [{'id': 2, 'role': 2}]}" + expected_data = "{'results': [{'id': 1, 'product': 2, 'role': 2, 'name': 'default', 'expiration_date': None}]}" self._test_api( APITest( "db_admin", @@ -29,15 +29,13 @@ def test_authorization_product_api_tokens(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_external", "post", "/api/product_api_tokens/", - {"id": 1, "role": 2}, + {"product": 1, "role": 2, "name": "api_token_name", "expiration_date": None}, 403, expected_data, ) @@ -48,13 +46,15 @@ def test_authorization_product_api_tokens(self): "db_internal_write", "post", "/api/product_api_tokens/", - {"id": 1, "role": 2}, + {"product": 1, "role": 2, "name": 
"api_token_name", "expiration_date": None}, 201, None, ) ) - expected_data = "{'results': [{'id': 1, 'role': 2}]}" + expected_data = ( + "{'results': [{'id': 2, 'product': 1, 'role': 2, 'name': 'api_token_name', 'expiration_date': None}]}" + ) self._test_api( APITest( "db_internal_write", @@ -66,14 +66,12 @@ def test_authorization_product_api_tokens(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_external", "delete", - "/api/product_api_tokens/1/", + "/api/product_api_tokens/2/", None, 403, expected_data, @@ -83,7 +81,7 @@ def test_authorization_product_api_tokens(self): APITest( "db_internal_write", "delete", - "/api/product_api_tokens/1/", + "/api/product_api_tokens/2/", None, 204, None, diff --git a/backend/unittests/access_control/api/test_authorization_product_authorization_group_members.py b/backend/unittests/authorization/api/test_authorization_product_authorization_group_members.py similarity index 93% rename from backend/unittests/access_control/api/test_authorization_product_authorization_group_members.py rename to backend/unittests/authorization/api/test_authorization_product_authorization_group_members.py index cc9824f54..a0e21402c 100644 --- a/backend/unittests/access_control/api/test_authorization_product_authorization_group_members.py +++ b/backend/unittests/authorization/api/test_authorization_product_authorization_group_members.py @@ -1,8 +1,8 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) -from unittests.access_control.services.test_authorization import ( +from unittests.authorization.services.test_authorization import ( prepare_authorization_groups, ) @@ -83,9 +83,7 @@ def test_authorization_product_authorization_group_members(self): ) post_data = {"role": 3, "product": 1, "authorization_group": 8} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", @@ -109,9 +107,7 @@ def test_authorization_product_authorization_group_members(self): ) post_data = {"role": 2} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", @@ -135,9 +131,7 @@ def test_authorization_product_authorization_group_members(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", diff --git a/backend/unittests/authorization/api/test_authorization_product_groups.py b/backend/unittests/authorization/api/test_authorization_product_groups.py new file mode 100644 index 000000000..fba1bc002 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_product_groups.py @@ -0,0 +1,91 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationProductGroups(TestAuthorizationBase): + def 
test_product_groups_authorization_product_member(self): + self._test_product_groups_authorization() + + def test_product_groups_authorization_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_product_groups_authorization() + + def _test_product_groups_authorization(self): + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 3, 'name': 'db_product_group', 'description': '', 'products_count': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0}, {'id': 4, 'name': 'db_product_group_admin_only', 'description': '', 'products_count': 0, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0}]}" + self._test_api(APITest("db_admin", "get", "/api/product_groups/", None, 200, expected_data)) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 3, 'name': 'db_product_group', 'description': '', 'products_count': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 
'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0}]}" + self._test_api( + APITest( + "db_product_group_user", + "get", + "/api/product_groups/", + None, + 200, + expected_data, + ) + ) + expected_data = "{'id': 3, 'name': 'db_product_group', 'description': '', 'products_count': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0}" + self._test_api( + APITest( + "db_product_group_user", + "get", + "/api/product_groups/3/", + None, + 200, + expected_data, + ) + ) + expected_data = "{'message': 'No Product matches the given query.'}" + self._test_api( + APITest( + "db_product_group_user", + "get", + "/api/product_groups/99999/", + None, + 404, + expected_data, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_external", + "post", + "/api/product_groups/", + {"name": "string"}, + 403, + expected_data, + ) + ) + expected_data = "{'id': 5, 'name': 'string', 'description': '', 'products_count': 0, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'active_critical_observation_count': None, 'active_high_observation_count': None, 'active_medium_observation_count': None, 'active_low_observation_count': None, 'active_none_observation_count': None, 
'active_unknown_observation_count': None, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': None, 'review_required_licenses_count': None, 'unknown_licenses_count': None, 'allowed_licenses_count': None, 'ignored_licenses_count': None}" + self._test_api( + APITest( + "db_product_group_user", + "post", + "/api/product_groups/", + { + "name": "string", + }, + 201, + expected_data, + ) + ) + + expected_data = "{'id': 3, 'name': 'db_product_group', 'description': 'string', 'products_count': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'new_observations_in_review': False, 'product_rule_approvals': 0, 'license_policy': None, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0}" + self._test_api( + APITest( + "db_product_group_user", + "patch", + "/api/product_groups/3/", + {"description": "string"}, + 200, + expected_data, + ) + ) diff --git a/backend/unittests/access_control/api/test_authorization_product_members.py b/backend/unittests/authorization/api/test_authorization_product_members.py similarity index 78% rename from backend/unittests/access_control/api/test_authorization_product_members.py rename to backend/unittests/authorization/api/test_authorization_product_members.py index 9ef825ae3..cae0f1496 100644 --- a/backend/unittests/access_control/api/test_authorization_product_members.py +++ b/backend/unittests/authorization/api/test_authorization_product_members.py @@ -1,4 +1,4 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -6,14 +6,10 @@ class TestAuthorizationProductMembers(TestAuthorizationBase): def test_authorization_product_members(self): - expected_data = "{'count': 5, 'next': 
None, 'previous': None, 'results': [{'id': 1, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 5, 'product': 1, 'user': 2}, {'id': 2, 'user_data': {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 1, 'product': 1, 'user': 3}, {'id': 3, 'user_data': {'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False}, 'product_data': {'id': 2, 'name': 'db_product_external', 'is_product_group': False}, 'role': 5, 'product': 2, 'user': 4}, {'id': 4, 'user_data': {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, 'product_data': {'id': 2, 'name': 'db_product_external', 'is_product_group': False}, 'role': 1, 'product': 2, 'user': 3}, {'id': 6, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}, 'product_data': {'id': 3, 'name': 'db_product_group', 'is_product_group': True}, 'role': 5, 'product': 3, 'user': 6}]}" - self._test_api( - APITest( - "db_admin", "get", "/api/product_members/", None, 200, expected_data - ) - ) + expected_data = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'product_data': {'id': 1, 'name': 
'db_product_internal', 'is_product_group': False}, 'role': 5, 'product': 1, 'user': 2}, {'id': 2, 'user_data': {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 1, 'product': 1, 'user': 3}, {'id': 3, 'user_data': {'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False}, 'product_data': {'id': 2, 'name': 'db_product_external', 'is_product_group': False}, 'role': 5, 'product': 2, 'user': 4}, {'id': 4, 'user_data': {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, 'product_data': {'id': 2, 'name': 'db_product_external', 'is_product_group': False}, 'role': 1, 'product': 2, 'user': 3}, {'id': 6, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}, 'product_data': {'id': 3, 'name': 'db_product_group', 'is_product_group': True}, 'role': 5, 'product': 3, 'user': 6}]}" + self._test_api(APITest("db_admin", "get", "/api/product_members/", None, 200, expected_data)) - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 5, 'product': 1, 'user': 2}, {'id': 2, 'user_data': {'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 1, 'product': 1, 'user': 3}]}" + expected_data = 
"{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 5, 'product': 1, 'user': 2}, {'id': 2, 'user_data': {'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 1, 'product': 1, 'user': 3}]}" self._test_api( APITest( "db_internal_write", @@ -26,7 +22,7 @@ def test_authorization_product_members(self): ) ) - expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1, 'user_data': {'id': 2, 'username': 'db_internal_write', 'full_name': 'db_internal_write'}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 5, 'product': 1, 'user': 2}, {'id': 2, 'user_data': {'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 1, 'product': 1, 'user': 3}, {'id': 6, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}, 'product_data': {'id': 3, 'name': 'db_product_group', 'is_product_group': True}, 'role': 5, 'product': 3, 'user': 6}]}" + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1, 'user_data': {'id': 2, 'username': 'db_internal_write', 'full_name': 'db_internal_write'}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 5, 'product': 1, 'user': 2}, {'id': 2, 'user_data': {'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 1, 'product': 1, 'user': 3}, {'id': 6, 'user_data': {'id': 6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}, 'product_data': {'id': 3, 'name': 'db_product_group', 'is_product_group': True}, 'role': 5, 'product': 3, 'user': 6}]}" self._test_api( APITest( "db_product_group_user", @@ -37,7 +33,7 @@ def test_authorization_product_members(self): expected_data, ) ) - expected_data = "{'id': 1, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 
'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 5, 'product': 1, 'user': 2}" + expected_data = "{'id': 1, 'user_data': {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 5, 'product': 1, 'user': 2}" expected_data_product_group = "{'id': 1, 'user_data': {'id': 2, 'username': 'db_internal_write', 'full_name': 'db_internal_write'}, 'product_data': {'id': 1, 'name': 'db_product_internal', 'is_product_group': False}, 'role': 5, 'product': 1, 'user': 2}" self._test_api( APITest( @@ -76,9 +72,7 @@ def test_authorization_product_members(self): ) post_data = {"role": 3, "product": 1, "user": 1} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", @@ -102,9 +96,7 @@ def test_authorization_product_members(self): ) post_data = {"role": 2} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", @@ -128,9 +120,7 @@ def test_authorization_product_members(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", diff --git a/backend/unittests/authorization/api/test_authorization_product_metrics.py b/backend/unittests/authorization/api/test_authorization_product_metrics.py new file mode 100644 index 000000000..ad5859156 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_product_metrics.py @@ -0,0 +1,115 @@ +from datetime import timedelta + +from django.utils import timezone + +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) + + +class TestAuthorizationProductMetrics(TestAuthorizationBase): + def test_authorization_metrics(self): + yesterday = (timezone.now() - timedelta(days=1)).date().isoformat() + today = timezone.now().date().isoformat() + + expected_data = "{'active_critical': 7, 'active_high': 9, 'active_medium': 11, 'active_low': 13, 'active_none': 15, 'active_unknown': 17, 'open': 19, 'affected': 21, 'resolved': 23, 'duplicate': 25, 'false_positive': 27, 'in_review': 29, 'not_affected': 31, 'not_security': 33, 'risk_accepted': 35}" + self._test_api( + APITest( + "db_admin", + "get", + "/api/metrics/product_metrics_current/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'2023-07-09': {'active_critical': 5, 'active_high': 7, 'active_medium': 9, 
'active_low': 11, 'active_none': 13, 'active_unknown': 15, 'open': 17, 'affected': 19, 'resolved': 21, 'duplicate': 23, 'false_positive': 25, 'in_review': 27, 'not_affected': 29, 'not_security': 31, 'risk_accepted': 33}, '2023-07-10': {'active_critical': 7, 'active_high': 9, 'active_medium': 11, 'active_low': 13, 'active_none': 15, 'active_unknown': 17, 'open': 19, 'affected': 21, 'resolved': 23, 'duplicate': 25, 'false_positive': 27, 'in_review': 29, 'not_affected': 31, 'not_security': 33, 'risk_accepted': 35}}" + expected_data = expected_data.replace("2023-07-10", today) + expected_data = expected_data.replace("2023-07-09", yesterday) + self._test_api( + APITest( + "db_admin", + "get", + "/api/metrics/product_metrics_timeline/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'active_critical': 2, 'active_high': 3, 'active_medium': 4, 'active_low': 5, 'active_none': 6, 'active_unknown': 7, 'open': 8, 'affected': 9, 'resolved': 10, 'duplicate': 11, 'false_positive': 12, 'in_review': 13, 'not_affected': 14, 'not_security': 15, 'risk_accepted': 16}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/metrics/product_metrics_current/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'2023-07-09': {'active_critical': 1, 'active_high': 2, 'active_medium': 3, 'active_low': 4, 'active_none': 5, 'active_unknown': 6, 'open': 7, 'affected': 8, 'resolved': 9, 'duplicate': 10, 'false_positive': 11, 'in_review': 12, 'not_affected': 13, 'not_security': 14, 'risk_accepted': 15}, '2023-07-10': {'active_critical': 2, 'active_high': 3, 'active_medium': 4, 'active_low': 5, 'active_none': 6, 'active_unknown': 7, 'open': 8, 'affected': 9, 'resolved': 10, 'duplicate': 11, 'false_positive': 12, 'in_review': 13, 'not_affected': 14, 'not_security': 15, 'risk_accepted': 16}}" + expected_data = expected_data.replace("2023-07-10", today) + expected_data = expected_data.replace("2023-07-09", yesterday) + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/metrics/product_metrics_timeline/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'active_critical': 2, 'active_high': 3, 'active_medium': 4, 'active_low': 5, 'active_none': 6, 'active_unknown': 7, 'open': 8, 'affected': 9, 'resolved': 10, 'duplicate': 11, 'false_positive': 12, 'in_review': 13, 'not_affected': 14, 'not_security': 15, 'risk_accepted': 16}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/metrics/product_metrics_current/?product_id=1", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'2023-07-09': {'active_critical': 1, 'active_high': 2, 'active_medium': 3, 'active_low': 4, 'active_none': 5, 'active_unknown': 6, 'open': 7, 'affected': 8, 'resolved': 9, 'duplicate': 10, 'false_positive': 11, 'in_review': 12, 'not_affected': 13, 'not_security': 14, 'risk_accepted': 15}, '2023-07-10': {'active_critical': 2, 'active_high': 3, 'active_medium': 4, 'active_low': 5, 'active_none': 6, 'active_unknown': 7, 'open': 8, 'affected': 9, 'resolved': 10, 'duplicate': 11, 'false_positive': 12, 'in_review': 13, 'not_affected': 14, 'not_security': 15, 'risk_accepted': 16}}" + expected_data = expected_data.replace("2023-07-10", today) + expected_data = expected_data.replace("2023-07-09", yesterday) + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/metrics/product_metrics_timeline/?product_id=1", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + 
APITest( + "db_internal_write", + "get", + "/api/metrics/product_metrics_current/?product_id=2", + None, + 403, + expected_data, + ) + ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/metrics/product_metrics_timeline/?product_id=2", + None, + 403, + expected_data, + ) + ) diff --git a/backend/unittests/authorization/api/test_authorization_product_rules.py b/backend/unittests/authorization/api/test_authorization_product_rules.py new file mode 100644 index 000000000..6f77d82e3 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_product_rules.py @@ -0,0 +1,151 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationProductRules(TestAuthorizationBase): + def test_authorization_product_rules_product_member(self): + self._test_authorization_product_rules() + + def test_authorization_product_rules_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_authorization_product_rules() + + def _test_authorization_product_rules(self): + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_product_rule_internal', 
'description': '', 'type': 'Fields', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': 'Duplicate', 'new_vex_justification': '', 'rego_module': '', 'enabled': True, 'product': 1, 'parser': 1}, {'id': 2, 'product_data': {'id': 2, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_external', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': 3, 'license_policy': None}, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_product_rule_external', 'description': '', 'type': 'Fields', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': 'False positive', 'new_vex_justification': '', 'rego_module': '', 'enabled': True, 'product': 2, 'parser': 1}]}" + self._test_api(APITest("db_admin", "get", "/api/product_rules/", None, 200, expected_data)) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 
'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_product_rule_internal', 'description': '', 'type': 'Fields', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': 'Duplicate', 'new_vex_justification': '', 'rego_module': '', 'enabled': True, 'product': 1, 'parser': 1}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/product_rules/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': 
None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'user': None, 'approval_status': '', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': None, 'approval_user_full_name': None, 'name': 'db_product_rule_internal', 'description': '', 'type': 'Fields', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': 'Duplicate', 'new_vex_justification': '', 'rego_module': '', 'enabled': True, 'product': 1, 'parser': 1}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/product_rules/1/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'message': 'No Rule matches the given query.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/product_rules/3/", + None, + 404, + expected_data, + ) + ) + + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/product_rules/99999/", + None, + 404, + expected_data, + ) + ) + + post_data = {"name": "string", "product": 1, "parser": 1} + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "post", + "/api/product_rules/", + post_data, + 403, + expected_data, + ) + ) + + expected_data = "{'id': 4, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 
'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'user': 'db_internal_write', 'approval_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'name': 'string', 'description': '', 'type': 'Fields', 'scanner_prefix': '', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': '', 'new_vex_justification': '', 'rego_module': '', 'enabled': True, 'product': 1, 'parser': 1}" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/product_rules/", + post_data, + 201, + expected_data, + ) + ) + + post_data = {"name": "changed", "scanner_prefix": "also_changed"} + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/product_rules/1/", + post_data, + 403, + expected_data, + ) + ) + + expected_data = "{'count': 1, 'results': [{'id': 1, 'product_data': {'id': 1, 'product_group_name': 'db_product_group', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 
'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'branch_name': 'db_branch_internal_dev', 'parser_data': {'id': 1, 'name': 'db_parser_file', 'type': 'DAST', 'source': 'File', 'sbom': False, 'module_name': '', 'class_name': ''}, 'scanner_name': 'db_parser', 'origin_component_name_version': '', 'origin_source_file_short': '', 'origin_source_file_url': None, 'vulnerability_id_aliases': [], 'cve_found_in': [], 'title': 'db_observation_internal', 'description': '', 'recommendation': '', 'current_severity': 'Medium', 'parser_severity': 'Medium', 'rule_severity': '', 'rule_rego_severity': '', 'assessment_severity': '', 'current_status': 'Duplicate', 'parser_status': 'Open', 'vex_status': '', 'rule_status': 'Duplicate', 'rule_rego_status': '', 'assessment_status': '', 'current_priority': None, 'rule_priority': None, 'rule_rego_priority': None, 'assessment_priority': None, 'scanner_observation_id': '', 'vulnerability_id': '', 'origin_component_name': '', 'origin_component_version': '', 'origin_component_purl': '', 'origin_component_purl_type': '', 'origin_component_cpe': '', 'origin_component_cyclonedx_bom_link': '', 'origin_docker_image_name': '', 'origin_docker_image_tag': '', 'origin_docker_image_name_tag': '', 'origin_docker_image_name_tag_short': '', 'origin_docker_image_digest': '', 'origin_endpoint_url': '', 'origin_endpoint_scheme': '', 'origin_endpoint_hostname': '', 'origin_endpoint_port': None, 'origin_endpoint_path': '', 'origin_endpoint_params': '', 'origin_endpoint_query': '', 'origin_endpoint_fragment': '', 'origin_service_name': 'db_service_internal_backend', 'origin_source_file': '', 'origin_source_line_start': None, 'origin_source_line_end': None, 'origin_cloud_provider': '', 'origin_cloud_account_subscription_project': '', 'origin_cloud_resource': '', 'origin_cloud_resource_type': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_cluster': '', 'origin_kubernetes_namespace': '', 'origin_kubernetes_resource_type': '', 'origin_kubernetes_resource_name': '', 'origin_kubernetes_qualified_resource': '', 'cvss3_score': None, 'cvss3_vector': '', 'cvss4_score': None, 'cvss4_vector': '', 'cwe': None, 'epss_score': None, 'epss_percentile': None, 'found': None, 'scanner': 'db_parser', 'upload_filename': 'parser.json', 'api_configuration_name': '', 'import_last_seen': '2022-12-15T17:14:20.870000+01:00', 'created': '2022-12-15T17:10:35.513000+01:00', 'modified': '2022-12-16T17:13:18.282000+01:00', 'last_observation_log': '2022-12-16T17:13:18.281000+01:00', 'identity_hash': '12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be', 'issue_tracker_issue_id': '', 'issue_tracker_issue_closed': False, 'has_potential_duplicates': False, 'current_vex_justification': '', 'parser_vex_justification': '', 'vex_vex_justification': '', 'rule_vex_justification': '', 'rule_rego_vex_justification': '', 'assessment_vex_justification': '', 'risk_acceptance_expiry_date': None, 'update_impact_score': None, 'fix_available': None, 'product': 1, 'branch': 1, 'parser': 1, 'origin_service': 1, 'general_rule': None, 'product_rule': 1, 'general_rule_rego': None, 'product_rule_rego': None, 'vex_statement': None}]}" + self._test_api(APITest("db_internal_write", "post", 
"/api/product_rules/1/simulate/", None, 200, expected_data)) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api(APITest("db_internal_write", "post", "/api/product_rules/2/simulate/", None, 403, expected_data)) + + expected_data = "{'message': 'Not found.'}" + self._test_api(APITest("db_internal_write", "post", "/api/product_rules/3/simulate/", None, 404, expected_data)) + + expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'user': 'db_internal_write', 'approval_status': 'Auto approved', 'approval_remark': '', 'approval_date': None, 'approval_user': None, 'user_full_name': 'db_internal_write', 'approval_user_full_name': None, 'name': 'changed', 'description': '', 'type': 'Fields', 'scanner_prefix': 'also_changed', 'title': '', 'description_observation': '', 'origin_component_name_version': '', 'origin_component_purl': '', 'origin_docker_image_name_tag': '', 'origin_endpoint_url': '', 'origin_service_name': '', 'origin_source_file': '', 'origin_cloud_qualified_resource': '', 'origin_kubernetes_qualified_resource': '', 'new_severity': '', 'new_status': 'Duplicate', 'new_vex_justification': '', 'rego_module': '', 'enabled': True, 'product': 1, 'parser': 1}" + self._test_api( + APITest( + "db_internal_write", + "patch", + "/api/product_rules/1/", + post_data, + 200, + expected_data, + no_second_user=True, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "delete", + "/api/product_rules/1/", + None, + 403, + expected_data, + ) + 
) + + expected_data = "{'message': 'Cannot delete Rule because it still has Observations.'}" + self._test_api( + APITest( + "db_internal_write", + "delete", + "/api/product_rules/1/", + None, + 409, + expected_data, + ) + ) diff --git a/backend/unittests/authorization/api/test_authorization_products.py b/backend/unittests/authorization/api/test_authorization_products.py new file mode 100644 index 000000000..691dfd701 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_products.py @@ -0,0 +1,188 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationProducts(TestAuthorizationBase): + def test_authorization_products_product_member(self): + self._test_authorization_products() + + def test_authorization_products_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_authorization_products() + + def _test_authorization_products(self): + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'product_group_name': 'db_product_group', 'repository_default_branch_name': 'db_branch_internal_dev', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, {'id': 2, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 
'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'product_group_name': '', 'repository_default_branch_name': 'db_branch_external', 'name': 'db_product_external', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': False, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': 3, 'license_policy': None}]}" + self._test_api(APITest("db_admin", "get", "/api/products/", None, 200, expected_data)) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'product_group_name': 'db_product_group', 'repository_default_branch_name': 'db_branch_internal_dev', 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 
'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}]}" + self._test_api(APITest("db_internal_write", "get", "/api/products/", None, 200, expected_data)) + expected_data = "{'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'product_group_name': 'db_product_group', 'repository_default_branch_name': 'db_branch_internal_dev', 'product_group_repository_branch_housekeeping_active': None, 'product_group_security_gate_active': None, 'product_group_assessments_need_approval': False, 'observation_reviews': 0, 'observation_log_approvals': 0, 'has_services': True, 'product_group_product_rules_need_approval': False, 'product_rule_approvals': 0, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'product_group_new_observations_in_review': False, 'has_branches': True, 'has_licenses': True, 'product_group_license_policy': None, 'has_api_configurations': True, 'has_branch_osv_linux_distribution': False, 'has_concluded_comments': False, 'name': 'db_product_internal', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 
'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}" + self._test_api(APITest("db_internal_write", "get", "/api/products/1/", None, 200, expected_data)) + expected_data = "{'message': 'No Product matches the given query.'}" + self._test_api(APITest("db_internal_write", "get", "/api/products/2/", None, 404, expected_data)) + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/products/99999/", + None, + 404, + expected_data, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_external", + "post", + "/api/products/", + {"name": "string"}, + 403, + expected_data, + ) + ) + expected_data = "{'id': 5, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'active_critical_observation_count': None, 'active_high_observation_count': None, 'active_medium_observation_count': None, 'active_low_observation_count': None, 'active_none_observation_count': None, 'active_unknown_observation_count': None, 'forbidden_licenses_count': None, 'review_required_licenses_count': None, 'unknown_licenses_count': None, 'allowed_licenses_count': None, 'ignored_licenses_count': None, 'product_group_name': '', 'repository_default_branch_name': '', 'product_group_repository_branch_housekeeping_active': None, 'product_group_security_gate_active': None, 'product_group_assessments_need_approval': False, 'observation_reviews': 0, 'observation_log_approvals': 0, 'has_services': False, 'product_group_product_rules_need_approval': False, 'product_rule_approvals': 0, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'product_group_new_observations_in_review': False, 'has_branches': False, 'has_licenses': False, 'product_group_license_policy': None, 'has_api_configurations': False, 'has_branch_osv_linux_distribution': False, 'has_concluded_comments': False, 'name': 'string', 'description': '', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': None, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 
'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': None, 'repository_default_branch': None, 'license_policy': None}" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/products/", + { + "name": "string", + "last_observation_change": "2022-12-16T17:13:18.283000+01:00", + }, + 201, + expected_data, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/products/1/", + {"description": "string"}, + 403, + expected_data, + ) + ) + expected_data = "{'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'product_group_name': 'db_product_group', 'repository_default_branch_name': 'db_branch_internal_dev', 'product_group_repository_branch_housekeeping_active': None, 'product_group_security_gate_active': None, 'product_group_assessments_need_approval': False, 'observation_reviews': 0, 'observation_log_approvals': 0, 'has_services': True, 'product_group_product_rules_need_approval': False, 'product_rule_approvals': 0, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'product_group_new_observations_in_review': False, 'has_branches': True, 'has_licenses': True, 'product_group_license_policy': None, 'has_api_configurations': True, 'has_branch_osv_linux_distribution': False, 'has_concluded_comments': False, 'name': 'db_product_internal', 'description': 'string', 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 
'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}" + self._test_api( + APITest( + "db_internal_write", + "patch", + "/api/products/1/", + {"description": "string"}, + 200, + expected_data, + ) + ) + + post_data = None + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "post", + "/api/products/1/apply_rules/", + post_data, + 403, + expected_data, + ) + ) + expected_data = "None" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/products/1/apply_rules/", + post_data, + 204, + expected_data, + ) + ) + + post_data = { + "severity": "Critical", + "status": "Open", + "comment": "string", + "observations": [], + } + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "post", + "/api/products/1/observations_bulk_assessment/", + post_data, + 403, + expected_data, + ) + ) + expected_data = "None" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/products/1/observations_bulk_assessment/", + post_data, + 204, + expected_data, + ) + ) + + post_data = {"observations": []} + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "post", + "/api/products/1/observations_bulk_delete/", + post_data, + 403, + expected_data, + ) + ) + expected_data = "None" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/products/1/observations_bulk_delete/", + post_data, + 204, + expected_data, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "delete", + "/api/products/1/", + None, + 403, + expected_data, + ) + ) + expected_data = ( + "{'message': 'Cannot delete Product because it still has Services, Observations, License_Components.'}" + ) + self._test_api( + APITest( + "db_internal_write", + "delete", + "/api/products/1/", + None, + 409, + expected_data, + ) + ) diff --git a/backend/unittests/authorization/api/test_authorization_services.py b/backend/unittests/authorization/api/test_authorization_services.py new file mode 100644 index 000000000..67af8ead6 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_services.py @@ -0,0 +1,141 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationServices(TestAuthorizationBase): + def test_authorization_services_product_member(self): + self._test_authorization_services() + + def test_authorization_services_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_authorization_services() + + def _test_authorization_services(self): + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1, 'name_with_product': 'db_service_internal_backend (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 
'ignored_licenses_count': 0, 'name': 'db_service_internal_backend', 'product': 1}, {'id': 2, 'name_with_product': 'db_service_internal_frontend (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_service_internal_frontend', 'product': 1}, {'id': 3, 'name_with_product': 'db_service_external (db_product_external)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_service_external', 'product': 2}]}" + self._test_api(APITest("db_admin", "get", "/api/services/", None, 200, expected_data)) + + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'name_with_product': 'db_service_internal_backend (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_service_internal_backend', 'product': 1}, {'id': 2, 'name_with_product': 'db_service_internal_frontend (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_service_internal_frontend', 'product': 1}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/services/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'id': 1, 'name_with_product': 'db_service_internal_backend (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'db_service_internal_backend', 'product': 1}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/services/1/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'message': 'No Service matches the given query.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/services/3/", + None, + 404, + expected_data, + ) + ) + + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/services/99999/", + None, + 404, + expected_data, + ) + ) + + post_data = {"name": "string", "product": 1} + expected_data = "{'message': 'You do not have permission to perform this action.'}" + 
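+ # db_internal_read has no write permission on the product, so the service creation below is expected to be rejected with 403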
self._test_api( + APITest( + "db_internal_read", + "post", + "/api/services/", + post_data, + 403, + expected_data, + ) + ) + + expected_data = "{'id': 4, 'name_with_product': 'string (db_product_internal)', 'name': 'string', 'product': 1}" + self._test_api( + APITest( + "db_internal_write", + "post", + "/api/services/", + post_data, + 201, + expected_data, + ) + ) + + patch_data = {"name": "changed"} + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "patch", + "/api/services/1/", + patch_data, + 403, + expected_data, + ) + ) + + expected_data = "{'id': 1, 'name_with_product': 'changed (db_product_internal)', 'active_critical_observation_count': 0, 'active_high_observation_count': 0, 'active_medium_observation_count': 0, 'active_low_observation_count': 0, 'active_none_observation_count': 0, 'active_unknown_observation_count': 0, 'forbidden_licenses_count': 0, 'review_required_licenses_count': 0, 'unknown_licenses_count': 0, 'allowed_licenses_count': 0, 'ignored_licenses_count': 0, 'name': 'changed', 'product': 1}" + self._test_api( + APITest( + "db_internal_write", + "patch", + "/api/services/1/", + patch_data, + 200, + expected_data, + ) + ) + + expected_data = "{'message': 'You do not have permission to perform this action.'}" + self._test_api( + APITest( + "db_internal_read", + "delete", + "/api/services/1/", + None, + 403, + expected_data, + ) + ) + + expected_data = "{'message': 'Cannot delete Service because it still has Observations.'}" + self._test_api( + APITest( + "db_internal_write", + "delete", + "/api/services/1/", + None, + 409, + expected_data, + ) + ) diff --git a/backend/unittests/access_control/api/test_authorization_settings.py b/backend/unittests/authorization/api/test_authorization_settings.py similarity index 56% rename from backend/unittests/access_control/api/test_authorization_settings.py rename to backend/unittests/authorization/api/test_authorization_settings.py index 533b2a434..35e686d7f 100644 --- a/backend/unittests/access_control/api/test_authorization_settings.py +++ b/backend/unittests/authorization/api/test_authorization_settings.py @@ -1,22 +1,19 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) class TestAuthorizationSettings(TestAuthorizationBase): + def test_authorization_settings(self): - expected_data = "{'id': 1, 'security_gate_active': True, 'security_gate_threshold_critical': 0, 'security_gate_threshold_high': 0, 'security_gate_threshold_medium': 99999, 'security_gate_threshold_low': 99999, 'security_gate_threshold_none': 99999, 'security_gate_threshold_unknown': 99999, 'jwt_validity_duration_user': 168, 'jwt_validity_duration_superuser': 24, 'internal_users': '', 'base_url_frontend': '', 'exception_ms_teams_webhook': '', 'exception_slack_webhook': '', 'exception_rate_limit': 3600, 'email_from': '', 'exception_email_to': '', 'background_product_metrics_interval_minutes': 5, 'background_epss_import_crontab_minute': 0, 'background_epss_import_crontab_hour': 3, 'branch_housekeeping_crontab_minute': 0, 'branch_housekeeping_crontab_hour': 2, 'branch_housekeeping_active': True, 'branch_housekeeping_keep_inactive_days': 30, 'branch_housekeeping_exempt_branches': '', 'feature_vex': False, 'feature_disable_user_login': False, 'feature_general_rules_need_approval': False, 'risk_acceptance_expiry_days': 30, 'risk_acceptance_expiry_crontab_minute': 0, 
'risk_acceptance_expiry_crontab_hour': 1, 'feature_automatic_api_import': True, 'api_import_crontab_minute': 0, 'api_import_crontab_hour': 4, 'password_validator_minimum_length': 8, 'password_validator_attribute_similarity': True, 'password_validator_common_passwords': True, 'password_validator_not_numeric': True, 'feature_license_management': True, 'license_import_crontab_minute': 30, 'license_import_crontab_hour': 1}" - self._test_api( - APITest("db_admin", "get", "/api/settings/1/", None, 200, expected_data) - ) + expected_data = "{'id': 1, 'security_gate_active': True, 'security_gate_threshold_critical': 0, 'security_gate_threshold_high': 0, 'security_gate_threshold_medium': 99999, 'security_gate_threshold_low': 99999, 'security_gate_threshold_none': 99999, 'security_gate_threshold_unknown': 99999, 'jwt_validity_duration_user': 168, 'jwt_validity_duration_superuser': 24, 'internal_users': '', 'base_url_frontend': '', 'exception_ms_teams_webhook': '', 'exception_slack_webhook': '', 'exception_rate_limit': 3600, 'email_from': '', 'exception_email_to': '', 'background_product_metrics_interval_minutes': 5, 'background_epss_import_crontab_minute': 0, 'background_epss_import_crontab_hour': 3, 'branch_housekeeping_crontab_minute': 0, 'branch_housekeeping_crontab_hour': 2, 'branch_housekeeping_active': True, 'branch_housekeeping_keep_inactive_days': 30, 'branch_housekeeping_exempt_branches': '', 'feature_vex': False, 'vex_justification_style': 'CSAF/OpenVEX', 'feature_disable_user_login': False, 'feature_general_rules_need_approval': False, 'risk_acceptance_expiry_days': 30, 'risk_acceptance_expiry_crontab_minute': 0, 'risk_acceptance_expiry_crontab_hour': 1, 'feature_automatic_api_import': True, 'api_import_crontab_minute': 0, 'api_import_crontab_hour': 4, 'password_validator_minimum_length': 8, 'password_validator_attribute_similarity': True, 'password_validator_common_passwords': True, 'password_validator_not_numeric': True, 'feature_license_management': True, 'license_import_crontab_minute': 30, 'license_import_crontab_hour': 1, 'feature_automatic_osv_scanning': True, 'feature_exploit_information': True, 'exploit_information_max_age_years': 10, 'periodic_task_max_entries': 10, 'oidc_clock_skew': 0}" + self._test_api(APITest("db_admin", "get", "/api/settings/1/", None, 200, expected_data)) - self._test_api( - APITest("db_internal_write", "get", "/api/settings/1/", None, 403, None) - ) + self._test_api(APITest("db_internal_write", "get", "/api/settings/1/", None, 403, None)) post_data = {"security_gate_threshold_critical": 1234} - expected_data = "{'id': 1, 'security_gate_active': True, 'security_gate_threshold_critical': 1234, 'security_gate_threshold_high': 0, 'security_gate_threshold_medium': 99999, 'security_gate_threshold_low': 99999, 'security_gate_threshold_none': 99999, 'security_gate_threshold_unknown': 99999, 'jwt_validity_duration_user': 168, 'jwt_validity_duration_superuser': 24, 'internal_users': '', 'base_url_frontend': '', 'exception_ms_teams_webhook': '', 'exception_slack_webhook': '', 'exception_rate_limit': 3600, 'email_from': '', 'exception_email_to': '', 'background_product_metrics_interval_minutes': 5, 'background_epss_import_crontab_minute': 0, 'background_epss_import_crontab_hour': 3, 'branch_housekeeping_crontab_minute': 0, 'branch_housekeeping_crontab_hour': 2, 'branch_housekeeping_active': True, 'branch_housekeeping_keep_inactive_days': 30, 'branch_housekeeping_exempt_branches': '', 'feature_vex': False, 'feature_disable_user_login': False, 
'feature_general_rules_need_approval': False, 'risk_acceptance_expiry_days': 30, 'risk_acceptance_expiry_crontab_minute': 0, 'risk_acceptance_expiry_crontab_hour': 1, 'feature_automatic_api_import': True, 'api_import_crontab_minute': 0, 'api_import_crontab_hour': 4, 'password_validator_minimum_length': 8, 'password_validator_attribute_similarity': True, 'password_validator_common_passwords': True, 'password_validator_not_numeric': True, 'feature_license_management': True, 'license_import_crontab_minute': 30, 'license_import_crontab_hour': 1}" + expected_data = "{'id': 1, 'security_gate_active': True, 'security_gate_threshold_critical': 1234, 'security_gate_threshold_high': 0, 'security_gate_threshold_medium': 99999, 'security_gate_threshold_low': 99999, 'security_gate_threshold_none': 99999, 'security_gate_threshold_unknown': 99999, 'jwt_validity_duration_user': 168, 'jwt_validity_duration_superuser': 24, 'internal_users': '', 'base_url_frontend': '', 'exception_ms_teams_webhook': '', 'exception_slack_webhook': '', 'exception_rate_limit': 3600, 'email_from': '', 'exception_email_to': '', 'background_product_metrics_interval_minutes': 5, 'background_epss_import_crontab_minute': 0, 'background_epss_import_crontab_hour': 3, 'branch_housekeeping_crontab_minute': 0, 'branch_housekeeping_crontab_hour': 2, 'branch_housekeeping_active': True, 'branch_housekeeping_keep_inactive_days': 30, 'branch_housekeeping_exempt_branches': '', 'feature_vex': False, 'vex_justification_style': 'CSAF/OpenVEX', 'feature_disable_user_login': False, 'feature_general_rules_need_approval': False, 'risk_acceptance_expiry_days': 30, 'risk_acceptance_expiry_crontab_minute': 0, 'risk_acceptance_expiry_crontab_hour': 1, 'feature_automatic_api_import': True, 'api_import_crontab_minute': 0, 'api_import_crontab_hour': 4, 'password_validator_minimum_length': 8, 'password_validator_attribute_similarity': True, 'password_validator_common_passwords': True, 'password_validator_not_numeric': True, 'feature_license_management': True, 'license_import_crontab_minute': 30, 'license_import_crontab_hour': 1, 'feature_automatic_osv_scanning': True, 'feature_exploit_information': True, 'exploit_information_max_age_years': 10, 'periodic_task_max_entries': 10, 'oidc_clock_skew': 0}" self._test_api( APITest( "db_admin", diff --git a/backend/unittests/access_control/api/test_authorization_users.py b/backend/unittests/authorization/api/test_authorization_users.py similarity index 71% rename from backend/unittests/access_control/api/test_authorization_users.py rename to backend/unittests/authorization/api/test_authorization_users.py index 7b5e4c5ac..85d6ff321 100644 --- a/backend/unittests/access_control/api/test_authorization_users.py +++ b/backend/unittests/authorization/api/test_authorization_users.py @@ -1,4 +1,4 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -6,13 +6,11 @@ class TestAuthorizationUsers(TestAuthorizationBase): def test_authorization_users(self): - expected_data = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1, 'username': 'db_admin', 'first_name': '', 'last_name': '', 'full_name': 'db_admin', 'email': '', 'is_active': True, 'is_superuser': True, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': 
True}, {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, {'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False}, {'id': 6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}]}" - self._test_api( - APITest("db_admin", "get", "/api/users/", None, 200, expected_data) - ) + expected_data = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1, 'username': 'db_admin', 'first_name': '', 'last_name': '', 'full_name': 'db_admin', 'email': '', 'is_active': True, 'is_superuser': True, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}, {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, {'id': 3, 'username': 'db_internal_read', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_read', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:25:06+01:00', 'has_password': False}, {'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 
'setting_metrics_timespan': 'Week', 'permissions': [], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False}, {'id': 6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}]}" + self._test_api(APITest("db_admin", "get", "/api/users/", None, 200, expected_data)) - expected_data = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1, 'username': 'db_admin', 'full_name': 'db_admin'}, {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, {'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, {'id': 4, 'username': 'db_external', 'full_name': 'db_external'}, {'id': 6, 'username': 'db_product_group_user', 'full_name': 'db_product_group_user'}]}" - expected_data_product_group = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1, 'username': 'db_admin', 'full_name': 'db_admin'}, {'id': 2, 'username': 'db_internal_write', 'full_name': 'db_internal_write'}, {'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, {'id': 4, 'username': 'db_external', 'full_name': 'db_external'}, {'id': 6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}]}" + expected_data = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1, 'username': 'db_admin', 'full_name': 'db_admin'}, {'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False}, {'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, {'id': 4, 'username': 'db_external', 'full_name': 'db_external'}, {'id': 6, 'username': 'db_product_group_user', 'full_name': 'db_product_group_user'}]}" + expected_data_product_group = "{'count': 5, 'next': None, 'previous': None, 'results': [{'id': 1, 'username': 'db_admin', 'full_name': 'db_admin'}, {'id': 2, 'username': 'db_internal_write', 'full_name': 'db_internal_write'}, {'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, {'id': 4, 'username': 'db_external', 'full_name': 'db_external'}, {'id': 
6, 'username': 'db_product_group_user', 'first_name': '', 'last_name': '', 'full_name': 'db_product_group_user', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-04T11:09:18.495000+01:00', 'has_password': True}]}" self._test_api( APITest( "db_internal_write", @@ -27,13 +25,9 @@ def test_authorization_users(self): ) expected_data = "{'id': 1, 'username': 'db_admin', 'full_name': 'db_admin'}" - self._test_api( - APITest( - "db_internal_write", "get", "/api/users/1/", None, 200, expected_data - ) - ) + self._test_api(APITest("db_internal_write", "get", "/api/users/1/", None, 200, expected_data)) - expected_data = "{'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False, 'has_authorization_groups': True, 'has_product_group_members': False, 'has_product_members': True}" + expected_data = "{'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False, 'has_authorization_groups': True, 'has_product_group_members': False, 'has_product_members': True, 'has_api_tokens': False}" expected_data_product_group = "{'id': 2, 'username': 'db_internal_write', 'full_name': 'db_internal_write'}" self._test_api( APITest( @@ -60,24 +54,14 @@ def test_authorization_users(self): ) ) - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, {'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False}]}" - self._test_api( - APITest("db_external", "get", "/api/users/", None, 200, expected_data) - ) - expected_data = "{'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False, 'has_authorization_groups': False, 'has_product_group_members': False, 'has_product_members': True}" - self._test_api( - APITest("db_external", "get", "/api/users/4/", None, 200, expected_data) - ) - expected_data = ( - "{'id': 3, 'username': 
'db_internal_read', 'full_name': 'db_internal_read'}"
-        )
-        self._test_api(
-            APITest("db_external", "get", "/api/users/3/", None, 200, expected_data)
-        )
+        expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}, {'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False}]}"
+        self._test_api(APITest("db_external", "get", "/api/users/", None, 200, expected_data))
+        expected_data = "{'id': 4, 'username': 'db_external', 'first_name': '', 'last_name': '', 'full_name': 'db_external', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': True, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-12T19:48:08.514000+01:00', 'has_password': False, 'has_authorization_groups': False, 'has_product_group_members': False, 'has_product_members': True, 'has_api_tokens': False}"
+        self._test_api(APITest("db_external", "get", "/api/users/4/", None, 200, expected_data))
+        expected_data = "{'id': 3, 'username': 'db_internal_read', 'full_name': 'db_internal_read'}"
+        self._test_api(APITest("db_external", "get", "/api/users/3/", None, 200, expected_data))
         expected_data = "{'message': 'No User matches the given query.'}"
-        self._test_api(
-            APITest("db_external", "get", "/api/users/2/", None, 404, expected_data)
-        )
+        self._test_api(APITest("db_external", "get", "/api/users/2/", None, 404, expected_data))
         expected_data = "{'id': 7, 'username': 'test_user', 'first_name': '', 'last_name': '', 'full_name': 'string', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False}"
         self._test_api(
@@ -95,9 +79,7 @@ def test_authorization_users(self):
             )
         )
-        expected_data = (
-            "{'message': 'You do not have permission to perform this action.'}"
-        )
+        expected_data = "{'message': 'You do not have permission to perform this action.'}"
         self._test_api(
             APITest(
                 "db_internal_write",
@@ -126,9 +108,7 @@ def test_authorization_users(self):
             )
         )
-        expected_data = (
-            "{'message': 'You do not have permission to perform this action.'}"
-        )
+        expected_data = "{'message': 'You do not have permission to perform this action.'}"
         self._test_api(
             APITest(
                 "db_internal_write",
@@ -178,9 +158,7 @@ def test_authorization_users(self):
             )
         )
-        expected_data = (
-            "{'message': \"You are not allowed to change other users' passwords\"}"
-        )
+        expected_data = "{'message': \"You are not allowed to change other users' passwords\"}"
         self._test_api(
             APITest(
                 "db_internal_write",
@@ -214,7 +192,7 @@ def test_authorization_users(self):
             )
         )
-        expected_data = "{'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 
'has_password': False, 'has_authorization_groups': True, 'has_product_group_members': False, 'has_product_members': True}" + expected_data = "{'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'light', 'setting_list_size': 'medium', 'setting_package_info_preference': 'open/source/insights', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False, 'has_authorization_groups': True, 'has_product_group_members': False, 'has_product_members': True, 'has_api_tokens': False}" self._test_api( APITest( "db_internal_write", @@ -227,8 +205,12 @@ def test_authorization_users(self): ) ) - post_data = {"setting_theme": "dark"} - expected_data = "{'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'dark', 'setting_list_size': 'medium', 'permissions': [, ], 'setting_list_properties': '', 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False, 'has_authorization_groups': True, 'has_product_group_members': False, 'has_product_members': True}" + post_data = { + "setting_theme": "dark", + "setting_list_size": "small", + "setting_package_info_preference": "ecosyste.ms", + } + expected_data = "{'id': 2, 'username': 'db_internal_write', 'first_name': '', 'last_name': '', 'full_name': 'db_internal_write', 'email': '', 'is_active': True, 'is_superuser': False, 'is_external': False, 'setting_theme': 'dark', 'setting_list_size': 'small', 'setting_package_info_preference': 'ecosyste.ms', 'setting_metrics_timespan': 'Week', 'permissions': [, ], 'oidc_groups_hash': '', 'is_oidc_user': False, 'date_joined': '2022-12-07T20:24:53+01:00', 'has_password': False, 'has_authorization_groups': True, 'has_product_group_members': False, 'has_product_members': True, 'has_api_tokens': False}" self._test_api( APITest( "db_internal_write", @@ -242,9 +224,7 @@ def test_authorization_users(self): ) post_data = {"setting_theme": "medium"} - expected_data = ( - "{'message': 'Setting theme: \"medium\" is not a valid choice.'}" - ) + expected_data = "{'message': 'Setting theme: \"medium\" is not a valid choice.'}" self._test_api( APITest( "db_internal_write", diff --git a/backend/unittests/access_control/api/test_authorization_vex_counters.py b/backend/unittests/authorization/api/test_authorization_vex_counters.py similarity index 78% rename from backend/unittests/access_control/api/test_authorization_vex_counters.py rename to backend/unittests/authorization/api/test_authorization_vex_counters.py index 3ddf5b2a7..6ac784934 100644 --- a/backend/unittests/access_control/api/test_authorization_vex_counters.py +++ b/backend/unittests/authorization/api/test_authorization_vex_counters.py @@ -1,4 +1,4 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) @@ -18,9 +18,7 @@ def test_authorization_vex_counters(self): ) ) - expected_data = ( - "{'id': 1, 'document_id_prefix': 'prefix', 'year': 2024, 'counter': 2}" - ) + expected_data = "{'id': 1, 'document_id_prefix': 'prefix', 'year': 2024, 'counter': 2}" self._test_api( APITest( "db_internal_write", @@ -45,9 +43,7 @@ def 
test_authorization_vex_counters(self): ) post_data = {"document_id_prefix": "string", "year": 2024} - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_write", @@ -59,9 +55,7 @@ def test_authorization_vex_counters(self): ) ) - expected_data = ( - "{'id': 2, 'document_id_prefix': 'string', 'year': 2024, 'counter': 0}" - ) + expected_data = "{'id': 2, 'document_id_prefix': 'string', 'year': 2024, 'counter': 0}" self._test_api( APITest( "db_admin", @@ -73,9 +67,7 @@ def test_authorization_vex_counters(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_write", @@ -87,9 +79,7 @@ def test_authorization_vex_counters(self): ) ) - expected_data = ( - "{'id': 1, 'document_id_prefix': 'prefix', 'year': 2024, 'counter': 7}" - ) + expected_data = "{'id': 1, 'document_id_prefix': 'prefix', 'year': 2024, 'counter': 7}" self._test_api( APITest( "db_admin", @@ -101,9 +91,7 @@ def test_authorization_vex_counters(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_write", diff --git a/backend/unittests/access_control/api/test_authorization_vex_csaf.py b/backend/unittests/authorization/api/test_authorization_vex_csaf.py similarity index 64% rename from backend/unittests/access_control/api/test_authorization_vex_csaf.py rename to backend/unittests/authorization/api/test_authorization_vex_csaf.py index b041a716b..9b8f47b24 100644 --- a/backend/unittests/access_control/api/test_authorization_vex_csaf.py +++ b/backend/unittests/authorization/api/test_authorization_vex_csaf.py @@ -1,8 +1,8 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) -from unittests.access_control.services.test_authorization import ( +from unittests.authorization.services.test_authorization import ( prepare_authorization_groups, ) @@ -16,12 +16,10 @@ def test_authorization_csaf_product_authorization_group_member(self): self._test_authorization_csaf() def _test_authorization_csaf(self): - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 
'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'revisions': [], 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'csaf_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'title': 'csaf_title_product', 'tlp_label': 'AMBER', 'tracking_initial_release_date': '2022-12-15T17:10:35.513000+01:00', 'tracking_current_release_date': '2022-12-16T17:13:18.282000+01:00', 'tracking_status': 'final', 'publisher_name': 'publisher name', 'publisher_category': 'publisher category', 'publisher_namespace': 'https://publisher.namespace', 'user': 4, 'product': 1}, {'id': 2, 'product_data': None, 'revisions': [], 'vulnerability_names': 'CVE_vulnerability', 'branch_names': 'db_branch_internal_main', 'user_full_name': 'db_external', 'document_id_prefix': 'csaf_prefix', 'document_base_id': '2024_0002', 'version': 1, 'content_hash': 'abcdef123456', 'title': 'csaf_title_vulnerability', 'tlp_label': 'RED', 'tracking_initial_release_date': '2022-12-15T17:10:35.513000+01:00', 'tracking_current_release_date': '2022-12-16T17:13:18.282000+01:00', 'tracking_status': 'final', 'publisher_name': 'publisher name', 'publisher_category': 'publisher category', 'publisher_namespace': 'https://publisher.namespace', 'user': 4, 'product': None}]}" - self._test_api( - APITest("db_admin", "get", "/api/vex/csaf/", None, 200, expected_data) - ) + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': 
'', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'revisions': [], 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'csaf_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'title': 'csaf_title_product', 'tlp_label': 'AMBER', 'tracking_initial_release_date': '2022-12-15T17:10:35.513000+01:00', 'tracking_current_release_date': '2022-12-16T17:13:18.282000+01:00', 'tracking_status': 'final', 'publisher_name': 'publisher name', 'publisher_category': 'publisher category', 'publisher_namespace': 'https://publisher.namespace', 'user': 4, 'product': 1}, {'id': 2, 'product_data': None, 'revisions': [], 'vulnerability_names': 'CVE_vulnerability', 'branch_names': 'db_branch_internal_main', 'user_full_name': 'db_external', 'document_id_prefix': 'csaf_prefix', 'document_base_id': '2024_0002', 'version': 1, 'content_hash': 'abcdef123456', 'title': 'csaf_title_vulnerability', 'tlp_label': 'RED', 'tracking_initial_release_date': '2022-12-15T17:10:35.513000+01:00', 'tracking_current_release_date': '2022-12-16T17:13:18.282000+01:00', 'tracking_status': 'final', 'publisher_name': 'publisher name', 'publisher_category': 'publisher category', 'publisher_namespace': 'https://publisher.namespace', 'user': 4, 'product': None}]}" + self._test_api(APITest("db_admin", "get", "/api/vex/csaf/", None, 200, expected_data)) - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 
'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'revisions': [], 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'csaf_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'title': 'csaf_title_product', 'tlp_label': 'AMBER', 'tracking_initial_release_date': '2022-12-15T17:10:35.513000+01:00', 'tracking_current_release_date': '2022-12-16T17:13:18.282000+01:00', 'tracking_status': 'final', 'publisher_name': 'publisher name', 'publisher_category': 'publisher category', 'publisher_namespace': 'https://publisher.namespace', 'user': 4, 'product': 1}]}" + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'revisions': [], 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'csaf_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'title': 
'csaf_title_product', 'tlp_label': 'AMBER', 'tracking_initial_release_date': '2022-12-15T17:10:35.513000+01:00', 'tracking_current_release_date': '2022-12-16T17:13:18.282000+01:00', 'tracking_status': 'final', 'publisher_name': 'publisher name', 'publisher_category': 'publisher category', 'publisher_namespace': 'https://publisher.namespace', 'user': 4, 'product': 1}]}" self._test_api( APITest( "db_internal_write", @@ -33,7 +31,7 @@ def _test_authorization_csaf(self): ) ) - expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'revisions': [], 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'csaf_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'title': 'csaf_title_product', 'tlp_label': 'AMBER', 'tracking_initial_release_date': '2022-12-15T17:10:35.513000+01:00', 'tracking_current_release_date': '2022-12-16T17:13:18.282000+01:00', 'tracking_status': 'final', 'publisher_name': 'publisher name', 'publisher_category': 'publisher category', 'publisher_namespace': 'https://publisher.namespace', 'user': 4, 'product': 1}" + expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 
'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'revisions': [], 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'csaf_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'title': 'csaf_title_product', 'tlp_label': 'AMBER', 'tracking_initial_release_date': '2022-12-15T17:10:35.513000+01:00', 'tracking_current_release_date': '2022-12-16T17:13:18.282000+01:00', 'tracking_status': 'final', 'publisher_name': 'publisher name', 'publisher_category': 'publisher category', 'publisher_namespace': 'https://publisher.namespace', 'user': 4, 'product': 1}" self._test_api( APITest( "db_internal_write", @@ -80,9 +78,7 @@ def _test_authorization_csaf(self): ) ) - expected_data = ( - "{'id': 1, 'name': 'db_branch_internal_dev', 'csaf': 1, 'branch': 1}" - ) + expected_data = "{'id': 1, 'name': 'db_branch_internal_dev', 'csaf': 1, 'branch': 1}" self._test_api( APITest( "db_internal_write", @@ -118,9 +114,7 @@ def _test_authorization_csaf(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", @@ -148,9 +142,7 @@ def _test_authorization_csaf(self): class TestAuthorizationCSAF_User(TestAuthorizationBase): def test_authorization_csaf(self): expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 2, 'product_data': None, 'revisions': [], 'vulnerability_names': 'CVE_vulnerability', 'branch_names': 'db_branch_internal_main', 'user_full_name': 'db_external', 'document_id_prefix': 'csaf_prefix', 'document_base_id': '2024_0002', 'version': 1, 'content_hash': 'abcdef123456', 'title': 'csaf_title_vulnerability', 'tlp_label': 'RED', 'tracking_initial_release_date': '2022-12-15T17:10:35.513000+01:00', 'tracking_current_release_date': '2022-12-16T17:13:18.282000+01:00', 'tracking_status': 'final', 'publisher_name': 'publisher name', 'publisher_category': 'publisher category', 
'publisher_namespace': 'https://publisher.namespace', 'user': 4, 'product': None}]}" - self._test_api( - APITest("db_external", "get", "/api/vex/csaf/", None, 200, expected_data) - ) + self._test_api(APITest("db_external", "get", "/api/vex/csaf/", None, 200, expected_data)) expected_data = "{'message': 'No CSAF matches the given query.'}" self._test_api( diff --git a/backend/unittests/authorization/api/test_authorization_vex_cyclonedx.py b/backend/unittests/authorization/api/test_authorization_vex_cyclonedx.py new file mode 100644 index 000000000..bca74f6d2 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_vex_cyclonedx.py @@ -0,0 +1,241 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationCycloneDX_Product(TestAuthorizationBase): + def test_authorization_cyclonedx_product_member(self): + self._test_authorization_cyclonedx() + + def test_authorization_cyclonedx_product_authorization_group_member(self): + prepare_authorization_groups() + self._test_authorization_cyclonedx() + + def _test_authorization_cyclonedx(self): + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'cyclonedx_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'author': 
'author', 'manufacturer': 'manufacturer', 'first_issued': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': 1}, {'id': 2, 'product_data': None, 'vulnerability_names': 'CVE_vulnerability', 'branch_names': 'db_branch_internal_main', 'user_full_name': 'db_external', 'document_id_prefix': 'cyclonedx_prefix', 'document_base_id': '2024_0002', 'version': 1, 'content_hash': 'abcdef123456', 'author': 'author', 'manufacturer': 'manufacturer', 'first_issued': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': None}]}" + self._test_api(APITest("db_admin", "get", "/api/vex/cyclonedx/", None, 200, expected_data)) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'cyclonedx_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'author': 'author', 'manufacturer': 'manufacturer', 'first_issued': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': 1}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/vex/cyclonedx/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 
'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'cyclonedx_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'author': 'author', 'manufacturer': 'manufacturer', 'first_issued': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': 1}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/vex/cyclonedx/1/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'message': 'No CycloneDX matches the given query.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/vex/cyclonedx/2/", + None, + 404, + expected_data, + ) + ) + + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/vex/cyclonedx/99999/", + None, + 404, + expected_data, + ) + ) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'name': 'db_branch_internal_dev', 'cyclonedx': 1, 'branch': 1}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/vex/cyclonedx_branches/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'id': 1, 'name': 'db_branch_internal_dev', 'cyclonedx': 1, 'branch': 1}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/vex/cyclonedx_branches/1/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 2, 'name': 'CVE_vulnerability_1', 'cyclonedx': 1}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/vex/cyclonedx_vulnerabilities/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'message': 
'No CycloneDX_Vulnerability matches the given query.'}"
+        self._test_api(
+            APITest(
+                "db_internal_write",
+                "get",
+                "/api/vex/cyclonedx_vulnerabilities/1/",
+                None,
+                404,
+                expected_data,
+            )
+        )
+
+        expected_data = "{'message': 'You do not have permission to perform this action.'}"
+        self._test_api(
+            APITest(
+                "db_internal_read",
+                "delete",
+                "/api/vex/cyclonedx/1/",
+                None,
+                403,
+                expected_data,
+            )
+        )
+
+        expected_data = "None"
+        self._test_api(
+            APITest(
+                "db_internal_write",
+                "delete",
+                "/api/vex/cyclonedx/1/",
+                None,
+                204,
+                expected_data,
+            )
+        )
+
+
+class TestAuthorizationCycloneDX_User(TestAuthorizationBase):
+    def test_authorization_cyclonedx(self):
+        expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 2, 'product_data': None, 'vulnerability_names': 'CVE_vulnerability', 'branch_names': 'db_branch_internal_main', 'user_full_name': 'db_external', 'document_id_prefix': 'cyclonedx_prefix', 'document_base_id': '2024_0002', 'version': 1, 'content_hash': 'abcdef123456', 'author': 'author', 'manufacturer': 'manufacturer', 'first_issued': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': None}]}"
+        self._test_api(APITest("db_external", "get", "/api/vex/cyclonedx/", None, 200, expected_data))
+
+        expected_data = "{'message': 'No CycloneDX matches the given query.'}"
+        self._test_api(
+            APITest(
+                "db_external",
+                "get",
+                "/api/vex/cyclonedx/1/",
+                None,
+                404,
+                expected_data,
+            )
+        )
+
+        expected_data = "{'id': 2, 'product_data': None, 'vulnerability_names': 'CVE_vulnerability', 'branch_names': 'db_branch_internal_main', 'user_full_name': 'db_external', 'document_id_prefix': 'cyclonedx_prefix', 'document_base_id': '2024_0002', 'version': 1, 'content_hash': 'abcdef123456', 'author': 'author', 'manufacturer': 'manufacturer', 'first_issued': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': None}"
+        self._test_api(
+            APITest(
+                "db_external",
+                "get",
+                "/api/vex/cyclonedx/2/",
+                None,
+                200,
+                expected_data,
+            )
+        )
+
+        expected_data = "{'message': 'No CycloneDX matches the given query.'}"
+        self._test_api(
+            APITest(
+                "db_external",
+                "get",
+                "/api/vex/cyclonedx/99999/",
+                None,
+                404,
+                expected_data,
+            )
+        )
+
+        expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 2, 'name': 'db_branch_internal_main', 'cyclonedx': 2, 'branch': 2}]}"
+        self._test_api(
+            APITest(
+                "db_external",
+                "get",
+                "/api/vex/cyclonedx_branches/",
+                None,
+                200,
+                expected_data,
+            )
+        )
+
+        expected_data = "{'message': 'No CycloneDX_Branch matches the given query.'}"
+        self._test_api(
+            APITest(
+                "db_external",
+                "get",
+                "/api/vex/cyclonedx_branches/1/",
+                None,
+                404,
+                expected_data,
+            )
+        )
+
+        expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'name': 'CVE_vulnerability', 'cyclonedx': 2}]}"
+        self._test_api(
+            APITest(
+                "db_external",
+                "get",
+                "/api/vex/cyclonedx_vulnerabilities/",
+                None,
+                200,
+                expected_data,
+            )
+        )
+
+        expected_data = "{'id': 1, 'name': 'CVE_vulnerability', 'cyclonedx': 2}"
+        self._test_api(
+            APITest(
+                "db_external",
+                "get",
+                "/api/vex/cyclonedx_vulnerabilities/1/",
+                None,
+                200,
+                expected_data,
+            )
+        )
+
+        expected_data = "None"
+        self._test_api(
+            APITest(
+                "db_external",
+                "delete",
+                "/api/vex/cyclonedx/2/",
+                None,
+                204,
+                expected_data,
+            )
+        )
diff --git a/backend/unittests/access_control/api/test_authorization_vex_openvex.py 
b/backend/unittests/authorization/api/test_authorization_vex_openvex.py similarity index 62% rename from backend/unittests/access_control/api/test_authorization_vex_openvex.py rename to backend/unittests/authorization/api/test_authorization_vex_openvex.py index b46d05e55..289c19ae7 100644 --- a/backend/unittests/access_control/api/test_authorization_vex_openvex.py +++ b/backend/unittests/authorization/api/test_authorization_vex_openvex.py @@ -1,8 +1,8 @@ -from unittests.access_control.api.test_authorization import ( +from unittests.authorization.api.test_authorization import ( APITest, TestAuthorizationBase, ) -from unittests.access_control.services.test_authorization import ( +from unittests.authorization.services.test_authorization import ( prepare_authorization_groups, ) @@ -16,12 +16,10 @@ def test_authorization_openvex_product_authorization_group_member(self): self._test_authorization_openvex() def _test_authorization_openvex(self): - expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'openvex_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'id_namespace': 'https://id.namespace', 'author': 'author', 'role': 'role', 'timestamp': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': 1}, {'id': 2, 'product_data': None, 'vulnerability_names': 'CVE_vulnerability', 'branch_names': 'db_branch_internal_main', 'user_full_name': 'db_external', 'document_id_prefix': 'openvex_prefix', 'document_base_id': '2024_0002', 
'version': 1, 'content_hash': 'abcdef123456', 'id_namespace': 'https://id.namespace', 'author': 'author', 'role': 'role', 'timestamp': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': None}]}" - self._test_api( - APITest("db_admin", "get", "/api/vex/openvex/", None, 200, expected_data) - ) + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'openvex_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'id_namespace': 'https://id.namespace', 'author': 'author', 'role': 'role', 'timestamp': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': 1}, {'id': 2, 'product_data': None, 'vulnerability_names': 'CVE_vulnerability', 'branch_names': 'db_branch_internal_main', 'user_full_name': 'db_external', 'document_id_prefix': 'openvex_prefix', 'document_base_id': '2024_0002', 'version': 1, 'content_hash': 'abcdef123456', 'id_namespace': 'https://id.namespace', 'author': 'author', 'role': 'role', 'timestamp': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': None}]}" + self._test_api(APITest("db_admin", "get", "/api/vex/openvex/", None, 200, expected_data)) - expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 
'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'openvex_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'id_namespace': 'https://id.namespace', 'author': 'author', 'role': 'role', 'timestamp': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': 1}]}" + expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': 
'', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'openvex_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'id_namespace': 'https://id.namespace', 'author': 'author', 'role': 'role', 'timestamp': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': 1}]}" self._test_api( APITest( "db_internal_write", @@ -33,7 +31,7 @@ def _test_authorization_openvex(self): ) ) - expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'openvex_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'id_namespace': 'https://id.namespace', 'author': 'author', 'role': 'role', 'timestamp': 
'2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': 1}" + expected_data = "{'id': 1, 'product_data': {'id': 1, 'permissions': {, , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , }, 'product_group_assessments_need_approval': False, 'product_group_product_rules_need_approval': False, 'risk_acceptance_expiry_date_calculated': datetime.date(2024, 7, 1), 'name': 'db_product_internal', 'description': '', 'is_product_group': False, 'purl': '', 'cpe23': '', 'repository_prefix': '', 'repository_branch_housekeeping_active': None, 'repository_branch_housekeeping_keep_inactive_days': None, 'repository_branch_housekeeping_exempt_branches': '', 'security_gate_passed': True, 'security_gate_active': None, 'security_gate_threshold_critical': None, 'security_gate_threshold_high': None, 'security_gate_threshold_medium': None, 'security_gate_threshold_low': None, 'security_gate_threshold_none': None, 'security_gate_threshold_unknown': None, 'apply_general_rules': True, 'notification_ms_teams_webhook': '', 'notification_slack_webhook': '', 'notification_email_to': '', 'issue_tracker_active': False, 'issue_tracker_type': '', 'issue_tracker_base_url': '', 'issue_tracker_username': '', 'issue_tracker_api_key': '', 'issue_tracker_project_id': '', 'issue_tracker_labels': '', 'issue_tracker_issue_type': '', 'issue_tracker_status_closed': '', 'issue_tracker_minimum_severity': '', 'last_observation_change': '2022-12-16T17:13:18.283000+01:00', 'assessments_need_approval': False, 'new_observations_in_review': False, 'product_rules_need_approval': False, 'risk_acceptance_expiry_active': None, 'risk_acceptance_expiry_days': None, 'osv_enabled': True, 'osv_linux_distribution': '', 'osv_linux_release': '', 'automatic_osv_scanning_enabled': False, 'has_cloud_resource': False, 'has_component': False, 'has_docker_image': False, 'has_endpoint': False, 'has_kubernetes_resource': False, 'has_source': False, 'has_potential_duplicates': False, 'product_group': 3, 'repository_default_branch': 1, 'license_policy': None}, 'vulnerability_names': 'CVE_vulnerability_1', 'branch_names': 'db_branch_internal_dev', 'user_full_name': 'db_external', 'document_id_prefix': 'openvex_prefix', 'document_base_id': '2024_0001', 'version': 1, 'content_hash': 'abcdef123456', 'id_namespace': 'https://id.namespace', 'author': 'author', 'role': 'role', 'timestamp': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': 1}" self._test_api( APITest( "db_internal_write", @@ -80,9 +78,7 @@ def _test_authorization_openvex(self): ) ) - expected_data = ( - "{'id': 1, 'name': 'db_branch_internal_dev', 'openvex': 1, 'branch': 1}" - ) + expected_data = "{'id': 1, 'name': 'db_branch_internal_dev', 'openvex': 1, 'branch': 1}" self._test_api( APITest( "db_internal_write", @@ -106,9 +102,7 @@ def _test_authorization_openvex(self): ) ) - expected_data = ( - "{'message': 'No OpenVEX_Vulnerability matches the given query.'}" - ) + expected_data = "{'message': 'No OpenVEX_Vulnerability matches the given query.'}" self._test_api( APITest( "db_internal_write", @@ -120,9 +114,7 @@ def _test_authorization_openvex(self): ) ) - expected_data = ( - "{'message': 'You do not have permission to perform this action.'}" - ) + expected_data = "{'message': 'You do not have permission to perform this action.'}" self._test_api( APITest( "db_internal_read", @@ -150,9 +142,7 @@ def _test_authorization_openvex(self): class 
TestAuthorizationOpenVEX_User(TestAuthorizationBase): def test_authorization_openvex(self): expected_data = "{'count': 1, 'next': None, 'previous': None, 'results': [{'id': 2, 'product_data': None, 'vulnerability_names': 'CVE_vulnerability', 'branch_names': 'db_branch_internal_main', 'user_full_name': 'db_external', 'document_id_prefix': 'openvex_prefix', 'document_base_id': '2024_0002', 'version': 1, 'content_hash': 'abcdef123456', 'id_namespace': 'https://id.namespace', 'author': 'author', 'role': 'role', 'timestamp': '2022-12-15T17:10:35.513000+01:00', 'last_updated': '2022-12-16T17:13:18.282000+01:00', 'user': 4, 'product': None}]}" - self._test_api( - APITest("db_external", "get", "/api/vex/openvex/", None, 200, expected_data) - ) + self._test_api(APITest("db_external", "get", "/api/vex/openvex/", None, 200, expected_data)) expected_data = "{'message': 'No OpenVEX matches the given query.'}" self._test_api( diff --git a/backend/unittests/authorization/api/test_authorization_vulnerability_checks.py b/backend/unittests/authorization/api/test_authorization_vulnerability_checks.py new file mode 100644 index 000000000..50c673905 --- /dev/null +++ b/backend/unittests/authorization/api/test_authorization_vulnerability_checks.py @@ -0,0 +1,78 @@ +from unittests.authorization.api.test_authorization import ( + APITest, + TestAuthorizationBase, +) +from unittests.authorization.services.test_authorization import ( + prepare_authorization_groups, +) + + +class TestAuthorizationVulnerabilityChecks(TestAuthorizationBase): + def test_authorization_vulnerability_checks_product_member(self): + self._test_authorization_vulnerability_checks() + + def test_authorization_vulnerability_checks_product_authorization_group_member( + self, + ): + prepare_authorization_groups() + self._test_authorization_vulnerability_checks() + + def _test_authorization_vulnerability_checks(self): + expected_data = "{'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1, 'branch_name': '', 'service_name': '', 'scanner_name': 'scanner_internal_no_branch', 'filename': 'filename_internal_no_branch', 'api_configuration_name': '', 'scanner': 'scanner_internal_no_branch / 1.0.0', 'first_import': '2022-12-15T17:10:35.521000+01:00', 'last_import': '2022-12-15T17:10:35.854000+01:00', 'last_import_observations_new': 1, 'last_import_observations_updated': 2, 'last_import_observations_resolved': 3, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 1, 'branch': None, 'service': None}, {'id': 2, 'branch_name': 'db_branch_internal_dev', 'service_name': '', 'scanner_name': 'scanner_internal_dev', 'filename': '', 'api_configuration_name': 'api_configuration_internal_dev', 'scanner': 'scanner_internal_dev', 'first_import': '2022-12-16T17:10:35.521000+01:00', 'last_import': '2022-12-16T17:10:35.854000+01:00', 'last_import_observations_new': 4, 'last_import_observations_updated': 5, 'last_import_observations_resolved': 6, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 1, 'branch': 1, 'service': None}, {'id': 3, 'branch_name': 'db_branch_external', 'service_name': '', 'scanner_name': 'scanner_external', 'filename': 'filename_external', 'api_configuration_name': '', 'scanner': 'scanner_external', 'first_import': '2022-12-17T17:10:35.521000+01:00', 'last_import': '2022-12-17T17:10:35.854000+01:00', 'last_import_observations_new': 7, 'last_import_observations_updated': 8, 
'last_import_observations_resolved': 9, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 2, 'branch': 3, 'service': None}]}" + self._test_api( + APITest( + "db_admin", + "get", + "/api/vulnerability_checks/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'branch_name': '', 'service_name': '', 'scanner_name': 'scanner_internal_no_branch', 'filename': 'filename_internal_no_branch', 'api_configuration_name': '', 'scanner': 'scanner_internal_no_branch / 1.0.0', 'first_import': '2022-12-15T17:10:35.521000+01:00', 'last_import': '2022-12-15T17:10:35.854000+01:00', 'last_import_observations_new': 1, 'last_import_observations_updated': 2, 'last_import_observations_resolved': 3, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 1, 'branch': None, 'service': None}, {'id': 2, 'branch_name': 'db_branch_internal_dev', 'service_name': '', 'scanner_name': 'scanner_internal_dev', 'filename': '', 'api_configuration_name': 'api_configuration_internal_dev', 'scanner': 'scanner_internal_dev', 'first_import': '2022-12-16T17:10:35.521000+01:00', 'last_import': '2022-12-16T17:10:35.854000+01:00', 'last_import_observations_new': 4, 'last_import_observations_updated': 5, 'last_import_observations_resolved': 6, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 1, 'branch': 1, 'service': None}]}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/vulnerability_checks/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'id': 1, 'branch_name': '', 'service_name': '', 'scanner_name': 'scanner_internal_no_branch', 'filename': 'filename_internal_no_branch', 'api_configuration_name': '', 'scanner': 'scanner_internal_no_branch / 1.0.0', 'first_import': '2022-12-15T17:10:35.521000+01:00', 'last_import': '2022-12-15T17:10:35.854000+01:00', 'last_import_observations_new': 1, 'last_import_observations_updated': 2, 'last_import_observations_resolved': 3, 'last_import_licenses_new': None, 'last_import_licenses_updated': None, 'last_import_licenses_deleted': None, 'product': 1, 'branch': None, 'service': None}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/vulnerability_checks/1/", + None, + 200, + expected_data, + ) + ) + + expected_data = "{'message': 'No Vulnerability_Check matches the given query.'}" + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/vulnerability_checks/3/", + None, + 404, + expected_data, + ) + ) + + self._test_api( + APITest( + "db_internal_write", + "get", + "/api/vulnerability_checks/99999/", + None, + 404, + expected_data, + ) + ) diff --git a/backend/unittests/access_control/api/test_permissions.py b/backend/unittests/authorization/api/test_permissions.py similarity index 83% rename from backend/unittests/access_control/api/test_permissions.py rename to backend/unittests/authorization/api/test_permissions.py index 7df54f774..40b40d07d 100644 --- a/backend/unittests/access_control/api/test_permissions.py +++ b/backend/unittests/authorization/api/test_permissions.py @@ -3,11 +3,11 @@ from django.http import Http404, HttpRequest from rest_framework.exceptions import ParseError -from application.access_control.api.permissions_base import ( +from application.authorization.api.permissions_base import ( check_object_permission, 
check_post_permission, ) -from application.access_control.services.roles_permissions import Permissions +from application.authorization.services.roles_permissions import Permissions from application.core.models import Product from unittests.base_test_case import BaseTestCase @@ -32,7 +32,7 @@ def test_check_post_permission_no_foreign_key(self): str(e.exception), ) - @patch("application.access_control.api.permissions_base.get_object_or_404") + @patch("application.authorization.api.permissions_base.get_object_or_404") def test_check_post_permission_foreign_key_not_found(self, mock): mock.side_effect = Http404() request = HttpRequest() @@ -43,8 +43,8 @@ def test_check_post_permission_foreign_key_not_found(self, mock): mock.assert_called_with(Product, pk=1) - @patch("application.access_control.api.permissions_base.get_object_or_404") - @patch("application.access_control.api.permissions_base.user_has_permission") + @patch("application.authorization.api.permissions_base.get_object_or_404") + @patch("application.authorization.api.permissions_base.user_has_permission") def test_check_post_permission_successful(self, permission_mock, get_mock): get_mock.return_value = self.product_1 permission_mock.return_value = True @@ -52,19 +52,13 @@ def test_check_post_permission_successful(self, permission_mock, get_mock): request.method = "POST" request.data = {"product": 1} - self.assertTrue( - check_post_permission( - request, Product, "product", Permissions.Product_Member_Create - ) - ) + self.assertTrue(check_post_permission(request, Product, "product", Permissions.Product_Member_Create)) get_mock.assert_called_with(Product, pk=1) - permission_mock.assert_called_with( - self.product_1, Permissions.Product_Member_Create - ) + permission_mock.assert_called_with(self.product_1, Permissions.Product_Member_Create) # --- check_object_permission --- - @patch("application.access_control.api.permissions_base.user_has_permission") + @patch("application.authorization.api.permissions_base.user_has_permission") def test_check_object_permission_get(self, mock): mock.return_value = True request = HttpRequest() @@ -82,7 +76,7 @@ def test_check_object_permission_get(self, mock): ) mock.assert_called_with(self.product_1, Permissions.Product_View) - @patch("application.access_control.api.permissions_base.user_has_permission") + @patch("application.authorization.api.permissions_base.user_has_permission") def test_check_object_permission_put(self, mock): mock.return_value = True request = HttpRequest() @@ -100,7 +94,7 @@ def test_check_object_permission_put(self, mock): ) mock.assert_called_with(self.product_1, Permissions.Product_Edit) - @patch("application.access_control.api.permissions_base.user_has_permission") + @patch("application.authorization.api.permissions_base.user_has_permission") def test_check_object_permission_patch(self, mock): mock.return_value = True request = HttpRequest() @@ -118,7 +112,7 @@ def test_check_object_permission_patch(self, mock): ) mock.assert_called_with(self.product_1, Permissions.Product_Edit) - @patch("application.access_control.api.permissions_base.user_has_permission") + @patch("application.authorization.api.permissions_base.user_has_permission") def test_check_object_permission_delete(self, mock): mock.return_value = True request = HttpRequest() @@ -136,7 +130,7 @@ def test_check_object_permission_delete(self, mock): ) mock.assert_called_with(self.product_1, Permissions.Product_Delete) - @patch("application.access_control.api.permissions_base.user_has_permission") + 
@patch("application.authorization.api.permissions_base.user_has_permission") def test_check_object_permission_post(self, mock): mock.return_value = True request = HttpRequest() diff --git a/backend/unittests/authorization/services/__init__.py b/backend/unittests/authorization/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/access_control/services/test_authorization.py b/backend/unittests/authorization/services/test_authorization.py similarity index 70% rename from backend/unittests/access_control/services/test_authorization.py rename to backend/unittests/authorization/services/test_authorization.py index bb54adf7b..8aa08c370 100644 --- a/backend/unittests/access_control/services/test_authorization.py +++ b/backend/unittests/authorization/services/test_authorization.py @@ -8,17 +8,16 @@ Authorization_Group_Member, User, ) -from application.access_control.services.authorization import ( +from application.authorization.services.authorization import ( NoAuthorizationImplementedError, PermissionDoesNotExistError, RoleDoesNotExistError, get_highest_user_role, - get_user_permissions, role_has_permission, user_has_permission, user_has_permission_or_403, ) -from application.access_control.services.roles_permissions import Permissions, Roles +from application.authorization.services.roles_permissions import Permissions, Roles from application.core.models import ( Product, Product_Authorization_Group_Member, @@ -32,24 +31,20 @@ class TestAuthorization(BaseTestCase): # user_has_permission # --------------------------------------------------------------- - @patch("application.access_control.services.authorization.get_current_user") + @patch("application.authorization.services.authorization.get_current_user") def test_user_has_permission_superuser(self, mock): mock.return_value = self.user_admin self.assertTrue(user_has_permission(None, Permissions.Observation_Delete)) # --- Product --- - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_product_no_permissions(self, mock): mock.return_value = None - self.assertFalse( - user_has_permission( - self.product_1, Permissions.Product_Edit, self.user_internal - ) - ) + self.assertFalse(user_has_permission(self.product_1, Permissions.Product_Edit, self.user_internal)) mock.assert_called_with(self.product_1, self.user_internal) - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_product_successful(self, mock): mock.return_value = Roles.Maintainer self.assertTrue( @@ -63,17 +58,13 @@ def test_user_has_permission_product_successful(self, mock): # --- Product_Group --- - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_product_group_no_permissions(self, mock): mock.return_value = None - self.assertFalse( - user_has_permission( - self.product_group_1, Permissions.Product_Edit, self.user_internal - ) - ) + self.assertFalse(user_has_permission(self.product_group_1, Permissions.Product_Edit, self.user_internal)) mock.assert_called_with(self.product_group_1, self.user_internal) - @patch("application.access_control.services.authorization.get_highest_user_role") + 
@patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_product_group_successful(self, mock): mock.return_value = Roles.Maintainer self.assertTrue( @@ -89,15 +80,13 @@ def test_user_has_permission_product_group_successful(self, mock): def test_user_has_permission_product_member_wrong_permission(self): with self.assertRaises(NoAuthorizationImplementedError) as e: - user_has_permission( - self.product_member_1, Permissions.Product_Edit, self.user_internal - ) + user_has_permission(self.product_member_1, Permissions.Product_Edit, self.user_internal) self.assertEqual( "No authorization implemented for class Product_Member and permission 1102", str(e.exception), ) - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_product_member_correct_permission(self, mock): mock.return_value = None self.assertFalse( @@ -125,10 +114,8 @@ def test_user_has_permission_product_authorization_group_member_wrong_permission str(e.exception), ) - @patch("application.access_control.services.authorization.get_highest_user_role") - def test_user_has_permission_product_authorization_group_member_correct_permission( - self, mock - ): + @patch("application.authorization.services.authorization.get_highest_user_role") + def test_user_has_permission_product_authorization_group_member_correct_permission(self, mock): mock.return_value = None self.assertFalse( user_has_permission( @@ -143,112 +130,82 @@ def test_user_has_permission_product_authorization_group_member_correct_permissi def test_user_has_permission_rule_wrong_permission(self): with self.assertRaises(NoAuthorizationImplementedError) as e: - user_has_permission( - self.product_rule_1, Permissions.Product_Edit, self.user_internal - ) + user_has_permission(self.product_rule_1, Permissions.Product_Edit, self.user_internal) self.assertEqual( "No authorization implemented for class Rule and permission 1102", str(e.exception), ) - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_rule_correct_permission(self, mock): mock.return_value = None - self.assertFalse( - user_has_permission( - self.product_rule_1, Permissions.Product_Rule_Edit, self.user_internal - ) - ) + self.assertFalse(user_has_permission(self.product_rule_1, Permissions.Product_Rule_Edit, self.user_internal)) mock.assert_called_with(self.product_1, self.user_internal) def test_user_has_permission_rule_general_rule(self): with self.assertRaises(NoAuthorizationImplementedError) as e: - user_has_permission( - self.general_rule, Permissions.Product_Rule_View, self.user_internal - ) - self.assertEqual( - "No authorization implemented for General Rules", str(e.exception) - ) + user_has_permission(self.general_rule, Permissions.Product_Rule_View, self.user_internal) + self.assertEqual("No authorization implemented for General Rules", str(e.exception)) # --- Branch --- def test_user_has_permission_branch_wrong_permission(self): with self.assertRaises(NoAuthorizationImplementedError) as e: - user_has_permission( - self.branch_1, Permissions.Product_Edit, self.user_internal - ) + user_has_permission(self.branch_1, Permissions.Product_Edit, self.user_internal) self.assertEqual( "No authorization implemented for class Branch and permission 1102", str(e.exception), ) - 
@patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_branch_correct_permission(self, mock): mock.return_value = None - self.assertFalse( - user_has_permission( - self.branch_1, Permissions.Branch_Edit, self.user_internal - ) - ) + self.assertFalse(user_has_permission(self.branch_1, Permissions.Branch_Edit, self.user_internal)) mock.assert_called_with(self.product_1, self.user_internal) # --- Service --- def test_user_has_permission_service_wrong_permission(self): with self.assertRaises(NoAuthorizationImplementedError) as e: - user_has_permission( - self.service_1, Permissions.Product_Edit, self.user_internal - ) + user_has_permission(self.service_1, Permissions.Product_Edit, self.user_internal) self.assertEqual( "No authorization implemented for class Service and permission 1102", str(e.exception), ) - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_service_correct_permission(self, mock): mock.return_value = None - self.assertFalse( - user_has_permission( - self.service_1, Permissions.Service_Delete, self.user_internal - ) - ) + self.assertFalse(user_has_permission(self.service_1, Permissions.Service_Delete, self.user_internal)) mock.assert_called_with(self.product_1, self.user_internal) # --- Observation --- def test_user_has_permission_observation_wrong_permission(self): with self.assertRaises(NoAuthorizationImplementedError) as e: - user_has_permission( - self.observation_1, Permissions.Product_Edit, self.user_internal - ) + user_has_permission(self.observation_1, Permissions.Product_Edit, self.user_internal) self.assertEqual( "No authorization implemented for class Observation and permission 1102", str(e.exception), ) - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_observation_correct_permission(self, mock): mock.return_value = None - self.assertFalse( - user_has_permission( - self.observation_1, Permissions.Observation_Delete, self.user_internal - ) - ) + self.assertFalse(user_has_permission(self.observation_1, Permissions.Observation_Delete, self.user_internal)) mock.assert_called_with(self.product_1, self.user_internal) # --- Observation Log --- def test_user_has_permission_observation_log_wrong_permission(self): with self.assertRaises(NoAuthorizationImplementedError) as e: - user_has_permission( - self.observation_log_1, Permissions.Product_Edit, self.user_internal - ) + user_has_permission(self.observation_log_1, Permissions.Product_Edit, self.user_internal) self.assertEqual( "No authorization implemented for class Observation_Log and permission 1102", str(e.exception), ) - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_observation_log_correct_permission(self, mock): mock.return_value = None self.assertFalse( @@ -264,15 +221,13 @@ def test_user_has_permission_observation_log_correct_permission(self, mock): def test_user_has_permission_api_configuration_wrong_permission(self): with self.assertRaises(NoAuthorizationImplementedError) as e: - user_has_permission( - self.api_configuration_1, 
Permissions.Product_Edit, self.user_internal - ) + user_has_permission(self.api_configuration_1, Permissions.Product_Edit, self.user_internal) self.assertEqual( "No authorization implemented for class Api_Configuration and permission 1102", str(e.exception), ) - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_api_configuration_correct_permission(self, mock): mock.return_value = None self.assertFalse( @@ -288,15 +243,13 @@ def test_user_has_permission_api_configuration_correct_permission(self, mock): def test_user_has_permission_vex_base_wrong_permission(self): with self.assertRaises(NoAuthorizationImplementedError) as e: - user_has_permission( - self.openvex_1, Permissions.Product_Edit, self.user_internal - ) + user_has_permission(self.openvex_1, Permissions.Product_Edit, self.user_internal) self.assertEqual( "No authorization implemented for class OpenVEX and permission 1102", str(e.exception), ) - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_vex_base_correct_permission(self, mock): mock.return_value = None self.assertFalse( @@ -308,7 +261,7 @@ def test_user_has_permission_vex_base_correct_permission(self, mock): ) mock.assert_called_with(self.product_1, self.user_internal) - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_vex_base_correct_user(self, mock): self.assertTrue( user_has_permission( @@ -323,15 +276,13 @@ def test_user_has_permission_vex_base_correct_user(self, mock): def test_user_has_permission_vulnerability_check_wrong_permission(self): with self.assertRaises(NoAuthorizationImplementedError) as e: - user_has_permission( - self.vulnerability_check_1, Permissions.Product_Edit, self.user_internal - ) + user_has_permission(self.vulnerability_check_1, Permissions.Product_Edit, self.user_internal) self.assertEqual( "No authorization implemented for class Vulnerability_Check and permission 1102", str(e.exception), ) - @patch("application.access_control.services.authorization.get_highest_user_role") + @patch("application.authorization.services.authorization.get_highest_user_role") def test_user_has_permission_vulnerability_check_correct_permission(self, mock): mock.return_value = None self.assertFalse( @@ -347,13 +298,13 @@ def test_user_has_permission_vulnerability_check_correct_permission(self, mock): # user_has_permission_or_403 # --------------------------------------------------------------- - @patch("application.access_control.services.authorization.user_has_permission") + @patch("application.authorization.services.authorization.user_has_permission") def test_user_has_permission_or_403_permission_denied(self, mock): mock.return_value = False with self.assertRaises(PermissionDenied): user_has_permission_or_403(None, Permissions.Product_Edit) - @patch("application.access_control.services.authorization.user_has_permission") + @patch("application.authorization.services.authorization.user_has_permission") def test_user_has_permission_or_403_successful(self, mock): mock.return_value = True user_has_permission_or_403(None, Permissions.Product_Edit) @@ -372,41 +323,16 @@ def test_role_has_permission_wrong_permission(self): 
role_has_permission(Roles.Reader, 99999) self.assertEqual("Permission 99999 does not exist", str(e.exception)) - @patch( - "application.access_control.services.authorization.get_roles_with_permissions" - ) + @patch("application.authorization.services.authorization.get_roles_with_permissions") def test_role_has_permission_no_permission(self, mock): mock.return_value = {Roles.Reader: {}} - self.assertFalse( - role_has_permission(Roles.Reader, Permissions.Observation_Delete) - ) + self.assertFalse(role_has_permission(Roles.Reader, Permissions.Observation_Delete)) def test_role_has_permission_not_permitted(self): - self.assertFalse( - role_has_permission(Roles.Maintainer, Permissions.Observation_Delete) - ) + self.assertFalse(role_has_permission(Roles.Maintainer, Permissions.Observation_Delete)) def test_role_has_permission_successful(self): - self.assertTrue( - role_has_permission(Roles.Owner, Permissions.Observation_Delete) - ) - - # --------------------------------------------------------------- - # get_user_permission - # --------------------------------------------------------------- - - def test_get_user_permission_internal(self): - permissions = get_user_permissions(self.user_internal) - self.assertEqual( - [Permissions.Product_Create, Permissions.Product_Group_Create], permissions - ) - - @patch("application.access_control.services.authorization.get_current_user") - def test_get_user_permission_external(self, mock): - mock.return_value = self.user_external - - permissions = get_user_permissions() - self.assertEqual([], permissions) + self.assertTrue(role_has_permission(Roles.Owner, Permissions.Observation_Delete)) # --------------------------------------------------------------- # get_highest_user_role @@ -462,12 +388,8 @@ def prepare_authorization_groups(): product_group = Product.objects.get(name="db_product_group") user_internal_write = User.objects.get(username="db_internal_write") - group_internal_write = Authorization_Group.objects.create( - name="db_group_internal_write" - ) - Authorization_Group_Member.objects.filter( - authorization_group=group_internal_write - ).delete() + group_internal_write = Authorization_Group.objects.create(name="db_group_internal_write") + Authorization_Group_Member.objects.filter(authorization_group=group_internal_write).delete() Authorization_Group_Member.objects.create( authorization_group=group_internal_write, user=user_internal_write, @@ -477,12 +399,8 @@ def prepare_authorization_groups(): product=product_internal, authorization_group=group_internal_write, role=5 ) - group_internal_read = Authorization_Group.objects.create( - name="db_group_internal_read" - ) - Authorization_Group_Member.objects.filter( - authorization_group=group_internal_read - ).delete() + group_internal_read = Authorization_Group.objects.create(name="db_group_internal_read") + Authorization_Group_Member.objects.filter(authorization_group=group_internal_read).delete() Authorization_Group_Member.objects.create( authorization_group=group_internal_read, user=User.objects.get(id=3), @@ -493,9 +411,7 @@ def prepare_authorization_groups(): ) group_external = Authorization_Group.objects.create(name="db_group_external") - Authorization_Group_Member.objects.filter( - authorization_group=group_external - ).delete() + Authorization_Group_Member.objects.filter(authorization_group=group_external).delete() Authorization_Group_Member.objects.create( authorization_group=group_external, user=User.objects.get(id=4), @@ -505,12 +421,8 @@ def prepare_authorization_groups(): 
product=product_external, authorization_group=group_external, role=5 ) - group_product_group = Authorization_Group.objects.create( - name="db_group_product_group" - ) - Authorization_Group_Member.objects.filter( - authorization_group=group_product_group - ).delete() + group_product_group = Authorization_Group.objects.create(name="db_group_product_group") + Authorization_Group_Member.objects.filter(authorization_group=group_product_group).delete() Authorization_Group_Member.objects.create( authorization_group=group_product_group, user=User.objects.get(id=6), diff --git a/backend/unittests/background_tasks/__init__.py b/backend/unittests/background_tasks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/background_tasks/periodic_tasks/__init__.py b/backend/unittests/background_tasks/periodic_tasks/__init__.py new file mode 100644 index 000000000..db1249ac0 --- /dev/null +++ b/backend/unittests/background_tasks/periodic_tasks/__init__.py @@ -0,0 +1 @@ +# This file is intentionally left empty to mark the directory as a Python package. diff --git a/backend/unittests/background_tasks/periodic_tasks/test_core_tasks.py b/backend/unittests/background_tasks/periodic_tasks/test_core_tasks.py new file mode 100644 index 000000000..011deda3d --- /dev/null +++ b/backend/unittests/background_tasks/periodic_tasks/test_core_tasks.py @@ -0,0 +1,33 @@ +from unittest.mock import patch + +from application.background_tasks.periodic_tasks.core_tasks import ( + task_branch_housekeeping, + task_expire_risk_acceptances, +) +from unittests.base_test_case import BaseTestCase + + +class TestCoreTasks(BaseTestCase): + # --------------------------------------------------------------- + # task_branch_housekeeping + # --------------------------------------------------------------- + + @patch("application.background_tasks.periodic_tasks.core_tasks.delete_inactive_branches_and_set_flags") + def test_task_branch_housekeeping(self, mock_delete_inactive_branches): + # Execute + task_branch_housekeeping() + + # Assert + mock_delete_inactive_branches.assert_called_once() + + # --------------------------------------------------------------- + # task_expire_risk_acceptances + # --------------------------------------------------------------- + + @patch("application.background_tasks.periodic_tasks.core_tasks.expire_risk_acceptances") + def test_task_expire_risk_acceptances(self, mock_expire_risk_acceptances): + # Execute + task_expire_risk_acceptances() + + # Assert + mock_expire_risk_acceptances.assert_called_once() diff --git a/backend/unittests/background_tasks/periodic_tasks/test_epss_tasks.py b/backend/unittests/background_tasks/periodic_tasks/test_epss_tasks.py new file mode 100644 index 000000000..5ab0a3dd9 --- /dev/null +++ b/backend/unittests/background_tasks/periodic_tasks/test_epss_tasks.py @@ -0,0 +1,41 @@ +from unittest.mock import patch + +from application.background_tasks.periodic_tasks.epss_tasks import task_import_epss +from unittests.base_test_case import BaseTestCase + + +class TestEpssTasks(BaseTestCase): + # --------------------------------------------------------------- + # task_import_epss + # --------------------------------------------------------------- + + @patch("application.background_tasks.periodic_tasks.epss_tasks.import_cvss_bt") + @patch("application.background_tasks.periodic_tasks.epss_tasks.epss_apply_observations") + @patch("application.background_tasks.periodic_tasks.epss_tasks.import_epss") + def test_task_import_epss(self, mock_import_epss, 
mock_epss_apply_observations, mock_import_cvss_bt): + # Execute + task_import_epss() + + # Assert + mock_import_epss.assert_called_once() + mock_epss_apply_observations.assert_called_once() + mock_import_cvss_bt.assert_called_once() + + @patch("application.background_tasks.periodic_tasks.epss_tasks.import_cvss_bt") + @patch("application.background_tasks.periodic_tasks.epss_tasks.epss_apply_observations") + @patch("application.background_tasks.periodic_tasks.epss_tasks.import_epss") + def test_task_import_epss_execution_order( + self, mock_import_epss, mock_epss_apply_observations, mock_import_cvss_bt + ): + # Execute + task_import_epss() + + # Assert - Check execution order + self.assertEqual(mock_import_epss.call_count, 1) + self.assertEqual(mock_epss_apply_observations.call_count, 1) + self.assertEqual(mock_import_cvss_bt.call_count, 1) + + # Verify the order of execution + mock_import_epss.assert_called_once() + mock_epss_apply_observations.assert_called_once() + mock_import_cvss_bt.assert_called_once() diff --git a/backend/unittests/background_tasks/periodic_tasks/test_import_observations_tasks.py b/backend/unittests/background_tasks/periodic_tasks/test_import_observations_tasks.py new file mode 100644 index 000000000..d6539e747 --- /dev/null +++ b/backend/unittests/background_tasks/periodic_tasks/test_import_observations_tasks.py @@ -0,0 +1,218 @@ +from unittest.mock import MagicMock, patch + +from application.background_tasks.periodic_tasks.import_observations_tasks import ( + task_api_import, +) +from application.commons.models import Settings +from unittests.base_test_case import BaseTestCase + + +class TestImportObservationsTasks(BaseTestCase): + # --------------------------------------------------------------- + # task_api_import + # --------------------------------------------------------------- + + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.scan_product") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.api_import_observations") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Product.objects.filter") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Api_Configuration.objects.filter") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Settings.load") + def test_task_api_import_all_enabled( + self, + mock_settings_load, + mock_api_config_filter, + mock_product_filter, + mock_api_import_observations, + mock_scan_product, + ): + # Setup + # Mock settings + settings = Settings() + settings.feature_automatic_api_import = True + settings.feature_automatic_osv_scanning = True + mock_settings_load.return_value = settings + + # Mock API configurations + mock_api_config = MagicMock() + mock_api_config.automatic_import_branch = self.branch_1 + mock_api_config.automatic_import_service = self.service_1 + mock_api_config.automatic_import_docker_image_name_tag = "image:tag" + mock_api_config.automatic_import_endpoint_url = "https://example.com" + mock_api_config.automatic_import_kubernetes_cluster = "cluster1" + mock_api_config_filter.return_value = [mock_api_config] + + # Mock products + mock_product = MagicMock() + mock_product_filter.return_value = [self.product_1] + + # Mock import results + mock_api_import_observations.return_value = (1, 2, 3) # new, updated, resolved + mock_scan_product.return_value = (4, 5, 6) # new, updated, resolved + + # Execute + task_api_import() + + # Assert + # Check settings were loaded 3 times (once for API 
import, once for OSV and once for deleting old entries) + # self.assertEqual(mock_settings_load.call_count, 3) + + # Check API import was called with correct parameters + mock_api_config_filter.assert_called_once_with(automatic_import_enabled=True) + mock_api_import_observations.assert_called_once() + api_import_params = mock_api_import_observations.call_args[0][0] + self.assertEqual(api_import_params.api_configuration, mock_api_config) + self.assertEqual(api_import_params.branch, mock_api_config.automatic_import_branch) + self.assertEqual(api_import_params.service_name, mock_api_config.automatic_import_service.name) + self.assertEqual( + api_import_params.docker_image_name_tag, mock_api_config.automatic_import_docker_image_name_tag + ) + self.assertEqual(api_import_params.endpoint_url, mock_api_config.automatic_import_endpoint_url) + self.assertEqual(api_import_params.kubernetes_cluster, mock_api_config.automatic_import_kubernetes_cluster) + + # Check OSV scanning was called + mock_product_filter.assert_called_once_with(osv_enabled=True, automatic_osv_scanning_enabled=True) + mock_scan_product.assert_called_once_with(self.product_1) + + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.scan_product") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.api_import_observations") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Product.objects.filter") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Api_Configuration.objects.filter") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Settings.load") + def test_task_api_import_api_disabled( + self, + mock_settings_load, + mock_api_config_filter, + mock_product_filter, + mock_api_import_observations, + mock_scan_product, + ): + # Setup + # Mock settings + settings = Settings() + settings.feature_automatic_api_import = False + settings.feature_automatic_osv_scanning = True + mock_settings_load.return_value = settings + + # Mock products + mock_product = MagicMock() + mock_product_filter.return_value = [mock_product] + + # Mock import results + mock_scan_product.return_value = (4, 5, 6) # new, updated, resolved + + # Execute + task_api_import() + + # Assert + # Check API import was not called + mock_api_config_filter.assert_not_called() + mock_api_import_observations.assert_not_called() + + # Check OSV scanning was called + mock_product_filter.assert_called_once_with(osv_enabled=True, automatic_osv_scanning_enabled=True) + mock_scan_product.assert_called_once_with(mock_product) + + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.scan_product") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.api_import_observations") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Product.objects.filter") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Api_Configuration.objects.filter") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Settings.load") + def test_task_api_import_osv_disabled( + self, + mock_settings_load, + mock_api_config_filter, + mock_product_filter, + mock_api_import_observations, + mock_scan_product, + ): + # Setup + # Mock settings + settings = Settings() + settings.feature_automatic_api_import = True + settings.feature_automatic_osv_scanning = False + mock_settings_load.return_value = settings + + # Mock API configurations + mock_api_config = 
MagicMock() + mock_api_config_filter.return_value = [mock_api_config] + + # Mock import results + mock_api_import_observations.return_value = (1, 2, 3) # new, updated, resolved + + # Execute + task_api_import() + + # Assert + # Check API import was called + mock_api_config_filter.assert_called_once_with(automatic_import_enabled=True) + mock_api_import_observations.assert_called_once() + + # Check OSV scanning was not called + mock_product_filter.assert_not_called() + mock_scan_product.assert_not_called() + + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.handle_task_exception") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.api_import_observations") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Api_Configuration.objects.filter") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Settings.load") + def test_task_api_import_api_exception_handling( + self, + mock_settings_load, + mock_api_config_filter, + mock_api_import_observations, + mock_handle_task_exception, + ): + # Setup + # Mock settings + settings = Settings() + settings.feature_automatic_api_import = True + settings.feature_automatic_osv_scanning = False + mock_settings_load.return_value = settings + + # Mock API configurations + mock_api_config = MagicMock() + mock_api_config_filter.return_value = [mock_api_config] + + # Mock API import to raise exception + test_exception = Exception("Test API import exception") + mock_api_import_observations.side_effect = test_exception + + # Execute + task_api_import() + + # Assert + # Check exception was handled + mock_handle_task_exception.assert_called_once_with(test_exception, product=mock_api_config.product) + + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.handle_task_exception") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.scan_product") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Product.objects.filter") + @patch("application.background_tasks.periodic_tasks.import_observations_tasks.Settings.load") + def test_task_api_import_osv_exception_handling( + self, + mock_settings_load, + mock_product_filter, + mock_scan_product, + mock_handle_task_exception, + ): + # Setup + # Mock settings + settings = Settings() + settings.feature_automatic_api_import = False + settings.feature_automatic_osv_scanning = True + mock_settings_load.return_value = settings + + # Mock products + mock_product = MagicMock() + mock_product_filter.return_value = [mock_product] + + # Mock scan_product to raise exception + test_exception = Exception("Test OSV scanning exception") + mock_scan_product.side_effect = test_exception + + # Execute + task_api_import() + + # Assert + # Check exception was handled + mock_handle_task_exception.assert_called_once_with(test_exception, product=mock_product) diff --git a/backend/unittests/background_tasks/periodic_tasks/test_license_tasks.py b/backend/unittests/background_tasks/periodic_tasks/test_license_tasks.py new file mode 100644 index 000000000..e721d15ee --- /dev/null +++ b/backend/unittests/background_tasks/periodic_tasks/test_license_tasks.py @@ -0,0 +1,43 @@ +from unittest.mock import patch + +from application.background_tasks.periodic_tasks.license_tasks import ( + task_spdx_license_import, +) +from application.commons.models import Settings +from unittests.base_test_case import BaseTestCase + + +class TestLicenseTasks(BaseTestCase): + # 
--------------------------------------------------------------- + # task_spdx_license_import + # --------------------------------------------------------------- + + @patch("application.background_tasks.periodic_tasks.license_tasks.import_licenses") + @patch("application.background_tasks.periodic_tasks.license_tasks.Settings.load") + def test_task_spdx_license_import_enabled(self, mock_settings_load, mock_import_licenses): + # Setup + settings = Settings() + settings.feature_license_management = True + mock_settings_load.return_value = settings + + # Execute + task_spdx_license_import() + + # Assert + self.assertEqual(mock_settings_load.call_count, 2) + mock_import_licenses.assert_called_once() + + @patch("application.background_tasks.periodic_tasks.license_tasks.import_licenses") + @patch("application.background_tasks.periodic_tasks.license_tasks.Settings.load") + def test_task_spdx_license_import_disabled(self, mock_settings_load, mock_import_licenses): + # Setup + settings = Settings() + settings.feature_license_management = False + mock_settings_load.return_value = settings + + # Execute + task_spdx_license_import() + + # Assert + self.assertEqual(mock_settings_load.call_count, 2) + mock_import_licenses.assert_not_called() diff --git a/backend/unittests/background_tasks/periodic_tasks/test_metrics_tasks.py b/backend/unittests/background_tasks/periodic_tasks/test_metrics_tasks.py new file mode 100644 index 000000000..b0578fc32 --- /dev/null +++ b/backend/unittests/background_tasks/periodic_tasks/test_metrics_tasks.py @@ -0,0 +1,20 @@ +from unittest.mock import patch + +from application.background_tasks.periodic_tasks.metrics_tasks import ( + task_calculate_product_metrics, +) +from unittests.base_test_case import BaseTestCase + + +class TestMetricsTasks(BaseTestCase): + # --------------------------------------------------------------- + # task_calculate_product_metrics + # --------------------------------------------------------------- + + @patch("application.background_tasks.periodic_tasks.metrics_tasks.calculate_product_metrics") + def test_task_calculate_product_metrics(self, mock_calculate_product_metrics): + # Execute + task_calculate_product_metrics() + + # Assert + mock_calculate_product_metrics.assert_called_once() diff --git a/backend/unittests/background_tasks/services/__init__.py b/backend/unittests/background_tasks/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/background_tasks/services/test_task_base.py b/backend/unittests/background_tasks/services/test_task_base.py new file mode 100644 index 000000000..c2cfee481 --- /dev/null +++ b/backend/unittests/background_tasks/services/test_task_base.py @@ -0,0 +1,246 @@ +from unittest.mock import MagicMock, call, patch + +from application.background_tasks.services.task_base import ( + _delete_older_entries, + _handle_periodic_task_exception, + so_periodic_task, +) +from unittests.base_test_case import BaseTestCase + + +class TestTaskBase(BaseTestCase): + # --------------------------------------------------------------- + # so_periodic_task + # --------------------------------------------------------------- + + @patch("application.background_tasks.services.task_base.lock_task") + @patch("application.background_tasks.services.task_base.logger") + @patch("application.background_tasks.models.Periodic_Task.save") + @patch("application.background_tasks.services.task_base._delete_older_entries") + def test_so_periodic_task_successful_execution( + self, mock_delete_older_entries, 
mock_save, mock_logger, mock_lock_task + ): + # Setup + mock_lock_task.return_value = lambda func: func + test_function = MagicMock() + test_function.__name__ = "test_function" + + # Execute + decorated_function = so_periodic_task("test_task")(test_function) + decorated_function() + + # Assert + mock_logger.info.assert_has_calls( + [call("--- %s - start ---", "test_task"), call("--- %s - finished ---", "test_task")] + ) + test_function.assert_called_once() + self.assertEqual(mock_save.call_count, 2) + mock_lock_task.assert_called_once_with("test_task") + mock_delete_older_entries.assert_called_once_with("test_task") + + @patch("application.background_tasks.services.task_base._handle_periodic_task_exception") + @patch("application.background_tasks.services.task_base.lock_task") + @patch("application.background_tasks.services.task_base.logger") + @patch("application.background_tasks.models.Periodic_Task.save") + @patch("application.background_tasks.services.task_base._delete_older_entries") + def test_so_periodic_task_exception_handling( + self, mock_delete_older_entries, mock_save, mock_logger, mock_lock_task, mock_handle_exception + ): + # Setup + mock_lock_task.return_value = lambda func: func + test_exception = Exception("Test exception") + test_function = MagicMock(side_effect=test_exception) + test_function.__name__ = "test_function" + + # Execute + decorated_function = so_periodic_task("test_task")(test_function) + decorated_function() + + # Assert + mock_logger.info.assert_called_once_with("--- %s - start ---", "test_task") + test_function.assert_called_once() + mock_handle_exception.assert_called_once_with(test_exception) + self.assertEqual(mock_save.call_count, 2) + mock_delete_older_entries.assert_called_once_with("test_task") + + # Verify that the "finished" log is not called when an exception occurs + self.assertEqual(mock_logger.info.call_count, 1) + + # --------------------------------------------------------------- + # _handle_periodic_task_exception + # --------------------------------------------------------------- + + @patch("application.background_tasks.services.task_base.send_task_exception_notification") + @patch("application.background_tasks.services.task_base.format_log_message") + @patch("application.background_tasks.services.task_base.logger") + @patch("application.background_tasks.services.task_base.sys.exc_info") + @patch("application.background_tasks.services.task_base.inspect.getinnerframes") + def test_handle_periodic_task_exception_with_function_name( + self, mock_getinnerframes, mock_exc_info, mock_logger, mock_format_log_message, mock_send_notification + ): + # Setup + test_exception = Exception("Test exception") + mock_exc_info.return_value = (None, None, "traceback_object") + + # Create a mock frame with a function attribute + mock_frame = MagicMock() + mock_frame.function = "test_function" + mock_getinnerframes.return_value = [MagicMock(), mock_frame] + + mock_format_log_message.return_value = "Formatted log message" + + # Execute + _handle_periodic_task_exception(test_exception) + + # Assert + self.assertEqual(mock_exc_info.call_count, 4) + mock_getinnerframes.assert_called_once_with("traceback_object") + + # Check that format_log_message was called with the correct parameters + mock_format_log_message.assert_called_once_with( + message="Error while executing periodic background task", + data={"function": "test_function"}, + exception=test_exception, + username=None, + ) + + # Check that the error was logged + 
mock_logger.error.assert_has_calls([call("Formatted log message"), call("NoneType: None\n")]) + + # Check that send_task_exception_notification was called with the correct parameters + mock_send_notification.assert_called_once_with( + function="test_function", arguments=None, user=None, exception=test_exception, product=None + ) + + @patch("application.background_tasks.services.task_base.send_task_exception_notification") + @patch("application.background_tasks.services.task_base.format_log_message") + @patch("application.background_tasks.services.task_base.logger") + @patch("application.background_tasks.services.task_base.sys.exc_info") + def test_handle_periodic_task_exception_without_frames( + self, mock_exc_info, mock_logger, mock_format_log_message, mock_send_notification + ): + # Setup + test_exception = Exception("Test exception") + mock_exc_info.return_value = (None, None, None) + mock_format_log_message.return_value = "Formatted log message" + + # Execute + _handle_periodic_task_exception(test_exception) + + # Assert + self.assertEqual(mock_exc_info.call_count, 3) + + # Check that format_log_message was called with the correct parameters + mock_format_log_message.assert_called_once_with( + message="Error while executing periodic background task", + data={}, + exception=test_exception, + username=None, + ) + + # Check that the error was logged + mock_logger.error.assert_has_calls([call("Formatted log message"), call("NoneType: None\n")]) + + # Check that send_task_exception_notification was called with the correct parameters + mock_send_notification.assert_called_once_with( + function=None, arguments=None, user=None, exception=test_exception, product=None + ) + + @patch("application.background_tasks.services.task_base.send_task_exception_notification") + @patch("application.background_tasks.services.task_base.format_log_message") + @patch("application.background_tasks.services.task_base.logger") + @patch("application.background_tasks.services.task_base.sys.exc_info") + @patch("application.background_tasks.services.task_base.inspect.getinnerframes") + def test_handle_periodic_task_exception_with_insufficient_frames( + self, mock_getinnerframes, mock_exc_info, mock_logger, mock_format_log_message, mock_send_notification + ): + # Setup + test_exception = Exception("Test exception") + mock_exc_info.return_value = (None, None, "traceback_object") + + # Create a mock frame with only one frame (less than required) + mock_getinnerframes.return_value = [MagicMock()] + + mock_format_log_message.return_value = "Formatted log message" + + # Execute + _handle_periodic_task_exception(test_exception) + + # Assert + self.assertEqual(mock_exc_info.call_count, 4) + mock_getinnerframes.assert_called_once_with("traceback_object") + + # Check that format_log_message was called with the correct parameters + mock_format_log_message.assert_called_once_with( + message="Error while executing periodic background task", + data={}, + exception=test_exception, + username=None, + ) + + # Check that the error was logged + mock_logger.error.assert_has_calls([call("Formatted log message"), call("NoneType: None\n")]) + + # Check that send_task_exception_notification was called with the correct parameters + mock_send_notification.assert_called_once_with( + function=None, arguments=None, user=None, exception=test_exception, product=None + ) + + # --------------------------------------------------------------- + # _delete_older_entries + # --------------------------------------------------------------- + + 
@patch("application.background_tasks.services.task_base.Settings.load") + def test_delete_older_entries(self, mock_settings_load): + # Setup settings + mock_settings = MagicMock() + mock_settings.periodic_task_max_entries = 8 + mock_settings_load.return_value = mock_settings + # Setup - Create 15 periodic tasks with the same task name + from django.utils import timezone + + from application.background_tasks.models import Periodic_Task + from application.background_tasks.types import Status + + task_name = "test_task" + other_task_name = "other_task" + + # Create 15 tasks with different start times for the main task + for i in range(15): + Periodic_Task.objects.create( + task=task_name, + start_time=timezone.now() - timezone.timedelta(minutes=i), + status=Status.STATUS_SUCCESS, + message=f"Task {i}", + ) + + # Create 5 tasks with a different task name (should not be affected) + for i in range(5): + Periodic_Task.objects.create( + task=other_task_name, + start_time=timezone.now() - timezone.timedelta(minutes=i), + status=Status.STATUS_SUCCESS, + message=f"Other task {i}", + ) + + # Execute + _delete_older_entries(task_name) + + # Assert + mock_settings_load.assert_called_once() + + # Should keep only the 8 most recent tasks for task_name + remaining_tasks = Periodic_Task.objects.filter(task=task_name).count() + self.assertEqual(remaining_tasks, 8, "Should keep exactly 8 most recent tasks") + + # Check that the oldest tasks were deleted + oldest_remaining = Periodic_Task.objects.filter(task=task_name).order_by("start_time").first() + self.assertEqual(oldest_remaining.message, "Task 7", "The oldest remaining task should be Task 9") + + # Check that the newest tasks were kept + newest_remaining = Periodic_Task.objects.filter(task=task_name).order_by("-start_time").first() + self.assertEqual(newest_remaining.message, "Task 0", "The newest task should be Task 0") + + # Check that other task entries were not affected + other_tasks_count = Periodic_Task.objects.filter(task=other_task_name).count() + self.assertEqual(other_tasks_count, 5, "Tasks with different names should not be affected") diff --git a/backend/unittests/base_test_case.py b/backend/unittests/base_test_case.py index db4719235..1c52e990f 100644 --- a/backend/unittests/base_test_case.py +++ b/backend/unittests/base_test_case.py @@ -5,7 +5,7 @@ Authorization_Group_Member, User, ) -from application.access_control.services.roles_permissions import Roles +from application.authorization.services.roles_permissions import Roles from application.core.models import ( Branch, Observation, @@ -29,15 +29,9 @@ class BaseTestCase(TestCase): def setUp(self) -> None: self.maxDiff = None - self.user_internal = User( - id=1, username="user_internal@example.com", is_external=False - ) - self.user_external = User( - username="user_external@example.com", is_external=True - ) - self.user_admin = User( - id=2, username="user_admin@example.com", is_superuser=True - ) + self.user_internal = User(id=1, username="user_internal@example.com", is_external=False) + self.user_external = User(username="user_external@example.com", is_external=True) + self.user_admin = User(id=2, username="user_admin@example.com", is_superuser=True) self.parser_1 = Parser(name="parser_1") self.product_group_1 = Product(name="product_group_1") @@ -62,9 +56,7 @@ def setUp(self) -> None: self.service_1 = Service(name="service_1", product=self.product_1) - self.product_member_1 = Product_Member( - product=self.product_1, user=self.user_internal, role=Roles.Writer - ) + self.product_member_1 
= Product_Member(product=self.product_1, user=self.user_internal, role=Roles.Writer) self.authorization_group_1 = Authorization_Group(name="authorization_group_1") self.authorization_group_member_1 = Authorization_Group_Member( diff --git a/backend/unittests/commons/api/test_views.py b/backend/unittests/commons/api/test_views.py index ba09e0035..803e75558 100644 --- a/backend/unittests/commons/api/test_views.py +++ b/backend/unittests/commons/api/test_views.py @@ -1,15 +1,16 @@ from unittest.mock import patch -from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST +from rest_framework.status import ( + HTTP_200_OK, + HTTP_204_NO_CONTENT, +) from rest_framework.test import APIClient from unittests.base_test_case import BaseTestCase class TestViews(BaseTestCase): - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") def test_version(self, mock_authentication): mock_authentication.return_value = self.user_internal, None @@ -31,37 +32,3 @@ def test_empty(self): response = api_client.get("/") self.assertEqual(HTTP_204_NO_CONTENT, response.status_code) - - # --- NotificationViewSet --- - - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) - def test_notification_bulk_delete_no_list(self, mock_authentication): - mock_authentication.return_value = self.user_internal, None - - api_client = APIClient() - response = api_client.post("/api/notifications/bulk_delete/") - - self.assertEqual(HTTP_400_BAD_REQUEST, response.status_code) - self.assertEqual( - {"message": "Notifications: This field is required."}, response.data - ) - - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) - @patch("application.commons.api.views.bulk_delete") - def test_notification_bulk_delete_successful( - self, mock_bulk_delete, mock_authentication - ): - mock_authentication.return_value = self.user_internal, None - - data = {"notifications": [1, 2, 3]} - api_client = APIClient() - response = api_client.post( - "/api/notifications/bulk_delete/", data=data, format="json" - ) - - self.assertEqual(HTTP_204_NO_CONTENT, response.status_code) - mock_bulk_delete.assert_called_once_with([1, 2, 3]) diff --git a/backend/unittests/commons/services/test_log_message.py b/backend/unittests/commons/services/test_log_message.py index 326a9ede9..c578f3a16 100644 --- a/backend/unittests/commons/services/test_log_message.py +++ b/backend/unittests/commons/services/test_log_message.py @@ -10,26 +10,22 @@ class TestLogMessage(BaseTestCase): - @patch("application.commons.services.log_message.get_current_user") @patch("application.commons.services.log_message.get_current_request") - def test_format_log_message_empty(self, mock_request, mock_user): + def test_format_log_message_empty(self, mock_request): mock_request.return_value = None - mock_user.return_value = None message = format_log_message() self.assertEqual("{'message': 'No message given'}", message) - @patch("application.commons.services.log_message.get_current_user") @patch("application.commons.services.log_message.get_current_request") - def test_format_log_message_full(self, mock_request, mock_user): + def test_format_log_message_full(self, mock_request): mock_request.return_value = None - mock_user.return_value = None message = format_log_message( 
message="incoming_message", data={"key_1": "value_1", "key_2": "value_2"}, - user=self.user_internal, + username="user_internal@example.com", response=Response(status=500), exception=Exception("exception_message"), ) @@ -37,42 +33,16 @@ def test_format_log_message_full(self, mock_request, mock_user): log_message = "{'message': 'incoming_message', 'data_key_1': 'value_1', 'data_key_2': 'value_2', 'user': 'user_internal@example.com', 'response_status': '500', 'exception_message': 'exception_message', 'exception_class': 'builtins.Exception'}" self.assertEqual(log_message, message) - @patch("application.commons.services.log_message.get_current_user") @patch("application.commons.services.log_message.get_current_request") - def test_format_log_message_anonymous_user(self, mock_request, mock_user): - mock_request.return_value = None - mock_user.return_value = AnonymousUser() - - message = format_log_message(message="test_message") - - log_message = "{'message': 'test_message'}" - self.assertEqual(log_message, message) - - @patch("application.commons.services.log_message.get_current_user") - @patch("application.commons.services.log_message.get_current_request") - def test_format_log_message_medium_1(self, mock_request, mock_user): + def test_format_log_message_medium(self, mock_request): request = HttpRequest() request.META["HTTP_X_FORWARDED_FOR"] = "addr_0, addr_1" request.method = "POST" mock_request.return_value = request - mock_user.return_value = self.user_external - message = format_log_message(exception=ProtectedError("protected_error", None)) + message = format_log_message( + exception=ProtectedError("protected_error", None), username="user_external@example.com" + ) log_message = "{'message': \"('protected_error', None)\", 'user': 'user_external@example.com', 'request_method': 'POST', 'request_path': '', 'request_client_ip': 'addr_0', 'exception_class': 'django.db.models.deletion.ProtectedError'}" self.assertEqual(log_message, message) - - @patch("application.commons.services.log_message.get_current_user") - @patch("application.commons.services.log_message.get_current_request") - def test_format_log_message_medium_2(self, mock_request, mock_user): - request = HttpRequest() - request.path = "request_path" - request.method = "GET" - request.META["REMOTE_ADDR"] = "addr" - mock_request.return_value = request - mock_user.return_value = self.user_external - - message = format_log_message(exception=ProtectedError("protected_error", None)) - - log_message = "{'message': \"('protected_error', None)\", 'user': 'user_external@example.com', 'request_method': 'GET', 'request_path': 'request_path', 'request_client_ip': 'addr', 'exception_class': 'django.db.models.deletion.ProtectedError'}" - self.assertEqual(log_message, message) diff --git a/backend/unittests/commons/services/test_request_cache.py b/backend/unittests/commons/services/test_request_cache.py new file mode 100644 index 000000000..9ed04ee0b --- /dev/null +++ b/backend/unittests/commons/services/test_request_cache.py @@ -0,0 +1,97 @@ +from unittest import mock + +# Import the module under test +import application.commons.services.request_cache as rc +from unittests.base_test_case import BaseTestCase + + +class DummyRequest: + """A very small stand‑in for Django's HttpRequest object.""" + + pass + + +class RequestCacheMiddlewareTest(BaseTestCase): + def test_process_request_attaches_cache(self): + """Middleware should add a RequestCache instance to the request.""" + request = DummyRequest() + middleware = 
rc.RequestCacheMiddleware(get_response=lambda r: None) + + # process_request is called by Django during the request cycle + middleware.process_request(request) + + self.assertTrue(hasattr(request, "cache")) + self.assertIsInstance(request.cache, rc.RequestCache) + + +class CacheForRequestDecoratorTest(BaseTestCase): + def setUp(self): + super().setUp() + # Reset any global state between tests + rc.cache_args_kwargs_marker = object() + + def _mock_get_current_request(self, cache_instance): + """Helper to patch get_current_request to return an object with a cache attribute.""" + mock_req = DummyRequest() + mock_req.cache = cache_instance + return mock.patch("application.commons.services.request_cache.get_current_request", return_value=mock_req) + + def test_decorator_caches_result_within_request(self): + """The decorator should cache results only for the current request.""" + counter = {"calls": 0} + + @rc.cache_for_request + def expensive(arg): + counter["calls"] += 1 + return f"result-{arg}" + + # Use a RequestCache instance + cache = rc.RequestCache() + with self._mock_get_current_request(cache): + # First call with arg=1 -> should compute + self.assertEqual(expensive(1), "result-1") + self.assertEqual(counter["calls"], 1) + + # Second call with same arg -> should hit cache + self.assertEqual(expensive(1), "result-1") + self.assertEqual(counter["calls"], 1) + + # Call with different arg -> compute again + self.assertEqual(expensive(2), "result-2") + self.assertEqual(counter["calls"], 2) + + # Ensure the cache stores attributes correctly + # self.assertTrue(hasattr(cache, "_cache_calculate_key(1,)")) + # self.assertTrue(hasattr(cache, "_cache_calculate_key(2,)")) + + def test_decorator_falls_back_when_no_cache(self): + """If no request cache is available, the function should execute normally.""" + counter = {"calls": 0} + + @rc.cache_for_request + def expensive(arg): + counter["calls"] += 1 + return f"result-{arg}" + + # Patch get_current_request to return None + with mock.patch("application.commons.services.request_cache.get_current_request", return_value=None): + self.assertEqual(expensive(1), "result-1") + self.assertEqual(counter["calls"], 1) + self.assertEqual(expensive(1), "result-1") + self.assertEqual(counter["calls"], 2) # no caching + + def test_cache_key_generation_is_consistent(self): + """Cache key calculation should be order‑insensitive for kwargs.""" + key1 = rc._cache_calculate_key(1, 2, foo="bar", baz=3) + key2 = rc._cache_calculate_key(1, 2, baz=3, foo="bar") + key3 = rc._cache_calculate_key(1, 2, foo="bar", baz=4) + self.assertEqual(key1, key2) + self.assertNotEqual(key1, key3) + + def test_cache_key_contains_args_and_kwargs(self): + """The marker object should separate positional args from keyword args.""" + key_both = rc._cache_calculate_key(1, 2, foo="bar") + + # Args and kwargs should be present in the key string + marker_repr = str(rc.cache_args_kwargs_marker) + self.assertIn(marker_repr, key_both) diff --git a/backend/unittests/core/api/test_permissions.py b/backend/unittests/core/api/test_permissions.py index 9361d8857..1a6f1fcab 100644 --- a/backend/unittests/core/api/test_permissions.py +++ b/backend/unittests/core/api/test_permissions.py @@ -3,7 +3,7 @@ from django.http.request import HttpRequest from rest_framework.exceptions import ValidationError -from application.access_control.services.roles_permissions import Roles +from application.authorization.services.roles_permissions import Roles from application.core.api.permissions import 
UserHasProductMemberPermission from application.core.models import Product_Member from unittests.base_test_case import BaseTestCase @@ -11,68 +11,50 @@ class TestPermissions(BaseTestCase): @patch("application.core.api.permissions.get_highest_user_role") - def test_has_object_permission_delete_owner_no_product_member( - self, mock_get_highest_user_role - ): + def test_has_object_permission_delete_owner_no_product_member(self, mock_get_highest_user_role): mock_get_highest_user_role.return_value = None request = HttpRequest() request.user = self.user_internal request.method = "DELETE" - product_member = Product_Member( - product=self.product_1, user=self.user_external, role=Roles.Owner - ) + product_member = Product_Member(product=self.product_1, user=self.user_external, role=Roles.Owner) user_has_product_permission = UserHasProductMemberPermission() with self.assertRaises(ValidationError) as e: - user_has_product_permission.has_object_permission( - request=request, view=None, obj=product_member - ) + user_has_product_permission.has_object_permission(request=request, view=None, obj=product_member) self.assertEqual( "[ErrorDetail(string='You are not permitted to delete an Owner', code='invalid')]", str(e.exception), ) - mock_get_highest_user_role.assert_called_with( - self.product_1, self.user_internal - ) + mock_get_highest_user_role.assert_called_with(self.product_1, self.user_internal) @patch("application.core.api.permissions.get_highest_user_role") - def test_has_object_permission_delete_owner_not_owner( - self, mock_get_highest_user_role - ): + def test_has_object_permission_delete_owner_not_owner(self, mock_get_highest_user_role): mock_get_highest_user_role.return_value = 4 request = HttpRequest() request.user = self.user_internal request.method = "DELETE" - product_member = Product_Member( - product=self.product_1, user=self.user_external, role=Roles.Owner - ) + product_member = Product_Member(product=self.product_1, user=self.user_external, role=Roles.Owner) user_has_product_permission = UserHasProductMemberPermission() with self.assertRaises(ValidationError) as e: - user_has_product_permission.has_object_permission( - request=request, view=None, obj=product_member - ) + user_has_product_permission.has_object_permission(request=request, view=None, obj=product_member) self.assertEqual( "[ErrorDetail(string='You are not permitted to delete an Owner', code='invalid')]", str(e.exception), ) - mock_get_highest_user_role.assert_called_with( - self.product_1, self.user_internal - ) + mock_get_highest_user_role.assert_called_with(self.product_1, self.user_internal) @patch("application.core.api.permissions.get_highest_user_role") @patch("application.core.api.permissions.check_object_permission") - def test_has_object_permission_delete_owner_success( - self, mock_check, mock_get_highest_user_role - ): + def test_has_object_permission_delete_owner_success(self, mock_check, mock_get_highest_user_role): mock_check.return_value = True mock_get_highest_user_role.return_value = 5 @@ -80,18 +62,12 @@ def test_has_object_permission_delete_owner_success( request.user = self.user_internal request.method = "DELETE" - product_member = Product_Member( - product=self.product_1, user=self.user_external, role=Roles.Owner - ) + product_member = Product_Member(product=self.product_1, user=self.user_external, role=Roles.Owner) user_has_product_permission = UserHasProductMemberPermission() self.assertTrue( - user_has_product_permission.has_object_permission( - request=request, view=None, obj=product_member - ) - ) 
- mock_get_highest_user_role.assert_called_with( - self.product_1, self.user_internal + user_has_product_permission.has_object_permission(request=request, view=None, obj=product_member) ) + mock_get_highest_user_role.assert_called_with(self.product_1, self.user_internal) mock_check.assert_called_once() diff --git a/backend/unittests/core/api/test_serializers.py b/backend/unittests/core/api/test_serializers.py index df30bf322..8bf7f4291 100644 --- a/backend/unittests/core/api/test_serializers.py +++ b/backend/unittests/core/api/test_serializers.py @@ -3,7 +3,8 @@ from rest_framework.serializers import ValidationError from application.access_control.models import Authorization_Group -from application.access_control.services.roles_permissions import Permissions, Roles +from application.authorization.services.roles_permissions import Permissions, Roles +from application.commons.models import Settings from application.core.api.serializers_product import ( BranchSerializer, ProductAuthorizationGroupMemberSerializer, @@ -16,99 +17,10 @@ class TestBranchSerializer(BaseTestCase): - def test_is_default_branch_true(self): - branch_serializer = BranchSerializer() - self.assertTrue(branch_serializer.get_is_default_branch(obj=self.branch_1)) - - def test_is_default_branch_false(self): - branch_serializer = BranchSerializer() - self.assertFalse(branch_serializer.get_is_default_branch(obj=self.branch_2)) - - @patch("application.core.models.Observation.objects.filter") - def test_get_open_critical_observation_count(self, mock_filter): - mock_filter.return_value.count.return_value = 99 - branch_serializer = BranchSerializer() - self.assertEqual( - 99, - branch_serializer.get_open_critical_observation_count(obj=self.branch_1), - ) - mock_filter.assert_called_with( - branch=self.branch_1, - current_severity=Severity.SEVERITY_CRITICAL, - current_status=Status.STATUS_OPEN, - ) - - @patch("application.core.models.Observation.objects.filter") - def test_get_open_high_observation_count(self, mock_filter): - mock_filter.return_value.count.return_value = 99 - branch_serializer = BranchSerializer() - self.assertEqual( - 99, branch_serializer.get_open_high_observation_count(obj=self.branch_1) - ) - mock_filter.assert_called_with( - branch=self.branch_1, - current_severity=Severity.SEVERITY_HIGH, - current_status=Status.STATUS_OPEN, - ) - - @patch("application.core.models.Observation.objects.filter") - def test_get_open_medium_observation_count(self, mock_filter): - mock_filter.return_value.count.return_value = 99 - branch_serializer = BranchSerializer() - self.assertEqual( - 99, branch_serializer.get_open_medium_observation_count(obj=self.branch_1) - ) - mock_filter.assert_called_with( - branch=self.branch_1, - current_severity=Severity.SEVERITY_MEDIUM, - current_status=Status.STATUS_OPEN, - ) - - @patch("application.core.models.Observation.objects.filter") - def test_get_open_low_observation_count(self, mock_filter): - mock_filter.return_value.count.return_value = 99 - branch_serializer = BranchSerializer() - self.assertEqual( - 99, branch_serializer.get_open_low_observation_count(obj=self.branch_1) - ) - mock_filter.assert_called_with( - branch=self.branch_1, - current_severity=Severity.SEVERITY_LOW, - current_status=Status.STATUS_OPEN, - ) - - @patch("application.core.models.Observation.objects.filter") - def test_get_open_none_observation_count(self, mock_filter): - mock_filter.return_value.count.return_value = 99 - branch_serializer = BranchSerializer() - self.assertEqual( - 99, 
branch_serializer.get_open_none_observation_count(obj=self.branch_1) - ) - mock_filter.assert_called_with( - branch=self.branch_1, - current_severity=Severity.SEVERITY_NONE, - current_status=Status.STATUS_OPEN, - ) - - @patch("application.core.models.Observation.objects.filter") - def test_get_open_unknown_observation_count(self, mock_filter): - mock_filter.return_value.count.return_value = 99 - branch_serializer = BranchSerializer() - self.assertEqual( - 99, branch_serializer.get_open_unknown_observation_count(obj=self.branch_1) - ) - mock_filter.assert_called_with( - branch=self.branch_1, - current_severity=Severity.SEVERITY_UNKNOWN, - current_status=Status.STATUS_OPEN, - ) - @patch("application.core.api.serializers_product.get_current_user") @patch("application.core.api.serializers_product.get_highest_user_role") @patch("application.core.api.serializers_product.get_permissions_for_role") - def test_get_permissions_user( - self, mock_permissions, mock_highest_user_role, mock_user - ): + def test_get_permissions_user(self, mock_permissions, mock_highest_user_role, mock_user): mock_permissions.return_value = [Permissions.Product_View] mock_highest_user_role.return_value = Roles.Writer mock_user.return_value = self.user_internal @@ -135,12 +47,13 @@ def test_validate_security_gate_active_empty(self, mock_product_member): product_serializer = ProductSerializer(product) data = product_serializer.validate(product_serializer.data) - self.assertEqual(0, data["security_gate_threshold_critical"]) - self.assertEqual(0, data["security_gate_threshold_high"]) - self.assertEqual(0, data["security_gate_threshold_medium"]) - self.assertEqual(0, data["security_gate_threshold_low"]) - self.assertEqual(0, data["security_gate_threshold_none"]) - self.assertEqual(0, data["security_gate_threshold_unknown"]) + settings = Settings.load() + self.assertEqual(settings.security_gate_threshold_critical, data["security_gate_threshold_critical"]) + self.assertEqual(settings.security_gate_threshold_high, data["security_gate_threshold_high"]) + self.assertEqual(settings.security_gate_threshold_medium, data["security_gate_threshold_medium"]) + self.assertEqual(settings.security_gate_threshold_low, data["security_gate_threshold_low"]) + self.assertEqual(settings.security_gate_threshold_none, data["security_gate_threshold_none"]) + self.assertEqual(settings.security_gate_threshold_unknown, data["security_gate_threshold_unknown"]) @patch("application.core.api.serializers_product.get_product_member") def test_validate_security_gate_active_full(self, mock_product_member): @@ -286,9 +199,7 @@ def test_validate_add_owner_not_permitted(self, mock_highest_user_role, mock_use @patch("application.core.api.serializers_product.get_current_user") @patch("application.core.api.serializers_product.get_highest_user_role") - def test_validate_change_owner_not_permitted( - self, mock_highest_user_role, mock_user - ): + def test_validate_change_owner_not_permitted(self, mock_highest_user_role, mock_user): mock_highest_user_role.return_value = Roles.Maintainer mock_user.return_value = self.user_external self.product_member_1.role = Roles.Owner @@ -322,9 +233,7 @@ def test_validate_successful_with_instance(self, mock_highest_user_role, mock_us @patch("application.core.api.serializers_product.get_product_member") @patch("application.core.api.serializers_product.get_current_user") @patch("application.core.api.serializers_product.get_highest_user_role") - def test_validate_successful_no_instance( - self, mock_highest_user_role, mock_user, 
mock_product_member - ): + def test_validate_successful_no_instance(self, mock_highest_user_role, mock_user, mock_product_member): mock_product_member.return_value = None mock_highest_user_role.return_value = Roles.Maintainer mock_user.return_value = self.user_internal @@ -346,10 +255,8 @@ def test_validate_successful_no_instance( class TestProductAuthorizationGroupMemberSerializer(BaseTestCase): def test_validate_product_change(self): product_2 = Product(name="product_2") - product_authorization_group_member_serializer = ( - ProductAuthorizationGroupMemberSerializer( - self.product_authorization_group_member_1 - ) + product_authorization_group_member_serializer = ProductAuthorizationGroupMemberSerializer( + self.product_authorization_group_member_1 ) attrs = { "product": product_2, @@ -365,10 +272,8 @@ def test_validate_product_change(self): def test_validate_authorization_group_change(self): authorization_group_1 = Authorization_Group(name="authorization_group_2") - product_authorization_group_member_serializer = ( - ProductAuthorizationGroupMemberSerializer( - self.product_authorization_group_member_1 - ) + product_authorization_group_member_serializer = ProductAuthorizationGroupMemberSerializer( + self.product_authorization_group_member_1 ) attrs = { "authorization_group": authorization_group_1, @@ -382,16 +287,10 @@ def test_validate_authorization_group_change(self): str(e.exception), ) - @patch( - "application.core.api.serializers_product.get_product_authorization_group_member" - ) + @patch("application.core.api.serializers_product.get_product_authorization_group_member") def test_validate_already_exists(self, mock_product_authorization_group_member): - mock_product_authorization_group_member.return_value = ( - self.product_authorization_group_member_1 - ) - product_authorization_group_member_serializer = ( - ProductAuthorizationGroupMemberSerializer() - ) + mock_product_authorization_group_member.return_value = self.product_authorization_group_member_1 + product_authorization_group_member_serializer = ProductAuthorizationGroupMemberSerializer() attrs = { "product": self.product_1, "authorization_group": self.authorization_group_1, @@ -404,19 +303,15 @@ def test_validate_already_exists(self, mock_product_authorization_group_member): "[ErrorDetail(string='Product authorization group member product_1 / authorization_group_1 already exists', code='invalid')]", str(e.exception), ) - mock_product_authorization_group_member.assert_called_with( - self.product_1, self.authorization_group_1 - ) + mock_product_authorization_group_member.assert_called_with(self.product_1, self.authorization_group_1) @patch("application.core.api.serializers_product.get_current_user") @patch("application.core.api.serializers_product.get_highest_user_role") def test_validate_add_owner_not_permitted(self, mock_highest_user_role, mock_user): mock_highest_user_role.return_value = Roles.Maintainer mock_user.return_value = self.user_external - product_authorization_group_member_serializer = ( - ProductAuthorizationGroupMemberSerializer( - self.product_authorization_group_member_1 - ) + product_authorization_group_member_serializer = ProductAuthorizationGroupMemberSerializer( + self.product_authorization_group_member_1 ) attrs = {"role": Roles.Owner} @@ -432,16 +327,12 @@ def test_validate_add_owner_not_permitted(self, mock_highest_user_role, mock_use @patch("application.core.api.serializers_product.get_current_user") @patch("application.core.api.serializers_product.get_highest_user_role") - def 
test_validate_change_owner_not_permitted( - self, mock_highest_user_role, mock_user - ): + def test_validate_change_owner_not_permitted(self, mock_highest_user_role, mock_user): mock_highest_user_role.return_value = Roles.Maintainer mock_user.return_value = self.user_external self.product_authorization_group_member_1.role = Roles.Owner - product_authorization_group_member_serializer = ( - ProductAuthorizationGroupMemberSerializer( - self.product_authorization_group_member_1 - ) + product_authorization_group_member_serializer = ProductAuthorizationGroupMemberSerializer( + self.product_authorization_group_member_1 ) attrs = {"role": Roles.Writer} @@ -460,10 +351,8 @@ def test_validate_change_owner_not_permitted( def test_validate_successful_with_instance(self, mock_highest_user_role, mock_user): mock_highest_user_role.return_value = Roles.Maintainer mock_user.return_value = self.user_internal - product_authorization_group_member_serializer = ( - ProductAuthorizationGroupMemberSerializer( - self.product_authorization_group_member_1 - ) + product_authorization_group_member_serializer = ProductAuthorizationGroupMemberSerializer( + self.product_authorization_group_member_1 ) attrs = {"role": Roles.Writer} @@ -473,9 +362,7 @@ def test_validate_successful_with_instance(self, mock_highest_user_role, mock_us mock_highest_user_role.assert_called_with(self.product_1, self.user_internal) mock_user.assert_called_once() - @patch( - "application.core.api.serializers_product.get_product_authorization_group_member" - ) + @patch("application.core.api.serializers_product.get_product_authorization_group_member") @patch("application.core.api.serializers_product.get_current_user") @patch("application.core.api.serializers_product.get_highest_user_role") def test_validate_successful_no_instance( @@ -484,9 +371,7 @@ def test_validate_successful_no_instance( mock_product_authorization_group_member.return_value = None mock_highest_user_role.return_value = Roles.Maintainer mock_user.return_value = self.user_internal - product_authorization_group_member_serializer = ( - ProductAuthorizationGroupMemberSerializer() - ) + product_authorization_group_member_serializer = ProductAuthorizationGroupMemberSerializer() attrs = { "product": self.product_1, "authorization_group": self.authorization_group_1, @@ -496,8 +381,6 @@ def test_validate_successful_no_instance( new_attrs = product_authorization_group_member_serializer.validate(attrs) self.assertEqual(new_attrs, attrs) - mock_product_authorization_group_member.assert_called_with( - self.product_1, self.authorization_group_1 - ) + mock_product_authorization_group_member.assert_called_with(self.product_1, self.authorization_group_1) mock_highest_user_role.assert_called_with(self.product_1, self.user_internal) mock_user.assert_called_once() diff --git a/backend/unittests/core/migrations/__init__.py b/backend/unittests/core/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/core/migrations/test_0064_product_description_markdown.py b/backend/unittests/core/migrations/test_0064_product_description_markdown.py new file mode 100644 index 000000000..3082ef34b --- /dev/null +++ b/backend/unittests/core/migrations/test_0064_product_description_markdown.py @@ -0,0 +1,34 @@ +from html_to_markdown import convert_to_markdown + +from unittests.base_test_case import BaseTestCase + + +class TestMarkdownConversion(BaseTestCase): + def test_convert_markdown(self): + description = """

<h1>Heading 1</h1>
<h2>Heading 2</h2>
<ul>
  <li><strong>bold</strong></li>
  <li><em>italics</em></li>
  <li><u>underscore</u></li>
  <li><s>strikethrough</s></li>
</ul>
<ol>
  <li>line 1</li>
  <li>line 2</li>
</ol>
<p><code>code</code></p>
<p><a href="https://secobserve.github.io/SecObserve/">SecObserve</a></p>

""" + description = convert_to_markdown(description) + self.assertEqual( + """Heading 1 +========= + +Heading 2 +--------- + +* **bold** + +* *italics* + +* underscore + +* ~~strikethrough~~ + +1. line 1 + +2. line 2 + +`code` + +[SecObserve](https://secobserve.github.io/SecObserve/) +""", + description, + ) diff --git a/backend/unittests/core/services/files/duplicates_cdx.json b/backend/unittests/core/services/files/duplicates_cdx.json index 99a846f00..8d6184b64 100644 --- a/backend/unittests/core/services/files/duplicates_cdx.json +++ b/backend/unittests/core/services/files/duplicates_cdx.json @@ -981,10 +981,10 @@ ] }, { - "bom-ref": "pkg:apk/alpine/libuv@1.47.0-r0?arch=x86_64&distro=3.19.2", + "bom-ref": "pkg:apk/alpine/libuv@1.48.0-r0?arch=x86_64&distro=3.19.2", "type": "library", "name": "libuv", - "version": "1.47.0-r0", + "version": "1.48.0-r0", "hashes": [ { "alg": "SHA-1", @@ -998,7 +998,7 @@ } } ], - "purl": "pkg:apk/alpine/libuv@1.47.0-r0?arch=x86_64&distro=3.19.2", + "purl": "pkg:apk/alpine/libuv@1.48.0-r0?arch=x86_64&distro=3.19.2", "properties": [ { "name": "aquasecurity:trivy:LayerDiffID", @@ -1010,7 +1010,7 @@ }, { "name": "aquasecurity:trivy:PkgID", - "value": "libuv@1.47.0-r0" + "value": "libuv@1.48.0-r0" }, { "name": "aquasecurity:trivy:PkgType", @@ -1022,7 +1022,7 @@ }, { "name": "aquasecurity:trivy:SrcVersion", - "value": "1.47.0-r0" + "value": "1.48.0-r0" } ] }, @@ -1577,7 +1577,7 @@ "pkg:apk/alpine/libintl@0.22.3-r0?arch=x86_64&distro=3.19.2", "pkg:apk/alpine/libssl3@3.1.5-r0?arch=x86_64&distro=3.19.2", "pkg:apk/alpine/libstdc%2B%2B@13.2.1_git20231014-r0?arch=x86_64&distro=3.19.2", - "pkg:apk/alpine/libuv@1.47.0-r0?arch=x86_64&distro=3.19.2", + "pkg:apk/alpine/libuv@1.48.0-r0?arch=x86_64&distro=3.19.2", "pkg:apk/alpine/musl-utils@1.2.4_git20230717-r4?arch=x86_64&distro=3.19.2", "pkg:apk/alpine/musl@1.2.4_git20230717-r4?arch=x86_64&distro=3.19.2", "pkg:apk/alpine/nghttp2-libs@1.58.0-r0?arch=x86_64&distro=3.19.2", @@ -1720,7 +1720,7 @@ ] }, { - "ref": "pkg:apk/alpine/libuv@1.47.0-r0?arch=x86_64&distro=3.19.2", + "ref": "pkg:apk/alpine/libuv@1.48.0-r0?arch=x86_64&distro=3.19.2", "dependsOn": [ "pkg:apk/alpine/musl@1.2.4_git20230717-r4?arch=x86_64&distro=3.19.2" ] @@ -1765,7 +1765,7 @@ "pkg:apk/alpine/libgcc@13.2.1_git20231014-r0?arch=x86_64&distro=3.19.2", "pkg:apk/alpine/libssl3@3.1.5-r0?arch=x86_64&distro=3.19.2", "pkg:apk/alpine/libstdc%2B%2B@13.2.1_git20231014-r0?arch=x86_64&distro=3.19.2", - "pkg:apk/alpine/libuv@1.47.0-r0?arch=x86_64&distro=3.19.2", + "pkg:apk/alpine/libuv@1.48.0-r0?arch=x86_64&distro=3.19.2", "pkg:apk/alpine/musl@1.2.4_git20230717-r4?arch=x86_64&distro=3.19.2", "pkg:apk/alpine/nghttp2-libs@1.58.0-r0?arch=x86_64&distro=3.19.2", "pkg:apk/alpine/zlib@1.3.1-r0?arch=x86_64&distro=3.19.2" diff --git a/backend/unittests/core/services/test_branch.py b/backend/unittests/core/services/test_branch.py new file mode 100644 index 000000000..cda16874e --- /dev/null +++ b/backend/unittests/core/services/test_branch.py @@ -0,0 +1,123 @@ +from unittest.mock import patch + +from django.test import TestCase + +from application.core.models import Branch, Product +from application.core.services.branch import set_default_branch + + +class TestSetBranchService(TestCase): + def setUp(self): + self.product = Product.objects.create(name="Test Product", repository_default_branch=None) + self.branch_non_default = Branch.objects.create( + name="non_default", product=self.product, is_default_branch=False + ) + self.branch_default = Branch.objects.create(name="default", 
product=self.product, is_default_branch=True) + + @patch("application.core.services.branch.Branch.objects") + @patch("application.core.services.branch.Branch.save") + @patch("application.core.models.Product.save") + def test_set_default_branch_when_branch_is_already_default( + self, mock_product_save, mock_branch_save, mock_branch_objects + ): + # Set up the branch as default + self.branch_non_default.is_default_branch = True + + # Create another branch for the same product + other_branch = Branch(name="develop", product=self.product, is_default_branch=True) + + # Mock the filter to return the other branch + mock_branch_objects.filter.return_value.exclude.return_value = [other_branch] + + set_default_branch(self.branch_non_default, False) + + # Verify that the other branch was set to not default + self.assertFalse(other_branch.is_default_branch) + mock_branch_save.assert_called() + + # Verify that product's repository_default_branch was set + self.assertEqual(self.product.repository_default_branch, self.branch_non_default) + mock_product_save.assert_called() + + # Verify the filter has been called with the correct arguments + mock_branch_objects.filter.assert_called_once_with(product=self.product, is_default_branch=True) + mock_branch_objects.filter().exclude.assert_called_once_with(pk=self.branch_non_default.pk) + + @patch("application.core.services.branch.Branch.save") + @patch("application.core.models.Product.save") + def test_set_default_branch_when_branch_is_not_default_and_not_product_default( + self, mock_product_save, mock_branch_save + ): + # Set up product with a default branch + self.product.repository_default_branch = self.branch_default + + # Set up the branch as not default + self.branch_default.is_default_branch = False + + set_default_branch(self.branch_default, False) + + # Verify that product's repository_default_branch was set to None + self.assertIsNone(self.product.repository_default_branch) + mock_branch_save.assert_not_called() + mock_product_save.assert_called() + + @patch("application.core.services.branch.Branch.save") + @patch("application.core.models.Product.save") + def test_set_default_branch_when_branch_is_not_default_and_product_has_no_default( + self, mock_product_save, mock_branch_save + ): + # Product has no default branch set + self.product.repository_default_branch = None + + # Set up the branch as not default + self.branch_default.is_default_branch = False + + set_default_branch(self.branch_default, False) + + # Verify that nothing changed + self.assertIsNone(self.product.repository_default_branch) + mock_branch_save.assert_not_called() + mock_product_save.assert_not_called() + + @patch("application.core.services.branch.Branch.objects") + @patch("application.core.services.branch.Branch.save") + @patch("application.core.models.Product.save") + def test_set_default_branch_with_multiple_other_default_branches( + self, mock_product_save, mock_branch_save, mock_branch_objects + ): + # Set up the branch as default + self.branch_non_default.is_default_branch = True + + # Create multiple other branches for the same product + branch1 = Branch(name="branch1", product=self.product, is_default_branch=True) + branch2 = Branch(name="branch2", product=self.product, is_default_branch=True) + + # Mock the filter to return the other branches + mock_branch_objects.filter.return_value.exclude.return_value = [branch1, branch2] + + set_default_branch(self.branch_non_default, False) + + # Verify that all other branches were set to not default + 
self.assertFalse(branch1.is_default_branch) + self.assertFalse(branch2.is_default_branch) + self.assertEqual(mock_branch_save.call_count, 2) # 2 other branches + + # Verify that product's repository_default_branch was set + self.assertEqual(self.product.repository_default_branch, self.branch_non_default) + mock_product_save.assert_called() + + # Verify the filter has been called with the correct arguments + mock_branch_objects.filter.assert_called_once_with(product=self.product, is_default_branch=True) + mock_branch_objects.filter().exclude.assert_called_once_with(pk=self.branch_non_default.pk) + + @patch("application.core.services.branch.Branch.objects") + @patch("application.core.services.branch.Branch.save") + @patch("application.core.models.Product.save") + def test_is_default_branch_not_dirty(self, mock_product_save, mock_branch_save, mock_branch_objects): + set_default_branch(self.branch_non_default, False) + + # Verify no mocks have been called + mock_branch_objects.filter.assert_not_called() + mock_branch_objects.filter().exclude.assert_not_called() + mock_product_save.assert_not_called() + mock_branch_save.assert_not_called() diff --git a/backend/unittests/core/services/test_housekeeping.py b/backend/unittests/core/services/test_housekeeping.py index cdfa33415..4b0bb2313 100644 --- a/backend/unittests/core/services/test_housekeeping.py +++ b/backend/unittests/core/services/test_housekeeping.py @@ -29,14 +29,13 @@ def setUp(self) -> None: return super().setUp() - @patch( - "application.core.services.housekeeping.delete_inactive_branches_for_product" - ) - def test_delete_inactive_branches( - self, mock_delete_inactive_branches_for_product: Mock - ): - delete_inactive_branches_and_set_flags() + @patch("application.core.services.housekeeping.delete_inactive_branches_for_product") + def test_delete_inactive_branches(self, mock_delete_inactive_branches_for_product: Mock): + mock_delete_inactive_branches_for_product.return_value = 2 + message = delete_inactive_branches_and_set_flags() + + self.assertEqual(message, "Deleted 4 inactive branches in 2 products.") expected_calls = [ call(Product.objects.get(name="db_product_internal")), call(Product.objects.get(name="db_product_external")), @@ -114,9 +113,7 @@ def test_delete_inactive_branches_for_product_exempt(self, mock_settings_load): self.fail("Branch should not have been deleted") @patch("application.commons.models.Settings.load") - def test_delete_inactive_branches_for_product_product_not_active( - self, mock_settings_load - ): + def test_delete_inactive_branches_for_product_product_not_active(self, mock_settings_load): settings = Settings() settings.branch_housekeeping_keep_inactive_days = 9 mock_settings_load.return_value = settings @@ -134,9 +131,7 @@ def test_delete_inactive_branches_for_product_product_not_active( self.fail("Branch should not have been deleted") @patch("application.commons.models.Settings.load") - def test_delete_inactive_branches_for_product_product_specific_delete( - self, mock_settings_load - ): + def test_delete_inactive_branches_for_product_product_specific_delete(self, mock_settings_load): settings = Settings() settings.branch_housekeeping_active = False mock_settings_load.return_value = settings @@ -160,9 +155,7 @@ def test_delete_inactive_branches_for_product_product_specific_delete( pass @patch("application.commons.models.Settings.load") - def test_delete_inactive_branches_for_product_product_specific_too_early( - self, mock_settings_load - ): + def 
test_delete_inactive_branches_for_product_product_specific_too_early(self, mock_settings_load): settings = Settings() settings.branch_housekeeping_active = False mock_settings_load.return_value = settings @@ -181,9 +174,7 @@ def test_delete_inactive_branches_for_product_product_specific_too_early( self.fail("Branch should not have been deleted") @patch("application.commons.models.Settings.load") - def test_delete_inactive_branches_for_product_product_specific_exempt( - self, mock_settings_load - ): + def test_delete_inactive_branches_for_product_product_specific_exempt(self, mock_settings_load): settings = Settings() settings.branch_housekeeping_active = False mock_settings_load.return_value = settings @@ -191,9 +182,7 @@ def test_delete_inactive_branches_for_product_product_specific_exempt( product = Product.objects.get(name="db_product_internal") product.repository_branch_housekeeping_active = True product.repository_branch_housekeeping_keep_inactive_days = 9 - product.repository_branch_housekeeping_exempt_branches = ( - "db_branch_internal_m.*" - ) + product.repository_branch_housekeeping_exempt_branches = "db_branch_internal_m.*" product.save() delete_inactive_branches_for_product(product) @@ -205,9 +194,7 @@ def test_delete_inactive_branches_for_product_product_specific_exempt( self.fail("Branch should not have been deleted") @patch("application.commons.models.Settings.load") - def test_delete_inactive_branches_for_product_product_specific_protected( - self, mock_settings_load - ): + def test_delete_inactive_branches_for_product_product_specific_protected(self, mock_settings_load): settings = Settings() settings.branch_housekeeping_active = False mock_settings_load.return_value = settings @@ -229,9 +216,7 @@ def test_delete_inactive_branches_for_product_product_specific_protected( self.fail("Branch should not have been deleted") @patch("application.commons.models.Settings.load") - def test_delete_inactive_branches_for_product_product_group_not_active( - self, mock_settings_load - ): + def test_delete_inactive_branches_for_product_product_group_not_active(self, mock_settings_load): settings = Settings() settings.branch_housekeeping_active = False mock_settings_load.return_value = settings @@ -254,9 +239,7 @@ def test_delete_inactive_branches_for_product_product_group_not_active( self.fail("Branch should not have been deleted") @patch("application.commons.models.Settings.load") - def test_delete_inactive_branches_for_product_product_group_too_early( - self, mock_settings_load - ): + def test_delete_inactive_branches_for_product_product_group_too_early(self, mock_settings_load): settings = Settings() settings.branch_housekeeping_active = False mock_settings_load.return_value = settings @@ -280,9 +263,7 @@ def test_delete_inactive_branches_for_product_product_group_too_early( self.fail("Branch should not have been deleted") @patch("application.commons.models.Settings.load") - def test_delete_inactive_branches_for_product_product_group_exempt( - self, mock_settings_load - ): + def test_delete_inactive_branches_for_product_product_group_exempt(self, mock_settings_load): settings = Settings() settings.branch_housekeeping_active = False mock_settings_load.return_value = settings @@ -290,9 +271,7 @@ def test_delete_inactive_branches_for_product_product_group_exempt( product_group = Product.objects.get(name="db_product_group") product_group.repository_branch_housekeeping_active = True product_group.repository_branch_housekeeping_keep_inactive_days = 9 - 
product_group.repository_branch_housekeeping_exempt_branches = ( - "db_branch_internal_m.*" - ) + product_group.repository_branch_housekeeping_exempt_branches = "db_branch_internal_m.*" product_group.save() product = Product.objects.get(name="db_product_internal") product.product_group = product_group @@ -309,9 +288,7 @@ def test_delete_inactive_branches_for_product_product_group_exempt( self.fail("Branch should not have been deleted") @patch("application.commons.models.Settings.load") - def test_delete_inactive_branches_for_product_product_group_delete( - self, mock_settings_load - ): + def test_delete_inactive_branches_for_product_product_group_delete(self, mock_settings_load): settings = Settings() settings.branch_housekeeping_active = False mock_settings_load.return_value = settings diff --git a/backend/unittests/core/services/test_observation.py b/backend/unittests/core/services/test_observation.py index f9b63a04e..4864b5b11 100644 --- a/backend/unittests/core/services/test_observation.py +++ b/backend/unittests/core/services/test_observation.py @@ -3,6 +3,8 @@ from application.core.models import Observation from application.core.services.observation import ( _get_string_to_hash, + _normalize_update_impact_score_and_fix_available, + get_current_priority, get_current_severity, get_current_status, get_identity_hash, @@ -15,6 +17,8 @@ class TestObservation(BaseTestCase): def setUp(self) -> None: self.addTypeEqualityFunc(Observation, _observation_equal) + self.observation = Observation() + return super().setUp() # --- identity hash --- @@ -73,6 +77,7 @@ def test_get_current_severity_assessment(self): title="assessment_severity", current_severity=Severity.SEVERITY_NONE, parser_severity=Severity.SEVERITY_LOW, + rule_rego_severity=Severity.SEVERITY_LOW, rule_severity=Severity.SEVERITY_LOW, assessment_severity=Severity.SEVERITY_MEDIUM, cvss3_score=9.5, @@ -90,6 +95,17 @@ def test_get_current_severity_rule(self): ) self.assertEqual(Severity.SEVERITY_MEDIUM, get_current_severity(observation)) + def test_get_current_severity_rule_rego(self): + observation = Observation( + title="rule_severity", + current_severity=Severity.SEVERITY_NONE, + parser_severity=Severity.SEVERITY_CRITICAL, + rule_severity=Severity.SEVERITY_MEDIUM, + rule_rego_severity=Severity.SEVERITY_LOW, + cvss3_score=9.5, + ) + self.assertEqual(Severity.SEVERITY_LOW, get_current_severity(observation)) + def test_get_current_severity_parser(self): observation = Observation( title="parser_severity", @@ -199,6 +215,7 @@ def test_get_current_status_assessment(self): current_status=Status.STATUS_RESOLVED, parser_status=Status.STATUS_NOT_AFFECTED, rule_status=Status.STATUS_DUPLICATE, + rule_rego_status=Status.STATUS_DUPLICATE, assessment_status=Status.STATUS_FALSE_POSITIVE, cvss3_score=9.5, ) @@ -214,6 +231,17 @@ def test_get_current_status_rule(self): ) self.assertEqual(Status.STATUS_DUPLICATE, get_current_status(observation)) + def test_get_current_status_rule_rego(self): + observation = Observation( + title="assessment_status", + current_status=Status.STATUS_RESOLVED, + parser_status=Status.STATUS_NOT_AFFECTED, + rule_status=Status.STATUS_FALSE_POSITIVE, + rule_rego_status=Status.STATUS_DUPLICATE, + cvss3_score=9.5, + ) + self.assertEqual(Status.STATUS_DUPLICATE, get_current_status(observation)) + def test_get_current_status_parser(self): observation = Observation( title="parser_status", @@ -222,6 +250,42 @@ def test_get_current_status_parser(self): ) self.assertEqual(Status.STATUS_NOT_AFFECTED, get_current_status(observation)) + # --- 
get_current_priority --- + + def test_get_current_priority_assessment(self): + observation = Observation( + title="assessment_priority", + current_priority=4, + rule_priority=3, + rule_rego_priority=2, + assessment_priority=1, + ) + self.assertEqual(1, get_current_priority(observation)) + + def test_get_current_priority_rule_rego(self): + observation = Observation( + title="assessment_status", + current_priority=4, + rule_priority=3, + rule_rego_priority=2, + ) + self.assertEqual(2, get_current_priority(observation)) + + def test_get_current_priority_rule(self): + observation = Observation( + title="assessment_status", + current_priority=4, + rule_priority=3, + ) + self.assertEqual(3, get_current_priority(observation)) + + def test_get_current_priority(self): + observation = Observation( + title="assessment_status", + current_priority=4, + ) + self.assertEqual(None, get_current_priority(observation)) + # --- normalize_observation_fields --- def test_normalize_observation_fields_empty(self): @@ -239,11 +303,7 @@ def test_normalize_observation_fields_none(self): before_observation = Observation(title="empty") after_observation = deepcopy(before_observation) for key in dir(after_observation): - if ( - key not in _get_excludes() - and not callable(getattr(after_observation, key)) - and not key.startswith("_") - ): + if key not in _get_excludes() and not callable(getattr(after_observation, key)) and not key.startswith("_"): value = after_observation.__dict__.get(key) if value == "": after_observation.__dict__[key] = None @@ -277,9 +337,7 @@ def test_normalize_observation_fields_selected_fields(self): self.assertEqual(before_observation, after_observation) def test_normalize_observation_fields_origin_component_name_version_1(self): - before_observation = Observation( - title="empty", origin_component_name_version="component_name" - ) + before_observation = Observation(title="empty", origin_component_name_version="component_name") after_observation = deepcopy(before_observation) before_observation.current_severity = Severity.SEVERITY_UNKNOWN @@ -287,6 +345,7 @@ def test_normalize_observation_fields_origin_component_name_version_1(self): before_observation.current_status = Status.STATUS_OPEN before_observation.origin_component_name = "component_name" before_observation.origin_component_version = "" + before_observation.fix_available = False normalize_observation_fields(after_observation) self.assertEqual(before_observation, after_observation) @@ -303,6 +362,7 @@ def test_normalize_observation_fields_origin_component_name_version_2(self): before_observation.current_status = Status.STATUS_OPEN before_observation.origin_component_name = "component_name" before_observation.origin_component_version = "component_version" + before_observation.fix_available = False normalize_observation_fields(after_observation) self.assertEqual(before_observation, after_observation) @@ -318,31 +378,27 @@ def test_normalize_observation_fields_origin_component_name_component_version(se before_observation.current_severity = Severity.SEVERITY_UNKNOWN before_observation.numerical_severity = 6 before_observation.current_status = Status.STATUS_OPEN - before_observation.origin_component_name_version = ( - "component_name:component_version" - ) + before_observation.origin_component_name_version = "component_name:component_version" + before_observation.fix_available = False normalize_observation_fields(after_observation) self.assertEqual(before_observation, after_observation) def 
test_normalize_observation_fields_origin_component_name(self): - before_observation = Observation( - title="empty", origin_component_name="component_name" - ) + before_observation = Observation(title="empty", origin_component_name="component_name") after_observation = deepcopy(before_observation) before_observation.current_severity = Severity.SEVERITY_UNKNOWN before_observation.numerical_severity = 6 before_observation.current_status = Status.STATUS_OPEN before_observation.origin_component_name_version = "component_name" + before_observation.fix_available = False normalize_observation_fields(after_observation) self.assertEqual(before_observation, after_observation) def test_normalize_observation_fields_origin_docker_image_name_tag_1(self): - before_observation = Observation( - title="empty", origin_docker_image_name_tag="docker_image_name" - ) + before_observation = Observation(title="empty", origin_docker_image_name_tag="docker_image_name") after_observation = deepcopy(before_observation) before_observation.current_severity = Severity.SEVERITY_UNKNOWN @@ -366,9 +422,7 @@ def test_normalize_observation_fields_origin_docker_image_name_tag_2(self): before_observation.current_status = Status.STATUS_OPEN before_observation.origin_docker_image_name = "docker_image_name" before_observation.origin_docker_image_tag = "docker_image_tag" - before_observation.origin_docker_image_name_tag_short = ( - "docker_image_name:docker_image_tag" - ) + before_observation.origin_docker_image_name_tag_short = "docker_image_name:docker_image_tag" normalize_observation_fields(after_observation) self.assertEqual(before_observation, after_observation) @@ -386,20 +440,14 @@ def test_normalize_observation_fields_origin_docker_image_name_docker_image_tag( before_observation.current_severity = Severity.SEVERITY_UNKNOWN before_observation.numerical_severity = 6 before_observation.current_status = Status.STATUS_OPEN - before_observation.origin_docker_image_name_tag = ( - "docker_image_name:docker_image_tag" - ) - before_observation.origin_docker_image_name_tag_short = ( - "docker_image_name:docker_image_tag" - ) + before_observation.origin_docker_image_name_tag = "docker_image_name:docker_image_tag" + before_observation.origin_docker_image_name_tag_short = "docker_image_name:docker_image_tag" normalize_observation_fields(after_observation) self.assertEqual(before_observation, after_observation) def test_normalize_observation_fields_origin_docker_image_name(self): - before_observation = Observation( - title="empty", origin_docker_image_name="docker_image_name" - ) + before_observation = Observation(title="empty", origin_docker_image_name="docker_image_name") after_observation = deepcopy(before_observation) before_observation.current_severity = Severity.SEVERITY_UNKNOWN @@ -411,20 +459,126 @@ def test_normalize_observation_fields_origin_docker_image_name(self): normalize_observation_fields(after_observation) self.assertEqual(before_observation, after_observation) + # --- update_impact_score and fix_available --- + + def test_no_origin_component_name(self): + """Test when origin_component_name is None or empty""" + self.observation.origin_component_name = None + self.observation.recommendation = "Upgrade to version 2.0.0" + self.observation.origin_component_version = "1.0.0" + + _normalize_update_impact_score_and_fix_available(self.observation) + + self.assertIsNone(self.observation.fix_available) + self.assertIsNone(self.observation.update_impact_score) + + def test_fix_available_false_no_recommendation(self): + """Test when 
there's no recommendation""" + self.observation.origin_component_name = "test-component" + self.observation.recommendation = None + self.observation.origin_component_version = "1.0.0" + + _normalize_update_impact_score_and_fix_available(self.observation) + + self.assertFalse(self.observation.fix_available) + self.assertIsNone(self.observation.update_impact_score) + + def test_fix_available_false_no_component_version(self): + """Test when there's no component version""" + self.observation.origin_component_name = "test-component" + self.observation.recommendation = "Upgrade to version 2.0.0" + self.observation.origin_component_version = None + + _normalize_update_impact_score_and_fix_available(self.observation) + + self.assertTrue(self.observation.fix_available) + self.assertIsNone(self.observation.update_impact_score) + + def test_fix_available_true_with_simple_recommendation(self): + """Test when there's a simple recommendation with version""" + self.observation.origin_component_name = "test-component" + self.observation.recommendation = "Upgrade to version 2.0.0" + self.observation.origin_component_version = "1.0.0" + + _normalize_update_impact_score_and_fix_available(self.observation) + + self.assertTrue(self.observation.fix_available) + self.assertEqual(self.observation.update_impact_score, 100) # 1 major version diff + + def test_fix_available_true_with_complex_recommendation(self): + """Test when there's a complex recommendation with number in component name""" + self.observation.origin_component_name = "test-component1" + self.observation.recommendation = "Upgrade test-component1 to version 2.0.0" + self.observation.origin_component_version = "1.0.0" + + _normalize_update_impact_score_and_fix_available(self.observation) + + self.assertTrue(self.observation.fix_available) + self.assertEqual(self.observation.update_impact_score, 100) # 1 major version diff + + def test_fix_available_true_minor_version_diff(self): + """Test when there's a minor version difference""" + self.observation.origin_component_name = "test-component" + self.observation.recommendation = "Upgrade to version 1.5.0" + self.observation.origin_component_version = "1.0.0" + + _normalize_update_impact_score_and_fix_available(self.observation) + + self.assertTrue(self.observation.fix_available) + self.assertEqual(self.observation.update_impact_score, 50) # 5 minor version diff + + def test_fix_available_true_patch_version_diff(self): + """Test when there's a patch version difference""" + self.observation.origin_component_name = "test-component" + self.observation.recommendation = "Upgrade to version 1.0.5" + self.observation.origin_component_version = "1.0.0" + + _normalize_update_impact_score_and_fix_available(self.observation) + + self.assertTrue(self.observation.fix_available) + self.assertEqual(self.observation.update_impact_score, 5) # 5 patch version diff + + def test_fix_available_true_no_version_diff(self): + """Test when there's no version difference""" + self.observation.origin_component_name = "test-component" + self.observation.recommendation = "Upgrade to version 1.0.0" + self.observation.origin_component_version = "1.0.0" + + _normalize_update_impact_score_and_fix_available(self.observation) + + self.assertTrue(self.observation.fix_available) + self.assertEqual(self.observation.update_impact_score, 0) # No diff + + def test_fix_available_true_with_complex_version_numbers(self): + """Test with complex version numbers""" + self.observation.origin_component_name = "test-component" + 
self.observation.recommendation = "Upgrade to version 10.5.2" + self.observation.origin_component_version = "2.1.0" + + _normalize_update_impact_score_and_fix_available(self.observation) + + self.assertTrue(self.observation.fix_available) + self.assertEqual(self.observation.update_impact_score, 800) # 8 major versions diff + + def test_fix_available_true_with_version_numbers_with_chars(self): + """Test with a version number containing characters""" + self.observation.origin_component_name = "test-component" + self.observation.recommendation = "Upgrade package protobuf to version 32.dev0 or above." + self.observation.origin_component_version = "30..dev0" + + _normalize_update_impact_score_and_fix_available(self.observation) + + self.assertTrue(self.observation.fix_available) + self.assertEqual(self.observation.update_impact_score, 200) # 8 major versions diff + def _observation_equal(expected_observation, actual_observation, msg=None): for key in dir(expected_observation): - if ( - key not in _get_excludes() - and not callable(getattr(expected_observation, key)) - and not key.startswith("_") - ): + if key not in _get_excludes() and not callable(getattr(expected_observation, key)) and not key.startswith("_"): expected_value = expected_observation.__dict__.get(key) actual_value = actual_observation.__dict__.get(key) if expected_value != actual_value: - raise AssertionError( - f"Key {key}: expected: {expected_value}, actual: {actual_value}" - ) + raise AssertionError(f"Key {key}: expected: {expected_value}, actual: {actual_value}") def _get_excludes(): diff --git a/backend/unittests/core/services/test_observation_log.py b/backend/unittests/core/services/test_observation_log.py index e7a37023b..2ff32424a 100644 --- a/backend/unittests/core/services/test_observation_log.py +++ b/backend/unittests/core/services/test_observation_log.py @@ -42,9 +42,7 @@ def test_create_observation_log( ) self.assertEqual(date(2024, 7, 1), observation_log.risk_acceptance_expiry_date) - self.assertEqual( - self.observation_1.last_observation_log, observation_log.created - ) + self.assertEqual(self.observation_1.last_observation_log, observation_log.created) observation_log.save.assert_called_once() self.observation_1.save.assert_called_once() diff --git a/backend/unittests/core/services/test_observations_bulk_actions.py b/backend/unittests/core/services/test_observations_bulk_actions.py index 0770659c2..c115cb985 100644 --- a/backend/unittests/core/services/test_observations_bulk_actions.py +++ b/backend/unittests/core/services/test_observations_bulk_actions.py @@ -10,7 +10,7 @@ observations_bulk_assessment, observations_bulk_delete, ) -from application.core.types import Severity, Status, VexJustification +from application.core.types import Severity, Status, VEX_Justification from unittests.base_test_case import BaseTestCase @@ -31,7 +31,7 @@ def test_observations_bulk_assessment(self, save_mock, check_mock): new_status=Status.STATUS_OPEN, comment="comment", observation_ids=[1, 2], - new_vex_justification=VexJustification.STATUS_COMPONENT_NOT_PRESENT, + new_vex_justification=VEX_Justification.JUSTIFICATION_COMPONENT_NOT_PRESENT, new_risk_acceptance_expiry_date=date(2024, 7, 1), ) @@ -42,7 +42,7 @@ def test_observations_bulk_assessment(self, save_mock, check_mock): new_severity=Severity.SEVERITY_CRITICAL, new_status=Status.STATUS_OPEN, comment="comment", - new_vex_justification=VexJustification.STATUS_COMPONENT_NOT_PRESENT, + new_vex_justification=VEX_Justification.JUSTIFICATION_COMPONENT_NOT_PRESENT, 
new_risk_acceptance_expiry_date=date(2024, 7, 1), ), call( @@ -50,7 +50,7 @@ def test_observations_bulk_assessment(self, save_mock, check_mock): new_severity=Severity.SEVERITY_CRITICAL, new_status=Status.STATUS_OPEN, comment="comment", - new_vex_justification=VexJustification.STATUS_COMPONENT_NOT_PRESENT, + new_vex_justification=VEX_Justification.JUSTIFICATION_COMPONENT_NOT_PRESENT, new_risk_acceptance_expiry_date=date(2024, 7, 1), ), ] @@ -58,9 +58,6 @@ def test_observations_bulk_assessment(self, save_mock, check_mock): @patch("application.core.services.observations_bulk_actions._check_observations") @patch("django.db.models.query.QuerySet.delete") - @patch( - "application.core.services.observations_bulk_actions.push_deleted_observation_to_issue_tracker" - ) @patch("application.core.services.observations_bulk_actions.get_current_user") @patch("application.core.services.observations_bulk_actions.check_security_gate") @patch("application.core.models.Product.save") @@ -69,7 +66,6 @@ def test_observations_bulk_delete( product_save_mock, check_security_gate_mock, current_user_mock, - push_issue_tracker_mock, delete_mock, check_mock, ): @@ -87,7 +83,6 @@ def test_observations_bulk_delete( call(self.product_1, "issue_1", self.user_internal), call(self.product_1, "issue_2", self.user_internal), ] - push_issue_tracker_mock.assert_has_calls(calls) check_security_gate_mock.assert_called_once() self.product_1.save.assert_called_once() diff --git a/backend/unittests/core/services/test_potential_duplicates.py b/backend/unittests/core/services/test_potential_duplicates.py index 62fb45d80..a5b698424 100644 --- a/backend/unittests/core/services/test_potential_duplicates.py +++ b/backend/unittests/core/services/test_potential_duplicates.py @@ -27,18 +27,12 @@ def setUp(self): @patch("application.core.services.potential_duplicates.set_potential_duplicate") @patch("application.core.models.Potential_Duplicate.objects.filter") - def test_set_potential_duplicate_both_ways( - self, filter_mock, set_potential_duplicate_mock - ): - potential_duplicate_observation = Observation( - title="observation_2", product=self.product_1 - ) + def test_set_potential_duplicate_both_ways(self, filter_mock, set_potential_duplicate_mock): + potential_duplicate_observation = Observation(title="observation_2", product=self.product_1) potential_duplicate = Potential_Duplicate() potential_duplicate.observation = self.observation_1 - potential_duplicate.potential_duplicate_observation = ( - potential_duplicate_observation - ) + potential_duplicate.potential_duplicate_observation = potential_duplicate_observation filter_mock.return_value = [potential_duplicate] @@ -89,11 +83,12 @@ def test_find_potential_duplicates_components(self): product=Product.objects.get(id=1), branch=None, file=testfile, - service="", + service_name="", docker_image_name_tag="", endpoint_url="", kubernetes_cluster="", suppress_licenses=False, + sbom=False, ) file_upload_observations(file_upload_parameters) @@ -101,9 +96,7 @@ def test_find_potential_duplicates_components(self): self.assertEqual(4, len(observations)) for observation in observations: self.assertTrue(observation.has_potential_duplicates) - for potential_duplicate in Potential_Duplicate.objects.filter( - observation=observation - ): + for potential_duplicate in Potential_Duplicate.objects.filter(observation=observation): self.assertEqual( potential_duplicate.type, Potential_Duplicate.POTENTIAL_DUPLICATE_TYPE_COMPONENT, diff --git a/backend/unittests/core/services/test_product_api_token.py 
b/backend/unittests/core/services/test_product_api_token.py new file mode 100644 index 000000000..8e76a20da --- /dev/null +++ b/backend/unittests/core/services/test_product_api_token.py @@ -0,0 +1,192 @@ +from datetime import date +from unittest.mock import patch + +from rest_framework.exceptions import ValidationError + +from application.access_control.models import API_Token_Multiple, User +from application.authorization.services.roles_permissions import Roles +from application.core.models import Product_Member +from application.core.services.product_api_token import ( + create_product_api_token, + get_product_api_tokens, + revoke_product_api_token, +) +from unittests.base_test_case import BaseTestCase + + +class TestProductApiToken(BaseTestCase): + @patch("application.core.services.product_api_token.get_user_by_username") + @patch("application.access_control.models.API_Token_Multiple.objects.get") + def test_create_product_api_token_exists(self, api_token_get_mock, user_mock): + user = User() + user_mock.return_value = user + api_token_get_mock.return_value = None + + with self.assertRaises(ValidationError) as e: + create_product_api_token(self.product_1, Roles.Upload, "api_token_name", date.today()) + user_mock.assert_called_with("-product-None-api_token_name-api_token-") + api_token_get_mock.assert_called_with(user=user) + self.assertEqual("API token with this name already exists.", str(e)) + + @patch("application.core.services.product_api_token.get_user_by_username") + @patch("application.access_control.models.API_Token_Multiple.objects.get") + @patch("application.access_control.models.API_Token_Multiple.save") + @patch("application.access_control.models.User.save") + @patch("application.core.models.Product_Member.save") + @patch("application.access_control.models.User.set_unusable_password") + def test_create_product_api_token_with_user( + self, + set_unusable_password_mock, + product_member_save_mock, + user_save_mock, + api_token_save_mock, + api_token_get_mock, + user_mock, + ): + user = User() + user_mock.return_value = user + api_token_get_mock.side_effect = API_Token_Multiple.DoesNotExist() + + api_token = create_product_api_token(self.product_1, Roles.Upload, "api_token_name", date.today()) + + self.assertEqual(42, len(api_token)) + + user_mock.assert_called_with("-product-None-api_token_name-api_token-") + api_token_get_mock.assert_called_with(user=user) + api_token_save_mock.assert_called() + user_save_mock.assert_called() + product_member_save_mock.assert_called() + set_unusable_password_mock.assert_called() + + @patch("application.core.services.product_api_token.get_user_by_username") + @patch("application.access_control.models.API_Token_Multiple.save") + @patch("application.access_control.models.User.save") + @patch("application.core.models.Product_Member.save") + @patch("application.access_control.models.User.set_unusable_password") + def test_create_product_api_token_without_user( + self, + set_unusable_password_mock, + product_member_save_mock, + user_save_mock, + api_token_save_mock, + user_mock, + ): + user_mock.return_value = None + + api_token = create_product_api_token(self.product_1, Roles.Upload, "api_token_name", None) + + self.assertEqual(42, len(api_token)) + + user_mock.assert_called_with("-product-None-api_token_name-api_token-") + api_token_save_mock.assert_called() + user_save_mock.assert_called() + product_member_save_mock.assert_called() + set_unusable_password_mock.assert_called() + + 
@patch("application.access_control.models.API_Token_Multiple.delete") + @patch("application.access_control.models.User.save") + @patch("application.core.models.Product_Member.delete") + @patch("application.core.services.product_api_token.get_product_member") + def test_revoke_product_no_product_member( + self, + get_product_member_mock, + product_member_delete_mock, + user_save_mock, + api_token_delete_mock, + ): + user = User(username="username", full_name="full_name") + api_token = API_Token_Multiple(user=user, api_token_hash="hash") + get_product_member_mock.return_value = None + + revoke_product_api_token(self.product_1, api_token) + + api_token_delete_mock.assert_called() + get_product_member_mock.assert_called_with(self.product_1, user) + product_member_delete_mock.assert_not_called() + user_save_mock.assert_called() + + @patch("application.access_control.models.API_Token_Multiple.delete") + @patch("application.access_control.models.User.save") + @patch("application.core.models.Product_Member.delete") + @patch("application.core.services.product_api_token.get_product_member") + def test_revoke_product_api_token( + self, + get_product_member_mock, + product_member_delete_mock, + user_save_mock, + api_token_delete_mock, + ): + user = User(username="username", full_name="full_name") + api_token = API_Token_Multiple(user=user, api_token_hash="hash") + get_product_member_mock.return_value = Product_Member() + + revoke_product_api_token(self.product_1, api_token) + + api_token_delete_mock.assert_called() + get_product_member_mock.assert_called_with(self.product_1, user) + product_member_delete_mock.assert_called() + user_save_mock.assert_called() + + @patch("application.access_control.models.User.objects.filter") + def test_get_product_api_tokens_no_user(self, user_mock): + user_mock.return_value = [] + + product_api_tokens = get_product_api_tokens(self.product_1) + + self.assertEqual(0, len(product_api_tokens)) + user_mock.assert_called_with(username__startswith="-product-None-") + + @patch("application.access_control.models.User.objects.filter") + @patch("application.core.services.product_api_token.get_product_member") + def test_get_product_api_tokens_no_product_member(self, product_member_mock, user_mock): + user = User() + user_mock.return_value = [user] + + product_member_mock.return_value = None + + product_api_tokens = get_product_api_tokens(self.product_1) + + self.assertEqual(0, len(product_api_tokens)) + user_mock.assert_called_with(username__startswith="-product-None-") + product_member_mock.assert_called_with(self.product_1, user) + + @patch("application.access_control.models.User.objects.filter") + @patch("application.core.services.product_api_token.get_product_member") + @patch("application.access_control.models.API_Token_Multiple.objects.get") + def test_get_product_api_tokens_no_api_token(self, api_token_mock, product_member_mock, user_mock): + user = User() + user_mock.return_value = [user] + product_member_mock.return_value = Product_Member(role=Roles.Upload) + api_token_mock.side_effect = API_Token_Multiple.DoesNotExist() + + product_api_tokens = get_product_api_tokens(self.product_1) + + self.assertEqual(0, len(product_api_tokens)) + user_mock.assert_called_with(username__startswith="-product-None-") + product_member_mock.assert_called_with(self.product_1, user) + api_token_mock.assert_called_with(user=user) + + @patch("application.access_control.models.User.objects.filter") + @patch("application.core.services.product_api_token.get_product_member") + 
@patch("application.access_control.models.API_Token_Multiple.objects.get") + def test_get_product_api_tokens_success(self, api_token_mock, product_member_mock, user_mock): + user = User() + user_mock.return_value = [user] + product_member_mock.return_value = Product_Member(role=Roles.Upload) + expiration_date = date(2025, 11, 14) + api_token = API_Token_Multiple( + user=user, name="api_token_name", api_token_hash="hash", expiration_date=expiration_date + ) + api_token_mock.return_value = api_token + + product_api_tokens = get_product_api_tokens(self.product_1) + + self.assertEqual(1, len(product_api_tokens)) + self.assertEqual(self.product_1.pk, product_api_tokens[0].id) + self.assertEqual(Roles.Upload, product_api_tokens[0].role) + self.assertEqual("api_token_name", product_api_tokens[0].name) + self.assertEqual(expiration_date, product_api_tokens[0].expiration_date) + + user_mock.assert_called_with(username__startswith="-product-None-") + product_member_mock.assert_called_with(self.product_1, user) + api_token_mock.assert_called_with(user=user) diff --git a/backend/unittests/core/services/test_risk_acceptance_expiry.py b/backend/unittests/core/services/test_risk_acceptance_expiry.py index a56775ed4..33149059e 100644 --- a/backend/unittests/core/services/test_risk_acceptance_expiry.py +++ b/backend/unittests/core/services/test_risk_acceptance_expiry.py @@ -13,37 +13,23 @@ class TestCalculateRiskAcceptanceExpiryDate(BaseTestCase): def test_product_risk_acceptance_expiry_active_is_none(self): product = Product(risk_acceptance_expiry_active=None) product.product_group = Product(risk_acceptance_expiry_active=None) - self.assertEqual( - date.today() + timedelta(30), calculate_risk_acceptance_expiry_date(product) - ) + self.assertEqual(date.today() + timedelta(30), calculate_risk_acceptance_expiry_date(product)) def test_product_risk_acceptance_expiry_active_is_false(self): product = Product(risk_acceptance_expiry_active=False) self.assertIsNone(calculate_risk_acceptance_expiry_date(product)) def test_product_risk_acceptance_expiry_active_is_true_and_days_is_none(self): - product = Product( - risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=None - ) - self.assertEqual( - date.today() + timedelta(30), calculate_risk_acceptance_expiry_date(product) - ) + product = Product(risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=None) + self.assertEqual(date.today() + timedelta(30), calculate_risk_acceptance_expiry_date(product)) def test_product_risk_acceptance_expiry_active_is_true_and_days_is_zero(self): - product = Product( - risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=0 - ) - self.assertEqual( - date.today() + timedelta(30), calculate_risk_acceptance_expiry_date(product) - ) + product = Product(risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=0) + self.assertEqual(date.today() + timedelta(30), calculate_risk_acceptance_expiry_date(product)) def test_product_risk_acceptance_expiry_active_is_true_and_days_is_positive(self): - product = Product( - risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=1 - ) - self.assertEqual( - date.today() + timedelta(1), calculate_risk_acceptance_expiry_date(product) - ) + product = Product(risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=1) + self.assertEqual(date.today() + timedelta(1), calculate_risk_acceptance_expiry_date(product)) def test_product_group_risk_acceptance_expiry_active_is_false(self): product = Product(risk_acceptance_expiry_active=None) @@ -52,32 +38,20 @@ def 
test_product_group_risk_acceptance_expiry_active_is_false(self): def test_product_group_risk_acceptance_expiry_active_is_true_and_days_is_none(self): product = Product(risk_acceptance_expiry_active=None) - product.product_group = Product( - risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=None - ) - self.assertEqual( - date.today() + timedelta(30), calculate_risk_acceptance_expiry_date(product) - ) + product.product_group = Product(risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=None) + self.assertEqual(date.today() + timedelta(30), calculate_risk_acceptance_expiry_date(product)) def test_product_group_risk_acceptance_expiry_active_is_true_and_days_is_zero(self): product = Product(risk_acceptance_expiry_active=None) - product.product_group = Product( - risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=0 - ) - self.assertEqual( - date.today() + timedelta(30), calculate_risk_acceptance_expiry_date(product) - ) + product.product_group = Product(risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=0) + self.assertEqual(date.today() + timedelta(30), calculate_risk_acceptance_expiry_date(product)) def test_product_group_risk_acceptance_expiry_active_is_true_and_days_is_positive( self, ): product = Product(risk_acceptance_expiry_active=None) - product.product_group = Product( - risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=2 - ) - self.assertEqual( - date.today() + timedelta(2), calculate_risk_acceptance_expiry_date(product) - ) + product.product_group = Product(risk_acceptance_expiry_active=True, risk_acceptance_expiry_days=2) + self.assertEqual(date.today() + timedelta(2), calculate_risk_acceptance_expiry_date(product)) def test_settings_risk_acceptance_expiry_days_is_zero(self): product = Product(risk_acceptance_expiry_active=None) diff --git a/backend/unittests/core/services/test_security_gate.py b/backend/unittests/core/services/test_security_gate.py index a7e6d7916..b64bea685 100644 --- a/backend/unittests/core/services/test_security_gate.py +++ b/backend/unittests/core/services/test_security_gate.py @@ -2,103 +2,136 @@ from application.commons.models import Settings from application.core.models import Product -from application.core.services.security_gate import check_security_gate +from application.core.services.security_gate import ( + check_security_gate, + check_security_gate_observation, +) from unittests.base_test_case import BaseTestCase class TestSecurityGate(BaseTestCase): @patch("application.core.models.Product.save") - @patch( - "application.core.services.security_gate.send_product_security_gate_notification" - ) + @patch("application.core.services.security_gate.send_product_security_gate_notification") def test_check_security_gate_unchanged(self, notification_mock, save_mock): product = Product(security_gate_passed=None, security_gate_active=False) + check_security_gate(product) + self.assertIsNone(product.security_gate_passed) save_mock.assert_not_called() notification_mock.assert_not_called() @patch("application.core.models.Product.save") - @patch( - "application.core.services.security_gate.send_product_security_gate_notification" - ) + @patch("application.core.services.security_gate.send_product_security_gate_notification") def test_check_security_gate_false_and_changed(self, notification_mock, save_mock): product = Product(security_gate_passed=True, security_gate_active=False) + check_security_gate(product) + self.assertIsNone(product.security_gate_passed) save_mock.assert_called() 
notification_mock.assert_called_with(product) @patch("application.core.models.Product.save") - @patch( - "application.core.services.security_gate.send_product_security_gate_notification" - ) - def test_check_security_gate_false_and_changed_product_group( - self, notification_mock, save_mock - ): + @patch("application.core.services.security_gate.send_product_security_gate_notification") + def test_check_security_gate_false_and_changed_product_group(self, notification_mock, save_mock): product_group = Product(is_product_group=True, security_gate_active=False) product = Product( product_group=product_group, security_gate_passed=True, security_gate_active=True, ) + check_security_gate(product) + self.assertIsNone(product.security_gate_passed) save_mock.assert_called() notification_mock.assert_called_with(product) - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_true_critical(self, mock): - mock.return_value.count.return_value = 2 + @patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_true_critical(self, get_product_mock): product = Product( + id=999, security_gate_passed=False, security_gate_active=True, security_gate_threshold_critical=1, ) + product.active_critical_observation_count = 2 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) - @patch("application.core.models.Observation.objects.filter") + @patch("application.core.services.security_gate.get_product_by_id") @patch("application.core.models.Product.save") - @patch("application.commons.models.Notification.save") + @patch("application.notifications.models.Notification.save") def test_check_security_gate_true_critical_product_group( - self, notification_save_mock, product_save_mock, filter_mock + self, notification_save_mock, product_save_mock, get_product_mock ): - filter_mock.return_value.count.return_value = 2 product_group = Product( is_product_group=True, security_gate_active=True, security_gate_threshold_critical=1, ) product = Product( + id=999, product_group=product_group, security_gate_passed=True, security_gate_active=False, ) + product.active_critical_observation_count = 2 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) product_save_mock.assert_called() notification_save_mock.assert_called() - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_true_high(self, mock): - mock.return_value.count.return_value = 2 + @patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_true_high(self, get_product_mock): product = Product( + id=999, security_gate_passed=False, security_gate_active=True, security_gate_threshold_critical=3, security_gate_threshold_high=1, ) + 
product.active_critical_observation_count = 0 + product.active_high_observation_count = 2 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) - @patch("application.core.models.Observation.objects.filter") + @patch("application.core.services.security_gate.get_product_by_id") @patch("application.core.models.Product.save") - @patch("application.commons.models.Notification.save") + @patch("application.notifications.models.Notification.save") def test_check_security_gate_true_high_product_group( - self, notification_save_mock, product_save_mock, filter_mock + self, notification_save_mock, product_save_mock, get_product_mock ): - filter_mock.return_value.count.return_value = 2 product_group = Product( is_product_group=True, security_gate_active=True, @@ -106,35 +139,57 @@ def test_check_security_gate_true_high_product_group( security_gate_threshold_high=1, ) product = Product( + id=999, product_group=product_group, security_gate_passed=True, security_gate_active=False, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 2 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) product_save_mock.assert_called() notification_save_mock.assert_called() - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_true_medium(self, mock): - mock.return_value.count.return_value = 2 + @patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_true_medium(self, get_product_mock): product = Product( + id=999, security_gate_passed=False, security_gate_active=True, security_gate_threshold_critical=3, security_gate_threshold_high=3, security_gate_threshold_medium=1, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 2 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) - @patch("application.core.models.Observation.objects.filter") + @patch("application.core.services.security_gate.get_product_by_id") @patch("application.core.models.Product.save") - @patch("application.commons.models.Notification.save") + @patch("application.notifications.models.Notification.save") def test_check_security_gate_true_medium_product_group( - self, notification_save_mock, product_save_mock, filter_mock + self, notification_save_mock, product_save_mock, get_product_mock ): - filter_mock.return_value.count.return_value = 2 product_group = Product( is_product_group=True, security_gate_active=True, @@ -143,19 +198,31 @@ def 
test_check_security_gate_true_medium_product_group( security_gate_threshold_medium=1, ) product = Product( + id=999, product_group=product_group, security_gate_passed=True, security_gate_active=False, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 2 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) product_save_mock.assert_called() notification_save_mock.assert_called() - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_true_low(self, mock): - mock.return_value.count.return_value = 2 + @patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_true_low(self, get_product_mock): product = Product( + id=999, security_gate_passed=False, security_gate_active=True, security_gate_threshold_critical=3, @@ -163,17 +230,28 @@ def test_check_security_gate_true_low(self, mock): security_gate_threshold_medium=3, security_gate_threshold_low=1, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 2 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) - @patch("application.core.models.Observation.objects.filter") + @patch("application.core.services.security_gate.get_product_by_id") @patch("application.core.models.Product.save") - @patch("application.commons.models.Notification.save") + @patch("application.notifications.models.Notification.save") def test_check_security_gate_true_low_product_group( - self, notification_save_mock, product_save_mock, filter_mock + self, notification_save_mock, product_save_mock, get_product_mock ): - filter_mock.return_value.count.return_value = 2 product_group = Product( + id=999, is_product_group=True, security_gate_active=True, security_gate_threshold_critical=3, @@ -182,19 +260,31 @@ def test_check_security_gate_true_low_product_group( security_gate_threshold_low=1, ) product = Product( + id=999, product_group=product_group, security_gate_passed=True, security_gate_active=False, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 2 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) product_save_mock.assert_called() notification_save_mock.assert_called() - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_true_none(self, mock): - mock.return_value.count.return_value = 2 + @patch("application.core.services.security_gate.get_product_by_id") + def 
test_check_security_gate_true_none(self, get_product_mock): product = Product( + id=999, security_gate_passed=False, security_gate_active=True, security_gate_threshold_critical=3, @@ -203,16 +293,26 @@ def test_check_security_gate_true_none(self, mock): security_gate_threshold_low=3, security_gate_threshold_none=1, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 2 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) - @patch("application.core.models.Observation.objects.filter") + @patch("application.core.services.security_gate.get_product_by_id") @patch("application.core.models.Product.save") - @patch("application.commons.models.Notification.save") + @patch("application.notifications.models.Notification.save") def test_check_security_gate_true_none_product_group( - self, notification_save_mock, product_save_mock, filter_mock + self, notification_save_mock, product_save_mock, get_product_mock ): - filter_mock.return_value.count.return_value = 2 product_group = Product( is_product_group=True, security_gate_active=True, @@ -223,19 +323,31 @@ def test_check_security_gate_true_none_product_group( security_gate_threshold_none=1, ) product = Product( + id=999, product_group=product_group, security_gate_passed=True, security_gate_active=False, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 2 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) product_save_mock.assert_called() notification_save_mock.assert_called() - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_true_unknown(self, mock): - mock.return_value.count.return_value = 2 + @patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_true_unknown(self, get_product_mock): product = Product( + id=999, security_gate_passed=False, security_gate_active=True, security_gate_threshold_critical=3, @@ -245,16 +357,26 @@ def test_check_security_gate_true_unknown(self, mock): security_gate_threshold_none=3, security_gate_threshold_unknown=1, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 2 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) - @patch("application.core.models.Observation.objects.filter") + @patch("application.core.services.security_gate.get_product_by_id") @patch("application.core.models.Product.save") - @patch("application.commons.models.Notification.save") + 
@patch("application.notifications.models.Notification.save") def test_check_security_gate_true_unknown_product_group( - self, notification_save_mock, product_save_mock, filter_mock + self, notification_save_mock, product_save_mock, get_product_mock ): - filter_mock.return_value.count.return_value = 2 product_group = Product( is_product_group=True, security_gate_active=True, @@ -266,19 +388,32 @@ def test_check_security_gate_true_unknown_product_group( security_gate_threshold_unknown=1, ) product = Product( + id=999, product_group=product_group, security_gate_passed=True, security_gate_active=False, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 2 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) product_save_mock.assert_called() notification_save_mock.assert_called() - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_true_no_match(self, mock): - mock.return_value.count.return_value = 2 + @patch("application.core.services.security_gate.get_product_by_id") + @patch("application.core.models.Product.save") + def test_check_security_gate_true_no_match(self, product_save_mock, get_product_mock): product = Product( + id=999, security_gate_passed=True, security_gate_active=True, security_gate_threshold_critical=3, @@ -288,57 +423,102 @@ def test_check_security_gate_true_no_match(self, mock): security_gate_threshold_none=3, security_gate_threshold_unknown=3, ) + product.active_critical_observation_count = 2 + product.active_high_observation_count = 2 + product.active_medium_observation_count = 2 + product.active_low_observation_count = 2 + product.active_none_observation_count = 2 + product.active_unknown_observation_count = 2 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertTrue(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) + product_save_mock.assert_not_called() @patch("application.commons.models.Settings.load") - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_none_critical(self, mock, mock_settings_load): + @patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_none_critical(self, get_product_mock, mock_settings_load): settings = Settings() settings.security_gate_threshold_critical = 1 mock_settings_load.return_value = settings - mock.return_value.count.return_value = 2 product = Product( + id=999, security_gate_passed=False, ) + product.active_critical_observation_count = 2 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) @patch("application.commons.models.Settings.load") - @patch("application.core.models.Observation.objects.filter") - def 
test_check_security_gate_none_high(self, mock, mock_settings_load): + @patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_none_high(self, get_product_mock, mock_settings_load): settings = Settings() settings.security_gate_threshold_critical = 3 settings.security_gate_threshold_high = 1 mock_settings_load.return_value = settings - mock.return_value.count.return_value = 2 product = Product( + id=999, security_gate_passed=False, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 2 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) @patch("application.commons.models.Settings.load") - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_none_medium(self, mock, mock_settings_load): + @patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_none_medium(self, get_product_mock, mock_settings_load): settings = Settings() settings.security_gate_threshold_critical = 3 settings.security_gate_threshold_high = 3 settings.security_gate_threshold_medium = 1 mock_settings_load.return_value = settings - mock.return_value.count.return_value = 2 product = Product( + id=999, security_gate_passed=False, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 2 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) @patch("application.commons.models.Settings.load") - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_none_low(self, mock, mock_settings_load): + @patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_none_low(self, get_product_mock, mock_settings_load): settings = Settings() settings.security_gate_threshold_critical = 3 settings.security_gate_threshold_high = 3 @@ -346,16 +526,27 @@ def test_check_security_gate_none_low(self, mock, mock_settings_load): settings.security_gate_threshold_low = 1 mock_settings_load.return_value = settings - mock.return_value.count.return_value = 2 product = Product( + id=999, security_gate_passed=False, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 2 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) @patch("application.commons.models.Settings.load") - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_none_none(self, mock, mock_settings_load): + 
@patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_none_none(self, get_product_mock, mock_settings_load): settings = Settings() settings.security_gate_threshold_critical = 3 settings.security_gate_threshold_high = 3 @@ -364,16 +555,27 @@ def test_check_security_gate_none_none(self, mock, mock_settings_load): settings.security_gate_threshold_none = 1 mock_settings_load.return_value = settings - mock.return_value.count.return_value = 2 product = Product( + id=999, security_gate_passed=False, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 2 + product.active_unknown_observation_count = 0 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) @patch("application.commons.models.Settings.load") - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_none_unknown(self, mock, mock_settings_load): + @patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_none_unknown(self, get_product_mock, mock_settings_load): settings = Settings() settings.security_gate_threshold_critical = 3 settings.security_gate_threshold_high = 3 @@ -383,16 +585,27 @@ def test_check_security_gate_none_unknown(self, mock, mock_settings_load): settings.security_gate_threshold_unknown = 1 mock_settings_load.return_value = settings - mock.return_value.count.return_value = 2 product = Product( + id=999, security_gate_passed=False, ) + product.active_critical_observation_count = 0 + product.active_high_observation_count = 0 + product.active_medium_observation_count = 0 + product.active_low_observation_count = 0 + product.active_none_observation_count = 0 + product.active_unknown_observation_count = 2 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertFalse(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) @patch("application.commons.models.Settings.load") - @patch("application.core.models.Observation.objects.filter") - def test_check_security_gate_none_no_match(self, mock, mock_settings_load): + @patch("application.core.services.security_gate.get_product_by_id") + def test_check_security_gate_none_no_match(self, get_product_mock, mock_settings_load): settings = Settings() settings.security_gate_threshold_critical = 3 settings.security_gate_threshold_high = 3 @@ -402,9 +615,44 @@ def test_check_security_gate_none_no_match(self, mock, mock_settings_load): settings.security_gate_threshold_unknown = 3 mock_settings_load.return_value = settings - mock.return_value.count.return_value = 2 product = Product( + id=999, security_gate_passed=True, ) + product.active_critical_observation_count = 2 + product.active_high_observation_count = 2 + product.active_medium_observation_count = 2 + product.active_low_observation_count = 2 + product.active_none_observation_count = 2 + product.active_unknown_observation_count = 2 + + get_product_mock.return_value = product + check_security_gate(product) + self.assertTrue(product.security_gate_passed) + get_product_mock.assert_called_once_with(product_id=999, is_product_group=False, with_annotations=True) + + 
@patch("application.core.services.security_gate.check_security_gate") + def test_check_security_gate_observation_same_branch(self, mock): + self.product_1.repository_default_branch = self.branch_1 + self.observation_1.branch = self.branch_1 + + check_security_gate_observation(self.observation_1) + mock.assert_called_with(self.product_1) + + @patch("application.core.services.security_gate.check_security_gate") + def test_check_security_gate_observation_no_branch(self, mock): + self.product_1.repository_default_branch = None + self.observation_1.branch = None + + check_security_gate_observation(self.observation_1) + mock.assert_called_with(self.product_1) + + @patch("application.core.services.security_gate.check_security_gate") + def test_check_security_gate_observation_different_branch(self, mock): + self.product_1.repository_default_branch = self.branch_1 + self.observation_1.branch = self.branch_2 + + check_security_gate_observation(self.observation_1) + mock.assert_not_called() diff --git a/backend/unittests/core/test_models.py b/backend/unittests/core/test_models.py index c3047e00b..da57e3509 100644 --- a/backend/unittests/core/test_models.py +++ b/backend/unittests/core/test_models.py @@ -1,4 +1,5 @@ -from unittest.mock import patch +from django.core.management import call_command +from django.utils import timezone from application.core.models import Observation, Product from application.import_observations.models import Parser @@ -17,17 +18,27 @@ def test_str(self): observation = Observation(title="observation_title", product=product) self.assertEqual("product_name / observation_title", str(observation)) - @patch("application.core.models.normalize_observation_fields") - @patch("application.core.models.get_identity_hash") - @patch("django.db.models.Model.save") - def test_save(self, save_mock, hash_mock, normalize_mock): - hash_mock.return_value = "hash" - - product = Product(name="product_name") - observation = Observation(title="observation_title", product=product) + def test_save(self): + call_command( + "loaddata", + [ + "unittests/fixtures/unittests_fixtures.json", + ], + ) + product = Product.objects.get(pk=1) + + observation = Observation( + title="observation_title", + product=product, + import_last_seen=timezone.now(), + parser=Parser.objects.first(), + origin_component_name="component", + origin_component_version="1.0.0", + ) observation.save() - self.assertEqual("hash", observation.identity_hash) - save_mock.assert_called() - hash_mock.assert_called_with(observation) - normalize_mock.assert_called_with(observation) + # check if pre_save signal is working + self.assertEqual("4d0ea3fe1e7e00756da57c54073dd41e2e140ecf6b139d0780c3dedecd08db75", observation.identity_hash) + self.assertEqual("component:1.0.0", observation.origin_component_name_version) + product.refresh_from_db() + self.assertTrue(1, product.has_component) diff --git a/backend/unittests/core/test_signals.py b/backend/unittests/core/test_signals.py new file mode 100644 index 000000000..0373fcfa5 --- /dev/null +++ b/backend/unittests/core/test_signals.py @@ -0,0 +1,172 @@ +# backend/unittests/core/test_signals.py +import unittest +from unittest.mock import MagicMock, call, patch + +from django.db.models.signals import post_delete, post_save, pre_save +from django.test import TestCase + +from application.access_control.models import User +from application.authorization.services.roles_permissions import Roles +from application.commons.models import Settings +from application.core.models import Branch, Observation, 
Product, Product_Member +from application.core.services.branch import set_default_branch +from application.core.services.observation import ( + get_identity_hash, + normalize_observation_fields, + set_product_flags, +) +from application.core.services.security_gate import check_security_gate +from application.core.signals import ( + branch_post_save, + observation_pre_save, + product_post_delete, + product_post_save, + settings_post_save, + settings_post_save_task, +) + + +class TestCoreSignals(TestCase): + def setUp(self): + # Create test user + self.user = User.objects.create(username="test_user", email="test@example.com") + + @patch("application.core.signals.normalize_observation_fields") + @patch("application.core.signals.get_identity_hash") + @patch("application.core.signals.set_product_flags") + def test_observation_pre_save( + self, mock_set_product_flags, mock_get_identity_hash, mock_normalize_observation_fields + ): + # Create an observation instance + observation = Observation( + title="Test Observation", + description="Test description", + ) + + # Call the signal handler directly + observation_pre_save(Observation, observation) + + # Verify that the functions were called + mock_normalize_observation_fields.assert_called_once_with(observation) + mock_get_identity_hash.assert_called_once_with(observation) + mock_set_product_flags.assert_called_once_with(observation) + + def test_product_post_delete(self): + # Create a product and a user with a username that starts with the pattern + product = Product.objects.create(name="Test Product") + User.objects.create(username=f"-product-{product.pk}-test_user") + + # Verify user exists + self.assertTrue(User.objects.filter(username=f"-product-{product.pk}-test_user").exists()) + + # Call the signal handler + product_post_delete(Product, product) + + # Verify user was deleted + self.assertFalse(User.objects.filter(username=f"-product-{product.pk}-test_user").exists()) + + @patch("application.core.signals.get_current_user") + @patch("application.core.signals.Product_Member") + def test_product_post_save_created(self, mock_product_member, mock_get_current_user): + # Mock current user + mock_get_current_user.return_value = self.user + + # Create a new product (created=True) + product = Product.objects.create(name="New Product") + + # Verify that Product_Member was created + mock_product_member.assert_called_once_with(product=product, user=self.user, role=Roles.Owner) + mock_product_member.return_value.save.assert_called_once() + + @patch("application.core.signals.check_security_gate") + def test_product_post_save_updated_security_gate_changed(self, mock_check_security_gate): + # Create a product + product = Product.objects.create( + name="Test Product", + security_gate_active=True, + security_gate_threshold_critical=1, + security_gate_threshold_high=2, + security_gate_threshold_medium=3, + security_gate_threshold_low=4, + security_gate_threshold_none=5, + security_gate_threshold_unknown=6, + ) + + # Update the product with changed security gate settings + product.security_gate_active = False + product.save() + + # Verify that check_security_gate was called + mock_check_security_gate.assert_called_once_with(product) + + @patch("application.core.signals.check_security_gate") + def test_product_post_save_updated_product_group(self, mock_check_security_gate): + # Create a product group + product_group = Product.objects.create(name="Test Product Group", is_product_group=True) + + # Create 2 products in the group + Product.objects.create(name="Test 
Product 1", product_group=product_group) + Product.objects.create(name="Test Product 2", product_group=product_group) + + # Update the product group with changed security gate settings + product_group.security_gate_active = True + product_group.save() + + # Verify that check_security_gate was called for both products + self.assertEqual(mock_check_security_gate.call_count, 2) + + @patch("application.core.signals.set_default_branch") + def test_branch_post_save(self, mock_set_default_branch): + # Create a branch + branch = Branch.objects.create(name="Test Branch", product=Product.objects.create(name="Test Product")) + + # Call the signal handler + branch_post_save(Branch, branch, created=True) + + # Verify that set_default_branch was called + mock_set_default_branch.assert_called_with(branch, True) + + @patch("application.core.signals.settings_post_save_task") + def test_settings_post_save(self, mock_settings_post_save_task): + # Create settings + settings = Settings.objects.create(security_gate_active=False) + + # Update settings with changed security gate settings + settings.security_gate_active = True + settings.save() + + # Verify that settings_post_save_task was called + mock_settings_post_save_task.assert_called_once() + + @patch("application.core.signals.logger") + @patch("application.core.signals.Product") + @patch("application.core.signals.check_security_gate") + def test_settings_post_save_task(self, mock_check_security_gate, mock_product_model, mock_logger): + # Mock Product objects + mock_product = MagicMock() + mock_product.is_product_group = False + mock_product_model.objects.filter.return_value = [mock_product] + + # Call the task + settings_post_save_task() + + # Verify that logger was called + mock_logger.info.assert_has_calls( + [call("--- Settings post_save_task - start ---"), call("--- Settings post_save_task - finished ---")] + ) + + # Verify that check_security_gate was called for each product + mock_check_security_gate.assert_called_once_with(mock_product) + + def test_settings_post_save_unit_tests_mode(self): + # Create settings + settings = Settings.objects.create() + + # Update settings with changed security gate settings + settings.security_gate_active = True + settings.save() + + # In unittests mode, the task should not be triggered + # We can't easily test this without mocking the task, so we just verify + # the logic flow works correctly + self.assertTrue(True) # Placeholder to ensure test runs diff --git a/backend/unittests/epss/services/files/cvss_bt.csv b/backend/unittests/epss/services/files/cvss_bt.csv new file mode 100644 index 000000000..56ee4d891 --- /dev/null +++ b/backend/unittests/epss/services/files/cvss_bt.csv @@ -0,0 +1,14 @@ +cve,cvss-bt_score,cvss-bt_severity,cvss-bt_vector,cvss_version,base_score,base_severity,base_vector,assigner,published_date,epss,cisa_kev,vulncheck_kev,exploitdb,metasploit,nuclei,poc_github +CVE-2015-0001,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H,cve@mitre.org,1988-10-01T04:00Z,0.07764,True,True,True,True,True,True +CVE-2025-0001,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H,cve@mitre.org,1988-10-01T04:00Z,0.07764,True,True,True,True,True,True +CVE-2025-0002,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:N/VA:N/SC:N/SI:N/SA:N/E:P,cve@mitre.org,1988-10-01T04:00Z,0.07764,True,False,False,False,False,False 
+CVE-2025-0003,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H,cve@mitre.org,1988-10-01T04:00Z,0.07764,False,True,False,False,False,False +CVE-2025-0004,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:N/VA:N/SC:N/SI:N/SA:N/E:P,cve@mitre.org,1988-10-01T04:00Z,0.07764,False,False,True,False,False,False +CVE-2025-0005,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H,cve@mitre.org,1988-10-01T04:00Z,0.07764,False,False,False,True,False,False +CVE-2025-0006,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H,cve@mitre.org,1988-10-01T04:00Z,0.07764,False,False,False,False,True,False +CVE-2025-0007,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H,cve@mitre.org,1988-10-01T04:00Z,0.07764,False,False,False,False,False,True +CVE-2025-0008,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,CVSS:3.1/invalid,cve@mitre.org,1988-10-01T04:00Z,0.07764,True,False,False,False,False,True +CVE-2025-0009,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,CVSS:4.0/invalid,cve@mitre.org,1988-10-01T04:00Z,0.07764,False,True,True,False,False,False +GHSA-2015-0001,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H,cve@mitre.org,1988-10-01T04:00Z,0.07764,True,True,True,True,True,True +CVE-abcd-0001,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H,cve@mitre.org,1988-10-01T04:00Z,0.07764,True,True,True,True,True,True +CVE-abcd,9.0,HIGH,AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC,2.0,10,HIGH,1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H,cve@mitre.org,1988-10-01T04:00Z,0.07764,True,True,True,True,True,True diff --git a/backend/unittests/epss/services/test_cvss_bt.py b/backend/unittests/epss/services/test_cvss_bt.py new file mode 100644 index 000000000..21e7a9c8c --- /dev/null +++ b/backend/unittests/epss/services/test_cvss_bt.py @@ -0,0 +1,220 @@ +from datetime import timezone as datetime_timezone +from os import path +from unittest.mock import patch + +from django.utils import timezone + +from application.commons.models import Settings +from application.core.models import Observation, Product +from application.core.types import Severity +from application.epss.models import Exploit_Information +from application.epss.services.cvss_bt import ( + apply_exploit_information_observations, + import_cvss_bt, +) +from application.import_observations.models import Parser +from application.import_observations.types import Parser_Source, Parser_Type +from unittests.base_test_case import BaseTestCase + + +class TestCVSS_BT(BaseTestCase): + + @patch("requests.get") + @patch("epss.services.cvss_bt.timezone.now") + def test_import_cvss_bt(self, mock_now, mock_requests_get) -> None: + mock_now.return_value = timezone.datetime(2025, 1, 1, 0, 0, 0, 452618, datetime_timezone.utc) + mock_requests_get.return_value = MockResponse() + + parser = Parser(name="Parser", type=Parser_Type.TYPE_OTHER, source=Parser_Source.SOURCE_OTHER) + parser.save() + + product = Product(name="CVSS_BT Test") + product.save() + + observation = Observation( + title="too old", + vulnerability_id="CVE-2015-0001", + product=product, + parser_severity=Severity.SEVERITY_UNKNOWN, + import_last_seen=timezone.now(), + parser=parser, + ).save() + + observation = Observation( + title="all", + vulnerability_id="CVE-2025-0001", + 
parser_severity=Severity.SEVERITY_UNKNOWN, + product=product, + import_last_seen=timezone.now(), + parser=parser, + ).save() + + observation = Observation( + title="cisa", + vulnerability_id="CVE-2025-0002", + product=product, + import_last_seen=timezone.now(), + parser=parser, + ).save() + + observation = Observation( + title="vulncheck", + vulnerability_id="CVE-2025-0003", + product=product, + import_last_seen=timezone.now(), + parser=parser, + ).save() + + observation = Observation( + title="exploitdb", + vulnerability_id="CVE-2025-0004", + product=product, + cvss4_vector="CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:L/VI:L/VA:L/SC:N/SI:N/SA:N/E:P", + import_last_seen=timezone.now(), + parser=parser, + ).save() + + observation = Observation( + title="metasploit", + vulnerability_id="CVE-2025-0005", + product=product, + import_last_seen=timezone.now(), + parser=parser, + ).save() + + observation = Observation( + title="nuclei", + vulnerability_id="CVE-2025-0006", + product=product, + import_last_seen=timezone.now(), + parser=parser, + ).save() + + observation = Observation( + title="poc_github", + vulnerability_id="CVE-2025-0007", + product=product, + import_last_seen=timezone.now(), + parser=parser, + ).save() + + observation = Observation( + title="invalid cvss 3.1", + vulnerability_id="CVE-2025-0008", + product=product, + import_last_seen=timezone.now(), + parser=parser, + ).save() + + observation = Observation( + title="invalid cvss 4.0", + vulnerability_id="CVE-2025-0009", + product=product, + import_last_seen=timezone.now(), + parser=parser, + ).save() + + observation = Observation( + title="no change", + vulnerability_id="CVE-2025-0010", + product=product, + import_last_seen=timezone.now(), + parser=parser, + ).save() + + # with feature_exploit_information = True + + import_cvss_bt() + + mock_requests_get.assert_called_with( + "https://raw.githubusercontent.com/t0sche/cvss-bt/refs/heads/main/cvss-bt.csv", timeout=300, stream=True + ) + + self.assertEqual(9, Exploit_Information.objects.count()) + + observation = Observation.objects.get(title="too old") + self.assertEqual("", observation.cve_found_in) + + observation = Observation.objects.get(title="all") + self.assertEqual( + "CISA KEV, Exploit-DB, Metasploit, Nuclei, PoC GitHub, VulnCheck KEV", observation.cve_found_in + ) + self.assertEqual("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", observation.cvss3_vector) + self.assertEqual(7.5, observation.cvss3_score) + self.assertEqual(Severity.SEVERITY_HIGH, observation.current_severity) + + observation = Observation.objects.get(title="cisa") + self.assertEqual("CISA KEV", observation.cve_found_in) + self.assertEqual( + "CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:N/VA:N/SC:N/SI:N/SA:N/E:P", observation.cvss4_vector + ) + self.assertEqual(Severity.SEVERITY_HIGH, observation.current_severity) + + observation = Observation.objects.get(title="exploitdb") + self.assertEqual("Exploit-DB", observation.cve_found_in) + self.assertEqual( + "CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:L/VI:L/VA:L/SC:N/SI:N/SA:N/E:P", observation.cvss4_vector + ) + + observation = Observation.objects.get(title="metasploit") + self.assertEqual("Metasploit", observation.cve_found_in) + + observation = Observation.objects.get(title="nuclei") + self.assertEqual("Nuclei", observation.cve_found_in) + + observation = Observation.objects.get(title="poc_github") + self.assertEqual("PoC GitHub", observation.cve_found_in) + + observation = Observation.objects.get(title="vulncheck") + self.assertEqual("VulnCheck KEV", 
observation.cve_found_in) + + observation = Observation.objects.get(title="invalid cvss 3.1") + self.assertEqual("CISA KEV, PoC GitHub", observation.cve_found_in) + self.assertEqual("", observation.cvss3_vector) + + observation = Observation.objects.get(title="invalid cvss 4.0") + self.assertEqual("Exploit-DB, VulnCheck KEV", observation.cve_found_in) + self.assertEqual("", observation.cvss4_vector) + + observation = Observation.objects.get(title="no change") + self.assertEqual("", observation.cve_found_in) + + # with feature_exploit_information = False + + settings = Settings.load() + settings.feature_exploit_information = False + + apply_exploit_information_observations(settings) + + observation = Observation.objects.get(title="all") + self.assertEqual("", observation.cve_found_in) + + observation = Observation.objects.get(title="cisa") + self.assertEqual("", observation.cve_found_in) + + observation = Observation.objects.get(title="exploitdb") + self.assertEqual("", observation.cve_found_in) + + observation = Observation.objects.get(title="metasploit") + self.assertEqual("", observation.cve_found_in) + + observation = Observation.objects.get(title="nuclei") + self.assertEqual("", observation.cve_found_in) + + observation = Observation.objects.get(title="poc_github") + self.assertEqual("", observation.cve_found_in) + + +class MockResponse: + def __init__(self): + self.in_file = open(path.dirname(__file__) + "/files/cvss_bt.csv", "rb") + self.raise_for_status_called = False + + def raise_for_status(self): + self.raise_for_status_called = True + + def iter_lines(self): + lines = [] + for line in self.in_file.readlines(): + lines.append(line) + return lines diff --git a/backend/unittests/epss/services/test_epss.py b/backend/unittests/epss/services/test_epss.py index 59041916a..98c5eeab6 100644 --- a/backend/unittests/epss/services/test_epss.py +++ b/backend/unittests/epss/services/test_epss.py @@ -5,7 +5,7 @@ from application.core.models import Observation from application.epss.models import EPSS_Score from application.epss.services.epss import ( - epss_apply_observation, + apply_epss, epss_apply_observations, ) from unittests.base_test_case import BaseTestCase @@ -29,73 +29,47 @@ def test_epss_apply_observations(self, mock_bulk_update, mock_filter): observation_2.vulnerability_id = "CVE-2" observation_2.save() - EPSS_Score.objects.create( - cve="CVE-1", epss_score=0.00383, epss_percentile=0.72606 - ) + EPSS_Score.objects.create(cve="CVE-1", epss_score=0.00383, epss_percentile=0.72606) mock_filter.return_value = Observation.objects.all() epss_apply_observations() mock_filter.assert_called_with(vulnerability_id__startswith="CVE-") - mock_bulk_update.assert_has_calls( - [call([observation_1], ["epss_score", "epss_percentile"])] - ) + mock_bulk_update.assert_has_calls([call([observation_1], ["epss_score", "epss_percentile"])]) @patch("application.epss.models.EPSS_Score.objects.filter") - @patch("application.core.models.Observation.save") - def test_epss_apply_observation_not_cve( - self, mock_observation_save, mock_epss_score - ): - epss_apply_observation(Observation.objects.all()[0]) + def test_apply_epss_not_cve(self, mock_epss_score): + apply_epss(Observation.objects.all()[0]) mock_epss_score.assert_not_called() - mock_observation_save.assert_not_called() @patch("application.epss.models.EPSS_Score.objects.get") - @patch("application.core.models.Observation.save") - def test_epss_apply_observation_no_epss( - self, mock_observation_save, mock_epss_score - ): + def 
test_apply_epss_no_epss(self, mock_epss_score): mock_epss_score.side_effect = EPSS_Score.DoesNotExist() cve_observation = Observation(vulnerability_id="CVE-2020-1234") - epss_apply_observation(cve_observation) + apply_epss(cve_observation) mock_epss_score.assert_called_with(cve="CVE-2020-1234") - mock_observation_save.assert_not_called() @patch("application.epss.models.EPSS_Score.objects.get") - @patch("application.core.models.Observation.save") - def test_epss_apply_observation_cve_different( - self, mock_observation_save, mock_epss_score_get - ): - mock_epss_score_get.return_value = EPSS_Score( - cve="CVE-2020-1234", epss_score=1, epss_percentile=1 - ) + def test_apply_epss_cve_different(self, mock_epss_score_get): + mock_epss_score_get.return_value = EPSS_Score(cve="CVE-2020-1234", epss_score=1, epss_percentile=1) cve_observation = Observation(vulnerability_id="CVE-2020-1234") - epss_apply_observation(cve_observation) + apply_epss(cve_observation) self.assertEqual(cve_observation.epss_score, 100) self.assertEqual(cve_observation.epss_percentile, 100) mock_epss_score_get.assert_called_with(cve="CVE-2020-1234") - mock_observation_save.assert_called_once() @patch("application.epss.models.EPSS_Score.objects.get") - @patch("application.core.models.Observation.save") - def test_epss_apply_observation_cve_same( - self, mock_observation_save, mock_epss_score_get - ): - mock_epss_score_get.return_value = EPSS_Score( - cve="CVE-2020-1234", epss_score=0.00383, epss_percentile=0.72606 - ) - cve_observation = Observation( - vulnerability_id="CVE-2020-1234", epss_score=0.383, epss_percentile=72.606 - ) - - epss_apply_observation(cve_observation) + def test_apply_epss_cve_same(self, mock_epss_score_get): + mock_epss_score_get.return_value = EPSS_Score(cve="CVE-2020-1234", epss_score=0.00383, epss_percentile=0.72606) + cve_observation = Observation(vulnerability_id="CVE-2020-1234", epss_score=0.383, epss_percentile=72.606) + + apply_epss(cve_observation) self.assertEqual(cve_observation.epss_score, 0.383) self.assertEqual(cve_observation.epss_percentile, 72.606) mock_epss_score_get.assert_called_with(cve="CVE-2020-1234") - mock_observation_save.assert_not_called() diff --git a/backend/unittests/fixtures/import_observations_fixtures.json b/backend/unittests/fixtures/import_observations_fixtures.json index c106c77ea..e886ad720 100644 --- a/backend/unittests/fixtures/import_observations_fixtures.json +++ b/backend/unittests/fixtures/import_observations_fixtures.json @@ -20,7 +20,27 @@ } }, { - "model": "access_control.api_token", + "model": "access_control.user", + "pk": 2, + "fields": { + "password": "", + "last_login": null, + "is_superuser": true, + "username": "admin", + "first_name": "", + "last_name": "", + "email": "", + "is_staff": false, + "is_active": true, + "date_joined": "2022-12-12T18:48:08.514Z", + "full_name": "Administrator", + "is_external": true, + "groups": [], + "user_permissions": [] + } + }, + { + "model": "access_control.api_token_multiple", "pk": 1, "fields": { "api_token_hash": "argon2$argon2id$v=19$m=102400,t=2,p=8$bUc4bk13R2RLSElVMlVoRENLeGoyaA$NMzcg5d9N6jufieKF+nADLa4AdLGdMb5lFVPN8zKPm0", @@ -54,7 +74,8 @@ "pk": 1, "fields": { "product": 1, - "name": "db_branch_import" + "name": "db_branch_import", + "is_default_branch": true } }, { @@ -88,6 +109,17 @@ "class_name": "CycloneDXParser" } }, + { + "model": "import_observations.parser", + "pk": 3, + "fields": { + "name": "Dependency Track", + "type": "SCA", + "source": "API", + "module_name": "dependency_track", + "class_name": 
"DependencyTrack" + } + }, { "model": "rules.rule", "pk": 1, diff --git a/backend/application/licenses/fixtures/initial_data.json b/backend/unittests/fixtures/initial_license_data.json similarity index 100% rename from backend/application/licenses/fixtures/initial_data.json rename to backend/unittests/fixtures/initial_license_data.json diff --git a/backend/unittests/fixtures/unittests_fixtures.json b/backend/unittests/fixtures/unittests_fixtures.json index 2c8dafd4f..908d97c63 100644 --- a/backend/unittests/fixtures/unittests_fixtures.json +++ b/backend/unittests/fixtures/unittests_fixtures.json @@ -1,6 +1,6 @@ [ { - "model": "access_control.api_token", + "model": "access_control.api_token_multiple", "pk": 1, "fields": { "api_token_hash": "argon2$argon2id$v=19$m=102400,t=2,p=8$bUc4bk13R2RLSElVMlVoRENLeGoyaA$NMzcg5d9N6jufieKF+nADLa4AdLGdMb5lFVPN8zKPm0", @@ -216,6 +216,7 @@ "name": "db_product_external", "description": "", "repository_prefix": "", + "repository_default_branch": 3, "security_gate_passed": null, "security_gate_active": false, "security_gate_threshold_critical": null, @@ -236,7 +237,6 @@ "name": "db_product_group", "description": "", "repository_prefix": "", - "repository_default_branch": 1, "security_gate_passed": true, "security_gate_active": null, "security_gate_threshold_critical": null, @@ -258,7 +258,6 @@ "name": "db_product_group_admin_only", "description": "", "repository_prefix": "", - "repository_default_branch": 1, "security_gate_passed": true, "security_gate_active": null, "security_gate_threshold_critical": null, @@ -278,7 +277,8 @@ "pk": 1, "fields": { "product": 1, - "name": "db_branch_internal_dev" + "name": "db_branch_internal_dev", + "is_default_branch": true } }, { @@ -294,7 +294,8 @@ "pk": 3, "fields": { "product": 2, - "name": "db_branch_external" + "name": "db_branch_external", + "is_default_branch": true } }, { @@ -455,7 +456,7 @@ "created": "2022-12-15T16:10:35.513Z", "modified": "2022-12-16T16:13:18.282Z", "last_observation_log": "2022-12-16T16:13:18.281Z", - "identity_hash": "6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c", + "identity_hash": "12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be", "general_rule": null, "product_rule": 1 } @@ -511,7 +512,7 @@ "created": "2022-12-15T16:10:35.521Z", "modified": "2022-12-16T16:13:18.283Z", "last_observation_log": "2022-12-16T16:13:18.283Z", - "identity_hash": "bc8e59b7687fe3533616b3914c636389c131eac3bdbda1b67d8d26f890a74007", + "identity_hash": "da3a81cebbfa79d18f0c0ba0046edacb2428d23a93f4b561ddd54b0478d16cb9", "general_rule": null, "product_rule": 2 } @@ -733,7 +734,7 @@ } }, { - "model": "commons.notification", + "model": "notifications.notification", "pk": 1, "fields": { "type": "Exception", @@ -746,7 +747,7 @@ } }, { - "model": "commons.notification", + "model": "notifications.notification", "pk": 2, "fields": { "type": "Exception", @@ -759,7 +760,7 @@ } }, { - "model": "commons.notification", + "model": "notifications.notification", "pk": 3, "fields": { "type": "Security gate", @@ -771,7 +772,7 @@ } }, { - "model": "commons.notification", + "model": "notifications.notification", "pk": 4, "fields": { "type": "Security gate", @@ -783,7 +784,7 @@ } }, { - "model": "commons.notification", + "model": "notifications.notification", "pk": 5, "fields": { "type": "Task", @@ -798,7 +799,7 @@ } }, { - "model": "commons.notification", + "model": "notifications.notification", "pk": 6, "fields": { "type": "Task", @@ -818,20 +819,21 @@ "fields": { "product": 1, "date": "2022-07-09", - 
"open_critical": 1, - "open_high": 2, - "open_medium": 3, - "open_low": 4, - "open_none": 5, - "open_unknown": 6, + "active_critical": 1, + "active_high": 2, + "active_medium": 3, + "active_low": 4, + "active_none": 5, + "active_unknown": 6, "open": 7, - "resolved": 8, - "duplicate": 9, - "false_positive": 10, - "in_review": 11, - "not_affected": 12, - "not_security": 13, - "risk_accepted": 14 + "affected": 8, + "resolved": 9, + "duplicate": 10, + "false_positive": 11, + "in_review": 12, + "not_affected": 13, + "not_security": 14, + "risk_accepted": 15 } }, { @@ -840,20 +842,21 @@ "fields": { "product": 1, "date": "2022-07-10", - "open_critical": 2, - "open_high": 3, - "open_medium": 4, - "open_low": 5, - "open_none": 6, - "open_unknown": 7, + "active_critical": 2, + "active_high": 3, + "active_medium": 4, + "active_low": 5, + "active_none": 6, + "active_unknown": 7, "open": 8, - "resolved": 9, - "duplicate": 10, - "false_positive": 11, - "in_review": 12, - "not_affected": 13, - "not_security": 14, - "risk_accepted": 15 + "affected": 9, + "resolved": 10, + "duplicate": 11, + "false_positive": 12, + "in_review": 13, + "not_affected": 14, + "not_security": 15, + "risk_accepted": 16 } }, { @@ -862,20 +865,21 @@ "fields": { "product": 2, "date": "2022-07-09", - "open_critical": 4, - "open_high": 5, - "open_medium": 6, - "open_low": 7, - "open_none": 8, - "open_unknown": 9, + "active_critical": 4, + "active_high": 5, + "active_medium": 6, + "active_low": 7, + "active_none": 8, + "active_unknown": 9, "open": 10, - "resolved": 11, - "duplicate": 12, - "false_positive": 13, - "in_review": 14, - "not_affected": 15, - "not_security": 16, - "risk_accepted": 17 + "affected": 11, + "resolved": 12, + "duplicate": 13, + "false_positive": 14, + "in_review": 15, + "not_affected": 16, + "not_security": 17, + "risk_accepted": 18 } }, { @@ -884,20 +888,21 @@ "fields": { "product": 2, "date": "2022-07-10", - "open_critical": 5, - "open_high": 6, - "open_medium": 7, - "open_low": 8, - "open_none": 9, - "open_unknown": 10, + "active_critical": 5, + "active_high": 6, + "active_medium": 7, + "active_low": 8, + "active_none": 9, + "active_unknown": 10, "open": 11, - "resolved": 12, - "duplicate": 13, - "false_positive": 14, - "in_review": 15, - "not_affected": 16, - "not_security": 17, - "risk_accepted": 18 + "affected": 12, + "resolved": 13, + "duplicate": 14, + "false_positive": 15, + "in_review": 16, + "not_affected": 17, + "not_security": 18, + "risk_accepted": 19 } }, { @@ -1038,6 +1043,70 @@ "name": "CVE_vulnerability_1" } }, + { + "model": "vex.cyclonedx", + "pk": 1, + "fields": { + "user": 4, + "product": 1, + "document_id_prefix": "cyclonedx_prefix", + "document_base_id": "2024_0001", + "version": 1, + "content_hash": "abcdef123456", + "author": "author", + "manufacturer": "manufacturer", + "first_issued": "2022-12-15T16:10:35.513Z", + "last_updated": "2022-12-16T16:13:18.282Z" + } + }, + { + "model": "vex.cyclonedx", + "pk": 2, + "fields": { + "user": 4, + "product": null, + "document_id_prefix": "cyclonedx_prefix", + "document_base_id": "2024_0002", + "version": 1, + "content_hash": "abcdef123456", + "author": "author", + "manufacturer": "manufacturer", + "first_issued": "2022-12-15T16:10:35.513Z", + "last_updated": "2022-12-16T16:13:18.282Z" + } + }, + { + "model": "vex.cyclonedx_branch", + "pk": 1, + "fields": { + "cyclonedx": 1, + "branch": 1 + } + }, + { + "model": "vex.cyclonedx_branch", + "pk": 2, + "fields": { + "cyclonedx": 2, + "branch": 2 + } + }, + { + "model": "vex.cyclonedx_vulnerability", + 
"pk": 1, + "fields": { + "cyclonedx": 2, + "name": "CVE_vulnerability" + } + }, + { + "model": "vex.cyclonedx_vulnerability", + "pk": 2, + "fields": { + "cyclonedx": 1, + "name": "CVE_vulnerability_1" + } + }, { "model": "vex.vex_counter", "pk": 1, @@ -1047,5 +1116,27 @@ "counter": 2 } + }, + { + "model": "background_tasks.periodic_task", + "pk": 1, + "fields": { + "task": "Calculate product metrics", + "start_time": "2022-12-15T16:10:35.513Z", + "duration": 1234, + "status": "Success", + "message": "Task completed successfully" + } + }, + { + "model": "background_tasks.periodic_task", + "pk": 2, + "fields": { + "task": "Calculate product metrics", + "start_time": "2022-12-16T16:10:35.513Z", + "duration": 5678, + "status": "Failure", + "message": "Exception has occurred" + } } ] diff --git a/backend/unittests/fixtures/unittests_license_fixtures.json b/backend/unittests/fixtures/unittests_license_fixtures.json index 8942fdcbb..87f83d822 100644 --- a/backend/unittests/fixtures/unittests_license_fixtures.json +++ b/backend/unittests/fixtures/unittests_license_fixtures.json @@ -1,9 +1,40 @@ [ + { + "model": "licenses.concluded_license", + "pk": 1, + "fields": { + "product": 1, + "component_purl_type": "npm", + "component_name": "internal_component", + "component_version": "1.0.0", + "manual_concluded_spdx_license": null, + "manual_concluded_license_expression": "expression", + "manual_concluded_non_spdx_license": "", + "user": 1, + "last_updated": "2022-12-15T16:10:35.513Z" + } + }, + { + "model": "licenses.concluded_license", + "pk": 2, + "fields": { + "product": 2, + "component_purl_type": "pypi", + "component_name": "external_component", + "component_version": "2.0.0", + "manual_concluded_spdx_license": null, + "manual_concluded_license_expression": "", + "manual_concluded_non_spdx_license": "non spdx", + "user": 1, + "last_updated": "2022-12-15T16:10:35.513Z" + + } + }, { "model": "licenses.license_component", "pk": 1, "fields": { - "identity_hash": "6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c", + "identity_hash": "12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be", "product": 1, "upload_filename": "", "component_name": "internal_component", @@ -13,8 +44,8 @@ "component_purl_type": "", "component_cpe": "", "component_dependencies": "", - "license_name": "internal license", - "non_spdx_license": "internal license", + "imported_declared_license_name": "internal license", + "imported_declared_non_spdx_license": "internal license", "evaluation_result": "Allowed", "numerical_evaluation_result": 1, "created": "2022-12-15T16:10:35.513Z", @@ -26,7 +57,7 @@ "model": "licenses.license_component", "pk": 2, "fields": { - "identity_hash": "bc8e59b7687fe3533616b3914c636389c131eac3bdbda1b67d8d26f890a74007", + "identity_hash": "da3a81cebbfa79d18f0c0ba0046edacb2428d23a93f4b561ddd54b0478d16cb9", "product": 2, "upload_filename": "", "component_name": "external_component", @@ -36,8 +67,8 @@ "component_purl_type": "", "component_cpe": "", "component_dependencies": "", - "license_name": "external license", - "non_spdx_license": "external license", + "imported_declared_license_name": "external license", + "imported_declared_non_spdx_license": "external license", "evaluation_result": "Review required", "numerical_evaluation_result": 2, "created": "2022-12-15T16:10:35.513Z", diff --git a/backend/unittests/fixtures/vex_fixtures.json b/backend/unittests/fixtures/vex_fixtures.json index 12c349e73..58dd912c2 100644 --- a/backend/unittests/fixtures/vex_fixtures.json +++ 
b/backend/unittests/fixtures/vex_fixtures.json @@ -138,7 +138,7 @@ "created": "2022-12-15T16:10:35.513Z", "modified": "2022-12-16T16:13:18.282Z", "last_observation_log": "2022-12-16T16:13:18.281Z", - "identity_hash": "6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c" + "identity_hash": "12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be" } }, { @@ -193,7 +193,7 @@ "created": "2022-12-15T16:10:35.513Z", "modified": "2022-12-16T16:13:18.282Z", "last_observation_log": "2022-12-16T16:13:18.281Z", - "identity_hash": "6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c" + "identity_hash": "12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be" } }, { @@ -248,7 +248,7 @@ "created": "2022-12-15T16:10:35.513Z", "modified": "2022-12-16T16:13:18.282Z", "last_observation_log": "2022-12-16T16:13:18.281Z", - "identity_hash": "6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c" + "identity_hash": "12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be" } }, { @@ -329,7 +329,7 @@ "created": "2022-12-15T16:10:35.513Z", "modified": "2022-12-16T16:13:18.282Z", "last_observation_log": "2022-12-16T16:13:18.281Z", - "identity_hash": "6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c" + "identity_hash": "12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be" } }, { @@ -385,7 +385,7 @@ "created": "2022-12-15T16:10:35.513Z", "modified": "2022-12-16T16:13:18.282Z", "last_observation_log": "2022-12-16T16:13:18.281Z", - "identity_hash": "6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c" + "identity_hash": "12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be" } }, { @@ -441,7 +441,7 @@ "created": "2022-12-15T16:10:35.513Z", "modified": "2022-12-16T16:13:18.282Z", "last_observation_log": "2022-12-16T16:13:18.281Z", - "identity_hash": "6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c" + "identity_hash": "12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be" } }, { @@ -495,7 +495,7 @@ "created": "2022-12-15T16:10:35.513Z", "modified": "2022-12-16T16:13:18.282Z", "last_observation_log": "2022-12-16T16:13:18.281Z", - "identity_hash": "6eef8088480aa2523aeeb64ad35f876a942cc3172cfb36752f3a052a4f88642c" + "identity_hash": "12b30c8b800bd9607d01a3cd2f1cd72af4b8c948b2e7831a48bfc2589616f0be" } }, { diff --git a/backend/unittests/import_observations/api/__init__.py b/backend/unittests/import_observations/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/import_observations/api/test_views.py b/backend/unittests/import_observations/api/test_views.py new file mode 100644 index 000000000..576e1c46c --- /dev/null +++ b/backend/unittests/import_observations/api/test_views.py @@ -0,0 +1,557 @@ +from dataclasses import dataclass +from os import path +from unittest.mock import ANY, patch + +from django.core.files.uploadedfile import SimpleUploadedFile +from django.core.management import call_command +from rest_framework.test import APIClient + +from application.access_control.models import User +from application.core.models import Branch, Product, Service +from application.import_observations.services.import_observations import ( + FileUploadParameters, +) +from unittests.base_test_case import BaseTestCase + + +@dataclass +class APITest: + username: str + url: str + post_data: str + expected_status_code: int + expected_data: str + + +class TestImport(BaseTestCase): + @classmethod + @patch("application.core.signals.get_current_user") + def 
setUpClass(self, mock_user): + mock_user.return_value = None + call_command( + "loaddata", + [ + "unittests/fixtures/initial_license_data.json", + "unittests/fixtures/unittests_fixtures.json", + "unittests/fixtures/unittests_license_fixtures.json", + ], + ) + super().setUpClass() + + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + @patch("application.import_observations.api.views.file_upload_observations") + def _test_api(self, data: APITest, mock_file_upload_observations, mock_authenticate): + user = User.objects.get(username=data.username) + mock_authenticate.return_value = user, None + mock_file_upload_observations.return_value = 1, 2, 3, 4, 5, 6 + + with open(path.dirname(__file__) + "/test_views.py", "rb") as file: + uploaded_file = SimpleUploadedFile("file.txt", file.read(), content_type="multipart/form-data") + + post_data = { + "file": uploaded_file, + } + post_data.update(data.post_data) + + api_client = APIClient() + response = api_client.post(data.url, post_data) + + self.assertEqual(response.status_code, data.expected_status_code) + self.assertEqual(response.data, data.expected_data) + + if response.status_code != 200: + mock_file_upload_observations.assert_not_called() + else: + if data.post_data.get("product"): + product = Product.objects.get(id=data.post_data.get("product")) + else: + product = Product.objects.get(name=data.post_data.get("product_name")) + if data.post_data.get("branch"): + branch = Branch.objects.get(id=data.post_data.get("branch")) + else: + branch = Branch.objects.get(product=product.pk, name=data.post_data.get("branch_name")) + service_name = "" + if data.post_data.get("service_id"): + service_name = Service.objects.get(id=data.post_data.get("service_id")).name + elif data.post_data.get("service"): + service_name = data.post_data.get("service") + sbom = "sbom" in data.url + mock_file_upload_observations.assert_called_once_with( + FileUploadParameters( + product=product, + branch=branch, + file=ANY, + service_name=service_name, + docker_image_name_tag=data.post_data.get("docker_image_name_tag", ""), + endpoint_url=data.post_data.get("endpoint_url", ""), + kubernetes_cluster=data.post_data.get("kubernetes_cluster", ""), + suppress_licenses=data.post_data.get("suppress_licenses", False), + sbom=sbom, + ) + ) + + def test_file_upload_observations_by_id_no_product(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_observations_by_id/", + post_data={}, + expected_status_code=400, + expected_data={"message": "Product: This field is required."}, + ) + self._test_api(data) + + def test_file_upload_observations_by_id_product_not_found(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_observations_by_id/", + post_data={ + "product": 99999, + }, + expected_status_code=400, + expected_data={"message": "Product 99999 does not exist"}, + ) + self._test_api(data) + + def test_file_upload_observations_by_id_no_permission(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_observations_by_id/", + post_data={ + "product": 1, + }, + expected_status_code=403, + expected_data={"message": "You do not have permission to perform this action."}, + ) + self._test_api(data) + + def test_file_upload_observations_by_id_branch_not_found(self): + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_observations_by_id/", + post_data={ + "product": 1, + "branch": 3, + }, + 
expected_status_code=400, + expected_data={"message": "Branch 3 does not exist for product db_product_internal"}, + ) + self._test_api(data) + + def test_file_upload_observations_by_id_service_not_found(self): + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_observations_by_id/", + post_data={ + "product": 1, + "service_id": 3, + }, + expected_status_code=400, + expected_data={"message": "Service 3 does not exist for product db_product_internal"}, + ) + self._test_api(data) + + def test_file_upload_observations_by_id_service_name_successful(self): + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_observations_by_id/", + post_data={ + "product": 1, + "branch": 1, + "service": "service_name", + "docker_image_name_tag": "docker_image_name_tag", + "endpoint_url": "endpoint_url", + "kubernetes_cluster": "kubernetes_cluster", + "suppress_licenses": True, + }, + expected_status_code=200, + expected_data={ + "license_components_deleted": 6, + "license_components_new": 4, + "license_components_updated": 5, + "observations_new": 1, + "observations_resolved": 3, + "observations_updated": 2, + }, + ) + self._test_api(data) + + def test_file_upload_observations_by_id_service_id_successful(self): + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_observations_by_id/", + post_data={ + "product": 1, + "branch": 1, + "service_id": 1, + "docker_image_name_tag": "docker_image_name_tag", + "endpoint_url": "endpoint_url", + "kubernetes_cluster": "kubernetes_cluster", + "suppress_licenses": True, + }, + expected_status_code=200, + expected_data={ + "license_components_deleted": 6, + "license_components_new": 4, + "license_components_updated": 5, + "observations_new": 1, + "observations_resolved": 3, + "observations_updated": 2, + }, + ) + self._test_api(data) + + def test_file_upload_observations_by_name_no_product(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_observations_by_name/", + post_data={}, + expected_status_code=400, + expected_data={"message": "Product name: This field is required."}, + ) + self._test_api(data) + + def test_file_upload_observations_by_name_product_not_found(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_observations_by_name/", + post_data={ + "product_name": "Unknown Product", + }, + expected_status_code=400, + expected_data={"message": "Product Unknown Product does not exist"}, + ) + self._test_api(data) + + def test_file_upload_observations_by_name_no_permission(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_observations_by_name/", + post_data={ + "product_name": "db_product_internal", + }, + expected_status_code=403, + expected_data={"message": "You do not have permission to perform this action."}, + ) + self._test_api(data) + + def test_file_upload_observations_by_name_branch_not_found_default(self): + product = Product.objects.get(name="db_product_internal") + product.repository_default_branch = None + product.save() + + for branch in Branch.objects.filter(product=product): + branch.is_default_branch = False + branch.save() + + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_observations_by_name/", + post_data={ + "product_name": "db_product_internal", + "branch_name": "Unknown Branch", + "service": "service_name", + "docker_image_name_tag": "docker_image_name_tag", + "endpoint_url": "endpoint_url", + "kubernetes_cluster": 
"kubernetes_cluster", + "suppress_licenses": True, + }, + expected_data={ + "license_components_deleted": 6, + "license_components_new": 4, + "license_components_updated": 5, + "observations_new": 1, + "observations_resolved": 3, + "observations_updated": 2, + }, + expected_status_code=200, + ) + self._test_api(data) + + product = Product.objects.get(name="db_product_internal") + branch = Branch.objects.get(product=product.pk, name="Unknown Branch") + + self.assertEqual(branch, product.repository_default_branch) + self.assertTrue(branch.is_default_branch) + + def test_file_upload_observations_by_name_branch_not_found_no_default(self): + product = Product.objects.get(name="db_product_internal") + + for branch in Branch.objects.filter(product=product): + branch.is_default_branch = False + branch.save() + + branch = Branch.objects.filter(product=product).first() + if not branch: + self.fail() + branch.is_default_branch = True + branch.save() + + product.repository_default_branch = branch + product.save + + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_observations_by_name/", + post_data={ + "product_name": "db_product_internal", + "branch_name": "Unknown Branch", + "service": "service_name", + "docker_image_name_tag": "docker_image_name_tag", + "endpoint_url": "endpoint_url", + "kubernetes_cluster": "kubernetes_cluster", + "suppress_licenses": True, + }, + expected_data={ + "license_components_deleted": 6, + "license_components_new": 4, + "license_components_updated": 5, + "observations_new": 1, + "observations_resolved": 3, + "observations_updated": 2, + }, + expected_status_code=200, + ) + self._test_api(data) + + product = Product.objects.get(name="db_product_internal") + branch = Branch.objects.get(product=product.pk, name="Unknown Branch") + + self.assertNotEqual(branch, product.repository_default_branch) + self.assertFalse(branch.is_default_branch) + + def test_file_upload_observations_by_name_successful(self): + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_observations_by_name/", + post_data={ + "product_name": "db_product_internal", + "branch_name": "db_branch_internal_dev", + "service": "service_name", + "docker_image_name_tag": "docker_image_name_tag", + "endpoint_url": "endpoint_url", + "kubernetes_cluster": "kubernetes_cluster", + "suppress_licenses": True, + }, + expected_status_code=200, + expected_data={ + "license_components_deleted": 6, + "license_components_new": 4, + "license_components_updated": 5, + "observations_new": 1, + "observations_resolved": 3, + "observations_updated": 2, + }, + ) + self._test_api(data) + + def test_file_upload_sbom_by_id_no_product(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_sbom_by_id/", + post_data={}, + expected_status_code=400, + expected_data={"message": "Product: This field is required."}, + ) + self._test_api(data) + + def test_file_upload_sbom_by_id_product_not_found(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_sbom_by_id/", + post_data={ + "product": 99999, + }, + expected_status_code=400, + expected_data={"message": "Product 99999 does not exist"}, + ) + self._test_api(data) + + def test_file_upload_sbom_by_id_no_permission(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_sbom_by_id/", + post_data={ + "product": 1, + }, + expected_status_code=403, + expected_data={"message": "You do not have permission to perform this action."}, + ) + 
self._test_api(data) + + def test_file_upload_sbom_by_id_branch_not_found(self): + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_sbom_by_id/", + post_data={ + "product": 1, + "branch": 3, + }, + expected_status_code=400, + expected_data={"message": "Branch 3 does not exist for product db_product_internal"}, + ) + self._test_api(data) + + def test_file_upload_sbom_by_id_service_not_found(self): + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_sbom_by_id/", + post_data={ + "product": 1, + "service_id": 3, + }, + expected_status_code=400, + expected_data={"message": "Service 3 does not exist for product db_product_internal"}, + ) + self._test_api(data) + + def test_file_upload_sbom_by_id_service_name_successful(self): + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_sbom_by_id/", + post_data={ + "product": 1, + "branch": 1, + "service": "service_name", + }, + expected_status_code=200, + expected_data={ + "license_components_deleted": 6, + "license_components_new": 4, + "license_components_updated": 5, + }, + ) + self._test_api(data) + + def test_file_upload_sbom_by_id_service_id_successful(self): + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_sbom_by_id/", + post_data={"product": 1, "branch": 1, "service_id": 1}, + expected_status_code=200, + expected_data={ + "license_components_deleted": 6, + "license_components_new": 4, + "license_components_updated": 5, + }, + ) + self._test_api(data) + + def test_file_upload_sbom_by_name_no_product(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_sbom_by_name/", + post_data={}, + expected_status_code=400, + expected_data={"message": "Product name: This field is required."}, + ) + self._test_api(data) + + def test_file_upload_sbom_by_name_product_not_found(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_sbom_by_name/", + post_data={ + "product_name": "Unknown Product", + }, + expected_status_code=400, + expected_data={"message": "Product Unknown Product does not exist"}, + ) + self._test_api(data) + + def test_file_upload_sbom_by_name_no_permission(self): + data = APITest( + username="db_internal_read", + url="/api/import/file_upload_sbom_by_name/", + post_data={ + "product_name": "db_product_internal", + }, + expected_status_code=403, + expected_data={"message": "You do not have permission to perform this action."}, + ) + self._test_api(data) + + def test_file_upload_sbom_by_name_branch_not_found_default(self): + product = Product.objects.get(name="db_product_internal") + product.repository_default_branch = None + product.save() + + for branch in Branch.objects.filter(product=product): + branch.is_default_branch = False + branch.save() + + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_sbom_by_name/", + post_data={ + "product_name": "db_product_internal", + "branch_name": "Unknown Branch", + }, + expected_data={ + "license_components_deleted": 6, + "license_components_new": 4, + "license_components_updated": 5, + }, + expected_status_code=200, + ) + self._test_api(data) + + product = Product.objects.get(name="db_product_internal") + branch = Branch.objects.get(product=product.pk, name="Unknown Branch") + + self.assertEqual(branch, product.repository_default_branch) + self.assertTrue(branch.is_default_branch) + + def test_file_upload_sbom_by_name_branch_not_found_no_default(self): + product = 
Product.objects.get(name="db_product_internal") + + for branch in Branch.objects.filter(product=product): + branch.is_default_branch = False + branch.save() + + branch = Branch.objects.filter(product=product).first() + if not branch: + self.fail() + branch.is_default_branch = True + branch.save() + + product.repository_default_branch = branch + product.save() + + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_sbom_by_name/", + post_data={ + "product_name": "db_product_internal", + "branch_name": "Unknown Branch", + }, + expected_data={ + "license_components_deleted": 6, + "license_components_new": 4, + "license_components_updated": 5, + }, + expected_status_code=200, + ) + self._test_api(data) + + product = Product.objects.get(name="db_product_internal") + branch = Branch.objects.get(product=product.pk, name="Unknown Branch") + + self.assertNotEqual(branch, product.repository_default_branch) + self.assertFalse(branch.is_default_branch) + + def test_file_upload_sbom_by_name_successful(self): + data = APITest( + username="db_internal_write", + url="/api/import/file_upload_sbom_by_name/", + post_data={ + "product_name": "db_product_internal", + "branch_name": "db_branch_internal_dev", + }, + expected_status_code=200, + expected_data={ + "license_components_deleted": 6, + "license_components_new": 4, + "license_components_updated": 5, + }, + ) + self._test_api(data) diff --git a/backend/unittests/import_observations/management/test_register_parsers.py b/backend/unittests/import_observations/management/test_register_parsers.py index 6707bfd94..4580009bd 100644 --- a/backend/unittests/import_observations/management/test_register_parsers.py +++ b/backend/unittests/import_observations/management/test_register_parsers.py @@ -17,7 +17,7 @@ def test_register_parsers(self): command.handle() parsers = Parser.objects.all().order_by("name") - self.assertEqual(13, len(parsers)) + self.assertEqual(15, len(parsers)) parser = parsers[0] self.assertEqual("Azure Defender", parser.name) @@ -33,14 +33,14 @@ def test_register_parsers(self): self.assertEqual("cryptolyzer", parser.module_name) self.assertEqual("CryptoLyzerParser", parser.class_name) - parser = parsers[5] + parser = parsers[6] self.assertEqual("Manual", parser.name) self.assertEqual("Manual", parser.type) self.assertEqual("Manual", parser.source) self.assertEqual("", parser.module_name) self.assertEqual("", parser.class_name) - parser = parsers[11] + parser = parsers[13] self.assertEqual("Trivy Operator Prometheus", parser.name) self.assertEqual("Other", parser.type) self.assertEqual("API", parser.source) diff --git a/backend/unittests/import_observations/parsers/azure_defender/test_parser.py b/backend/unittests/import_observations/parsers/azure_defender/test_parser.py index 3174366c8..8b0aba40f 100644 --- a/backend/unittests/import_observations/parsers/azure_defender/test_parser.py +++ b/backend/unittests/import_observations/parsers/azure_defender/test_parser.py @@ -3,6 +3,7 @@ from rest_framework.exceptions import ValidationError +from application.core.models import Product from application.core.types import Severity from application.import_observations.parsers.azure_defender.parser import ( AzureDefenderParser, @@ -28,9 +29,11 @@ def test_defender(self): ) as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("Azure Defender", parser.name) - self.assertTrue(isinstance(parser_instance, AzureDefenderParser)) + self.assertIsInstance(parser_instance, AzureDefenderParser) - observations = 
parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("Azure Defender", scanner) self.assertEqual(2, len(observations)) observation = observations[0] diff --git a/backend/unittests/import_observations/parsers/cryptolyzer/test_parser.py b/backend/unittests/import_observations/parsers/cryptolyzer/test_parser.py index 2b2b0cce5..41fd07736 100644 --- a/backend/unittests/import_observations/parsers/cryptolyzer/test_parser.py +++ b/backend/unittests/import_observations/parsers/cryptolyzer/test_parser.py @@ -1,6 +1,7 @@ from os import path from unittest import TestCase +from application.core.models import Product from application.core.types import Severity from application.import_observations.parsers.cryptolyzer.parser import CryptoLyzerParser from application.import_observations.services.parser_detector import detect_parser @@ -11,21 +12,23 @@ def test_no_observations(self): with open(path.dirname(__file__) + "/files/no_observations.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("CryptoLyzer", parser.name) - self.assertTrue(isinstance(parser_instance, CryptoLyzerParser)) + self.assertIsInstance(parser_instance, CryptoLyzerParser) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("CryptoLyzer", scanner) self.assertEqual(0, len(observations)) def test_multiple_observations(self): - with open( - path.dirname(__file__) + "/files/multiple_observations.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/multiple_observations.json") as testfile: parser = CryptoLyzerParser() parser, parser_instance, data = detect_parser(testfile) self.assertEqual("CryptoLyzer", parser.name) - self.assertTrue(isinstance(parser_instance, CryptoLyzerParser)) + self.assertIsInstance(parser_instance, CryptoLyzerParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) - observations = parser_instance.get_observations(data) + self.assertEqual("CryptoLyzer", scanner) self.assertEqual(4, len(observations)) observation = observations[0] @@ -35,9 +38,7 @@ def test_multiple_observations(self): observation.description, ) self.assertEqual(Severity.SEVERITY_MEDIUM, observation.parser_severity) - self.assertEqual( - "https://www.example.org:443", observation.origin_endpoint_url - ) + self.assertEqual("https://www.example.org:443", observation.origin_endpoint_url) self.assertEqual("CryptoLyzer", observation.scanner) self.assertEqual( "https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/TechGuidelines/TG02102/BSI-TR-02102-2.pdf?__blob=publicationFile&v=5", @@ -56,18 +57,14 @@ def test_multiple_observations(self): observation.description, ) self.assertEqual(Severity.SEVERITY_MEDIUM, observation.parser_severity) - self.assertEqual( - "https://www.example.org:443", observation.origin_endpoint_url - ) + self.assertEqual("https://www.example.org:443", observation.origin_endpoint_url) self.assertEqual("CryptoLyzer", observation.scanner) self.assertEqual( "https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/TechGuidelines/TG02102/BSI-TR-02102-2.pdf?__blob=publicationFile&v=5", observation.unsaved_references[0], ) self.assertEqual("Result", observation.unsaved_evidences[0][0]) - self.assertIn( - "TLS_CHACHA20_POLY1305_SHA256", observation.unsaved_evidences[0][1] - ) + 
self.assertIn("TLS_CHACHA20_POLY1305_SHA256", observation.unsaved_evidences[0][1]) observation = observations[2] self.assertEqual("Unrecommended elliptic curves", observation.title) @@ -76,9 +73,7 @@ def test_multiple_observations(self): observation.description, ) self.assertEqual(Severity.SEVERITY_MEDIUM, observation.parser_severity) - self.assertEqual( - "https://www.example.org:443", observation.origin_endpoint_url - ) + self.assertEqual("https://www.example.org:443", observation.origin_endpoint_url) self.assertEqual("CryptoLyzer", observation.scanner) self.assertEqual( "https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/TechGuidelines/TG02102/BSI-TR-02102-2.pdf?__blob=publicationFile&v=5", @@ -94,9 +89,7 @@ def test_multiple_observations(self): observation.description, ) self.assertEqual(Severity.SEVERITY_MEDIUM, observation.parser_severity) - self.assertEqual( - "https://www.example.org:443", observation.origin_endpoint_url - ) + self.assertEqual("https://www.example.org:443", observation.origin_endpoint_url) self.assertEqual("CryptoLyzer", observation.scanner) self.assertEqual( "https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/TechGuidelines/TG02102/BSI-TR-02102-2.pdf?__blob=publicationFile&v=5", @@ -109,9 +102,11 @@ def test_weak_tls(self): with open(path.dirname(__file__) + "/files/tls10.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("CryptoLyzer", parser.name) - self.assertTrue(isinstance(parser_instance, CryptoLyzerParser)) + self.assertIsInstance(parser_instance, CryptoLyzerParser) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("CryptoLyzer", scanner) self.assertEqual(3, len(observations)) observation = observations[0] @@ -121,9 +116,7 @@ def test_weak_tls(self): observation.description, ) self.assertEqual(Severity.SEVERITY_HIGH, observation.parser_severity) - self.assertEqual( - "https://tls-v1-0.badssl.com:443", observation.origin_endpoint_url - ) + self.assertEqual("https://tls-v1-0.badssl.com:443", observation.origin_endpoint_url) self.assertEqual("CryptoLyzer", observation.scanner) self.assertEqual( "https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/TechGuidelines/TG02102/BSI-TR-02102-2.pdf?__blob=publicationFile&v=5", diff --git a/backend/unittests/import_observations/parsers/cyclone_dx/files/changed/licenses_1.json b/backend/unittests/import_observations/parsers/cyclone_dx/files/changed/licenses_1.json index 812fb65bb..a5d018d23 100644 --- a/backend/unittests/import_observations/parsers/cyclone_dx/files/changed/licenses_1.json +++ b/backend/unittests/import_observations/parsers/cyclone_dx/files/changed/licenses_1.json @@ -31,11 +31,11 @@ ], "component": { "name": "SecObserve", - "version": "1.26.0", + "version": "1.48.0", "description": "SecObserve is an open source vulnerability management system for software development and cloud environments.", "type": "application", - "bom-ref": "pkg:pypi/secobserve@1.26.0", - "purl": "pkg:pypi/secobserve@1.26.0" + "bom-ref": "pkg:pypi/secobserve@1.48.0", + "purl": "pkg:pypi/secobserve@1.48.0" }, "properties": [ { @@ -59,7 +59,8 @@ ], "licenses": [ { - "expression": "MIT" + "expression": "MIT", + "acknowledgement": "concluded" } ], "purl": "pkg:pypi/argon2-cffi-bindings@21.2.1", @@ -101,7 +102,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + 
"acknowledgement": "concluded" } } ], @@ -138,13 +140,15 @@ { "license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } }, { "license": { "id": "BSD-3-Clause", - "url": "https://opensource.org/licenses/BSD-3-Clause" + "url": "https://opensource.org/licenses/BSD-3-Clause", + "acknowledgement": "concluded" } } ], @@ -186,8 +190,8 @@ "licenses": [ { "license": { - "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "name": "attrs non-standard license", + "acknowledgement": "concluded" } } ], @@ -224,7 +228,8 @@ { "license": { "id": "Apache-2.0", - "url": "https://opensource.org/licenses/Apache-2.0" + "url": "https://opensource.org/licenses/Apache-2.0", + "acknowledgement": "concluded" } } ], @@ -267,7 +272,8 @@ { "license": { "id": "MPL-2.0", - "url": "https://opensource.org/licenses/MPL-2.0" + "url": "https://opensource.org/licenses/MPL-2.0", + "acknowledgement": "concluded" } } ], @@ -310,7 +316,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -353,7 +360,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -394,7 +402,8 @@ ], "licenses": [ { - "expression": "LGPL-3.0-or-later OR GPL-3.0-or-later" + "expression": "LGPL-3.0-or-later OR GPL-3.0-or-later", + "acknowledgement": "concluded" } ], "purl": "pkg:pypi/cryptography@43.0.1", @@ -438,7 +447,8 @@ ], "licenses": [ { - "expression": "LGPL-3.0-or-later AND BSD-3-Clause" + "expression": "LGPL-3.0-or-later AND BSD-3-Clause", + "acknowledgement": "concluded" } ], "purl": "pkg:pypi/cvss@3.2", @@ -478,7 +488,8 @@ ], "licenses": [ { - "expression": "Apache-2.0 AND (BSD-3-Clause OR MIT)" + "expression": "Apache-2.0 AND (BSD-3-Clause OR MIT)", + "acknowledgement": "concluded" } ], "purl": "pkg:pypi/defusedcsv@2.0.0", @@ -520,7 +531,8 @@ { "license": { "id": "PSF-2.0", - "url": "https://opensource.org/licenses/PSF-2.0" + "url": "https://opensource.org/licenses/PSF-2.0", + "acknowledgement": "concluded" } } ], @@ -563,7 +575,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -600,7 +613,8 @@ { "license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } } ], @@ -637,7 +651,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -680,7 +695,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -760,7 +776,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -803,13 +820,15 @@ { "license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } }, { "license": { "id": "BSD-3-Clause", - "url": "https://opensource.org/licenses/BSD-3-Clause" + "url": "https://opensource.org/licenses/BSD-3-Clause", + "acknowledgement": "concluded" } } ], @@ -846,7 +865,8 @@ { 
"license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } } ], @@ -889,7 +909,8 @@ { "license": { "id": "ISC", - "url": "https://opensource.org/licenses/ISC" + "url": "https://opensource.org/licenses/ISC", + "acknowledgement": "concluded" } } ], @@ -926,7 +947,8 @@ { "license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } } ], @@ -969,7 +991,8 @@ { "license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } } ], @@ -1010,7 +1033,8 @@ ], "licenses": [ { - "expression": "GPL-3.0-or-later WITH Bison-exception-2.2" + "expression": "GPL-3.0-or-later WITH Bison-exception-2.2", + "acknowledgement": "concluded" } ], "purl": "pkg:pypi/email-validator@2.1.1", @@ -1062,7 +1086,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -1105,7 +1130,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -1142,7 +1168,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -1185,7 +1212,8 @@ { "license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } } ], @@ -1222,7 +1250,8 @@ { "license": { "id": "Apache-2.0", - "url": "https://opensource.org/licenses/Apache-2.0" + "url": "https://opensource.org/licenses/Apache-2.0", + "acknowledgement": "concluded" } } ], @@ -1265,7 +1294,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -1302,7 +1332,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -1345,13 +1376,15 @@ { "license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } }, { "license": { "id": "BSD-2-Clause", - "url": "https://opensource.org/licenses/BSD-2-Clause" + "url": "https://opensource.org/licenses/BSD-2-Clause", + "acknowledgement": "concluded" } } ], @@ -1394,7 +1427,8 @@ { "license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } } ], @@ -1437,7 +1471,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -1474,7 +1509,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -1511,7 +1547,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -1548,7 +1585,8 @@ { "license": { "id": "Apache-2.0", - "url": "https://opensource.org/licenses/Apache-2.0" + "url": 
"https://opensource.org/licenses/Apache-2.0", + "acknowledgement": "concluded" } } ], @@ -1589,7 +1627,8 @@ ], "licenses": [ { - "expression": "OSI Approved" + "expression": "OSI Approved", + "acknowledgement": "concluded" } ], "purl": "pkg:pypi/oauthlib@3.2.2", @@ -1631,7 +1670,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -1674,7 +1714,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -1717,13 +1758,15 @@ { "license": { "id": "Apache-2.0", - "url": "https://opensource.org/licenses/Apache-2.0" + "url": "https://opensource.org/licenses/Apache-2.0", + "acknowledgement": "concluded" } }, { "license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } } ], @@ -1758,7 +1801,8 @@ ], "licenses": [ { - "expression": "CMU License (MIT-CMU)" + "expression": "CMU License (MIT-CMU)", + "acknowledgement": "concluded" } ], "purl": "pkg:pypi/pillow@10.4.0", @@ -1804,7 +1848,8 @@ { "license": { "id": "LGPL-3.0-only", - "url": "https://opensource.org/licenses/LGPL-3.0-only" + "url": "https://opensource.org/licenses/LGPL-3.0-only", + "acknowledgement": "concluded" } } ], @@ -1847,7 +1892,8 @@ { "license": { "id": "LGPL-3.0-only", - "url": "https://opensource.org/licenses/LGPL-3.0-only" + "url": "https://opensource.org/licenses/LGPL-3.0-only", + "acknowledgement": "concluded" } } ], @@ -1890,7 +1936,8 @@ { "license": { "id": "Apache-2.0", - "url": "https://opensource.org/licenses/Apache-2.0" + "url": "https://opensource.org/licenses/Apache-2.0", + "acknowledgement": "concluded" } } ], @@ -1937,13 +1984,15 @@ { "license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } }, { "license": { "id": "BSD-3-Clause", - "url": "https://opensource.org/licenses/BSD-3-Clause" + "url": "https://opensource.org/licenses/BSD-3-Clause", + "acknowledgement": "concluded" } } ], @@ -1986,7 +2035,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2033,7 +2083,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2076,7 +2127,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2113,7 +2165,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2156,7 +2209,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2199,7 +2253,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2242,7 +2297,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2279,13 +2335,15 @@ { "license": { "id": 
"0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } }, { "license": { "id": "ISC", - "url": "https://opensource.org/licenses/ISC" + "url": "https://opensource.org/licenses/ISC", + "acknowledgement": "concluded" } } ], @@ -2328,7 +2386,8 @@ { "license": { "id": "Apache-2.0", - "url": "https://opensource.org/licenses/Apache-2.0" + "url": "https://opensource.org/licenses/Apache-2.0", + "acknowledgement": "concluded" } } ], @@ -2371,7 +2430,8 @@ { "license": { "id": "Apache-2.0", - "url": "https://opensource.org/licenses/Apache-2.0" + "url": "https://opensource.org/licenses/Apache-2.0", + "acknowledgement": "concluded" } } ], @@ -2414,7 +2474,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2451,7 +2512,8 @@ { "license": { "id": "0BSD", - "url": "https://opensource.org/licenses/0BSD" + "url": "https://opensource.org/licenses/0BSD", + "acknowledgement": "concluded" } } ], @@ -2488,7 +2550,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2525,7 +2588,8 @@ { "license": { "id": "PSF-2.0", - "url": "https://opensource.org/licenses/PSF-2.0" + "url": "https://opensource.org/licenses/PSF-2.0", + "acknowledgement": "concluded" } } ], @@ -2562,7 +2626,8 @@ { "license": { "id": "Apache-2.0", - "url": "https://opensource.org/licenses/Apache-2.0" + "url": "https://opensource.org/licenses/Apache-2.0", + "acknowledgement": "concluded" } } ], @@ -2603,7 +2668,8 @@ ], "licenses": [ { - "expression": "OSI Approved OR BSD 3-Clause License or Apache License, Version 2.0" + "expression": "OSI Approved OR BSD 3-Clause License or Apache License, Version 2.0", + "acknowledgement": "concluded" } ], "purl": "pkg:pypi/uritemplate@4.1.1", @@ -2645,7 +2711,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2682,7 +2749,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2719,7 +2787,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2756,7 +2825,8 @@ { "license": { "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "url": "https://opensource.org/licenses/MIT", + "acknowledgement": "concluded" } } ], @@ -2786,7 +2856,7 @@ ], "dependencies": [ { - "ref": "pkg:pypi/secobserve@1.26.0", + "ref": "pkg:pypi/secobserve@1.48.0", "dependsOn": [ "pkg:pypi/argon2-cffi@23.1.0", "pkg:pypi/cvss@3.2", diff --git a/backend/unittests/import_observations/parsers/cyclone_dx/files/grype.json b/backend/unittests/import_observations/parsers/cyclone_dx/files/grype.json index 60b43fd1f..5c62f800b 100644 --- a/backend/unittests/import_observations/parsers/cyclone_dx/files/grype.json +++ b/backend/unittests/import_observations/parsers/cyclone_dx/files/grype.json @@ -1,7 +1,6 @@ { "bomFormat": "CycloneDX", "specVersion": "1.4", - "serialNumber": "urn:uuid:3043a7c6-d8b4-4e44-a01a-05f27ca147d9", "version": 1, "metadata": { "timestamp": "2023-04-12T19:55:50Z", diff --git a/backend/unittests/import_observations/parsers/cyclone_dx/files/licenses_1.json 
b/backend/unittests/import_observations/parsers/cyclone_dx/files/licenses_1.json index 078c15d09..b17d7758f 100644 --- a/backend/unittests/import_observations/parsers/cyclone_dx/files/licenses_1.json +++ b/backend/unittests/import_observations/parsers/cyclone_dx/files/licenses_1.json @@ -31,11 +31,11 @@ ], "component": { "name": "SecObserve", - "version": "1.26.0", + "version": "1.48.0", "description": "SecObserve is an open source vulnerability management system for software development and cloud environments.", "type": "application", - "bom-ref": "pkg:pypi/secobserve@1.26.0", - "purl": "pkg:pypi/secobserve@1.26.0" + "bom-ref": "pkg:pypi/secobserve@1.48.0", + "purl": "pkg:pypi/secobserve@1.48.0" }, "properties": [ { @@ -189,8 +189,7 @@ "licenses": [ { "license": { - "id": "MIT", - "url": "https://opensource.org/licenses/MIT" + "name": "attrs non-standard license" } } ], @@ -2789,7 +2788,7 @@ ], "dependencies": [ { - "ref": "pkg:pypi/secobserve@1.26.0", + "ref": "pkg:pypi/secobserve@1.48.0", "dependsOn": [ "pkg:pypi/argon2-cffi@23.1.0", "pkg:pypi/cvss@3.2", diff --git a/backend/unittests/import_observations/parsers/cyclone_dx/files/trivy.json b/backend/unittests/import_observations/parsers/cyclone_dx/files/trivy.json index a2b0a1b55..ab942a6b2 100644 --- a/backend/unittests/import_observations/parsers/cyclone_dx/files/trivy.json +++ b/backend/unittests/import_observations/parsers/cyclone_dx/files/trivy.json @@ -2,7 +2,7 @@ "bomFormat": "CycloneDX", "specVersion": "1.4", "serialNumber": "urn:uuid:fa9f9148-2935-422a-b058-20afa8cafa82", - "version": 1, + "version": 99, "metadata": { "timestamp": "2023-04-12T19:57:36+00:00", "tools": [ @@ -431,10 +431,16 @@ "version": "20220614-r4", "licenses": [ { - "expression": "MPL-2.0" + "license": { + "id": "MPL-2.0", + "acknowledgement": "declared" + } }, { - "expression": "MIT" + "license": { + "name": "MIT", + "acknowledgement": "declard" + } } ], "purl": "pkg:apk/alpine/ca-certificates@20220614-r4?distro=3.17.3", @@ -468,10 +474,16 @@ "version": "20220614-r4", "licenses": [ { - "expression": "MPL-2.0" + "license": { + "name": "MPL-2.0", + "acknowledgement": "concluded" + } }, { - "expression": "MIT" + "license": { + "id": "MIT", + "acknowledgement": "concluded" + } } ], "purl": "pkg:apk/alpine/ca-certificates-bundle@20220614-r4?distro=3.17.3", @@ -3150,6 +3162,20 @@ "vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:N/A:H" } ], + "references": [ + { + "id": "GHSA-35m5-8cvj-8783", + "source": { + "name": "name 1" + } + }, + { + "id": "alias 2", + "source": { + "name": "name 2" + } + } + ], "description": "No description is available for this CVE.", "recommendation": "Upgrade libxml2 to version 2.10.4-r0", "advisories": [ @@ -3227,4 +3253,4 @@ ] } ] -} +} \ No newline at end of file diff --git a/backend/unittests/import_observations/parsers/cyclone_dx/test_parser.py b/backend/unittests/import_observations/parsers/cyclone_dx/test_parser.py index 96d65e7c4..94236e049 100644 --- a/backend/unittests/import_observations/parsers/cyclone_dx/test_parser.py +++ b/backend/unittests/import_observations/parsers/cyclone_dx/test_parser.py @@ -1,19 +1,22 @@ from os import path from unittest import TestCase +from application.core.models import Product from application.core.types import Severity from application.import_observations.parsers.cyclone_dx.parser import CycloneDXParser from application.import_observations.services.parser_detector import detect_parser class TestCycloneDXParser(TestCase): - def test_grype(self): + def test_grype_no_bom_link(self): with 
open(path.dirname(__file__) + "/files/grype.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("CycloneDX", parser.name) - self.assertTrue(isinstance(parser_instance, CycloneDXParser)) + self.assertIsInstance(parser_instance, CycloneDXParser) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("grype / 0.59.1", scanner) self.assertEqual(8, len(observations)) observation = observations[0] @@ -28,16 +31,13 @@ def test_grype(self): self.assertEqual(Severity.SEVERITY_MEDIUM, observation.parser_severity) self.assertEqual("python", observation.origin_component_name) self.assertEqual("3.11.3", observation.origin_component_version) - self.assertEqual( - "pkg:generic/python@3.11.3", observation.origin_component_purl - ) + self.assertEqual("pkg:generic/python@3.11.3", observation.origin_component_purl) self.assertEqual( "cpe:2.3:a:python_software_foundation:python:3.11.3:*:*:*:*:*:*:*", observation.origin_component_cpe, ) - self.assertEqual( - "example/example:dev", observation.origin_docker_image_name - ) + self.assertEqual("", observation.origin_component_cyclonedx_bom_link) + self.assertEqual("example/example:dev", observation.origin_docker_image_name) self.assertEqual("", observation.origin_docker_image_tag) self.assertEqual( "sha256:88901af20b50287be153ec4f20ed78f947eb5fa0d0a52432ced6e261b66b6cbc", @@ -69,9 +69,7 @@ def test_grype(self): self.assertEqual("CVE-2022-47015", observation.vulnerability_id) self.assertEqual("", observation.parser_severity) self.assertEqual(6.5, observation.cvss3_score) - self.assertEqual( - "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:N/I:N/A:H", observation.cvss3_vector - ) + self.assertEqual("CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:N/I:N/A:H", observation.cvss3_vector) self.assertEqual("mariadb-client", observation.origin_component_name) self.assertEqual("10.6.12-r0", observation.origin_component_version) self.assertEqual( @@ -82,9 +80,7 @@ def test_grype(self): "cpe:2.3:a:mariadb-client:mariadb-client:10.6.12-r0:*:*:*:*:*:*:*", observation.origin_component_cpe, ) - self.assertEqual( - "example/example:dev", observation.origin_docker_image_name - ) + self.assertEqual("example/example:dev", observation.origin_docker_image_name) self.assertEqual("", observation.origin_docker_image_tag) self.assertEqual( "sha256:88901af20b50287be153ec4f20ed78f947eb5fa0d0a52432ced6e261b66b6cbc", @@ -113,17 +109,17 @@ def test_grype_component_version(self): with open(path.dirname(__file__) + "/files/grype_2.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("CycloneDX", parser.name) - self.assertTrue(isinstance(parser_instance, CycloneDXParser)) + self.assertIsInstance(parser_instance, CycloneDXParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) - observations = parser_instance.get_observations(data) + self.assertEqual("grype / 0.65.1", scanner) self.assertEqual(1, len(observations)) observation = observations[0] self.assertEqual("CVE-2018-20225", observation.vulnerability_id) self.assertEqual("grype / 0.65.1", observation.scanner) - self.assertEqual( - "example/example-backend", observation.origin_docker_image_name - ) + self.assertEqual("example/example-backend", observation.origin_docker_image_name) self.assertEqual("dev", observation.origin_docker_image_tag) self.assertEqual("", observation.origin_docker_image_digest) @@ -131,29 +127,31 @@ 
def test_grype_tools_components(self): with open(path.dirname(__file__) + "/files/grype_3.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("CycloneDX", parser.name) - self.assertTrue(isinstance(parser_instance, CycloneDXParser)) + self.assertIsInstance(parser_instance, CycloneDXParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) - observations = parser_instance.get_observations(data) + self.assertEqual("grype / 0.73.5", scanner) self.assertEqual(1, len(observations)) observation = observations[0] self.assertEqual("CVE-2023-42363", observation.vulnerability_id) self.assertEqual("grype / 0.73.5", observation.scanner) - self.assertEqual( - "example/example-backend", observation.origin_docker_image_name - ) + self.assertEqual("example/example-backend", observation.origin_docker_image_name) self.assertEqual("dev", observation.origin_docker_image_tag) self.assertEqual("", observation.origin_docker_image_digest) - def test_trivy(self): + def test_trivy_observations(self): self.maxDiff = None with open(path.dirname(__file__) + "/files/trivy.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("CycloneDX", parser.name) - self.assertTrue(isinstance(parser_instance, CycloneDXParser)) + self.assertIsInstance(parser_instance, CycloneDXParser) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("trivy / 0.38.3", scanner) self.assertEqual(2, len(observations)) observation = observations[0] @@ -162,11 +160,10 @@ def test_trivy(self): description = """No description is available for this CVE.""" self.assertEqual(description, observation.description) self.assertEqual("CVE-2023-29469", observation.vulnerability_id) + self.assertEqual("GHSA-35m5-8cvj-8783, alias 2", observation.vulnerability_id_aliases) self.assertEqual("", observation.parser_severity) self.assertEqual(5.9, observation.cvss3_score) - self.assertEqual( - "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:N/A:H", observation.cvss3_vector - ) + self.assertEqual("CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:N/A:H", observation.cvss3_vector) self.assertEqual("libxml2", observation.origin_component_name) self.assertEqual("2.10.3-r1", observation.origin_component_version) self.assertEqual( @@ -174,15 +171,15 @@ def test_trivy(self): observation.origin_component_purl, ) self.assertEqual( - "example/example-frontend:dev", observation.origin_docker_image_name + "urn:cdx:fa9f9148-2935-422a-b058-20afa8cafa82/99#pkg:apk/alpine/libxml2@2.10.3-r1?distro=3.17.3", + observation.origin_component_cyclonedx_bom_link, ) + self.assertEqual("example/example-frontend:dev", observation.origin_docker_image_name) self.assertEqual("", observation.origin_docker_image_tag) self.assertEqual("", observation.origin_docker_image_digest) - expected_dependencies = """example/example-frontend:dev --> alpine:3.17.3 -alpine:3.17.3 --> libxml2:2.10.3-r1""" - self.assertEqual( - expected_dependencies, observation.origin_component_dependencies - ) + expected_dependencies = """alpine:3.17.3 --> libxml2:2.10.3-r1 +example/example-frontend:dev --> alpine:3.17.3""" + self.assertEqual(expected_dependencies, observation.origin_component_dependencies) self.assertEqual( "https://access.redhat.com/security/cve/CVE-2023-29469", observation.unsaved_references[0], @@ -200,11 +197,14 @@ def test_trivy(self): observation = observations[1] 
self.assertEqual("CVE-2023-28484", observation.title) - expected_dependencies = """example/example-frontend:dev --> alpine:3.17.3 -alpine:3.17.3 --> busybox:1.35.0-r29 + expected_dependencies = """alpine:3.17.3 --> busybox:1.35.0-r29 +alpine:3.17.3 --> geoip:1.6.12-r3 alpine:3.17.3 --> icu-data-en:72.1-r1 alpine:3.17.3 --> icu-libs:72.1-r1 busybox:1.35.0-r29 --> icu-libs:72.1-r1 +example/example-frontend:dev --> alpine:3.17.3 +geoip:1.6.12-r3 --> icu-data-en:72.1-r1 +icu-data-en:72.1-r1 --> geoip:1.6.12-r3 icu-data-en:72.1-r1 --> icu-libs:72.1-r1""" self.assertEqual( expected_dependencies, @@ -212,11 +212,51 @@ def test_trivy(self): ) self.assertEqual("", observation.parser_severity) self.assertEqual(5.9, observation.cvss3_score) - self.assertEqual( - "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:N/A:H", observation.cvss3_vector - ) + self.assertEqual("CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:N/A:H", observation.cvss3_vector) self.assertEqual(8.8, observation.cvss4_score) self.assertEqual( "CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:L/VA:L/SC:L/SI:L/SA:N", observation.cvss4_vector, ) + + def test_trivy_license_components(self): + self.maxDiff = None + + with open(path.dirname(__file__) + "/files/trivy.json") as testfile: + parser, parser_instance, data = detect_parser(testfile) + self.assertEqual("CycloneDX", parser.name) + self.assertIsInstance(parser_instance, CycloneDXParser) + + license_components, scanner = parser_instance.get_license_components(data) + + self.assertEqual("trivy / 0.38.3", scanner) + self.assertEqual(87, len(license_components)) + + license_component = license_components[10] + self.assertEqual("c-ares", license_component.component_name) + self.assertEqual("1.18.1-r1", license_component.component_version) + self.assertEqual("pkg:apk/alpine/c-ares@1.18.1-r1?distro=3.17.3", license_component.component_purl) + self.assertEqual("", license_component.component_cpe) + self.assertEqual( + "urn:cdx:fa9f9148-2935-422a-b058-20afa8cafa82/99#pkg:apk/alpine/c-ares@1.18.1-r1?distro=3.17.3", + license_component.component_cyclonedx_bom_link, + ) + dependencies = """alpine:3.17.3 --> c-ares:1.18.1-r1 +example/example-frontend:dev --> alpine:3.17.3""" + self.assertEqual(dependencies, license_component.component_dependencies) + self.assertEqual(["MIT"], license_component.unsaved_declared_licenses) + self.assertEqual("Component", license_component.unsaved_evidences[0][0]) + self.assertIn( + '"bom-ref": "pkg:apk/alpine/c-ares@1.18.1-r1?distro=3.17.3"', + license_component.unsaved_evidences[0][1], + ) + + license_component = license_components[11] + self.assertEqual("ca-certificates", license_component.component_name) + self.assertEqual("20220614-r4", license_component.component_version) + self.assertEqual(["MPL-2.0", "MIT"], license_component.unsaved_declared_licenses) + + license_component = license_components[12] + self.assertEqual("ca-certificates-bundle", license_component.component_name) + self.assertEqual("20220614-r4", license_component.component_version) + self.assertEqual(["MPL-2.0", "MIT"], license_component.unsaved_concluded_licenses) diff --git a/backend/unittests/import_observations/parsers/drheader/test_parser.py b/backend/unittests/import_observations/parsers/drheader/test_parser.py index 0fad04dce..b21f3f2c3 100644 --- a/backend/unittests/import_observations/parsers/drheader/test_parser.py +++ b/backend/unittests/import_observations/parsers/drheader/test_parser.py @@ -1,19 +1,22 @@ from os import path from unittest import TestCase +from application.core.models import Product from 
application.core.types import Severity from application.import_observations.parsers.drheader.parser import DrHEADerParser from application.import_observations.services.parser_detector import detect_parser -class TestCycloneDXParser(TestCase): +class TestDrHeaderParser(TestCase): def test_drheader(self): with open(path.dirname(__file__) + "/files/drheader.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("DrHeader", parser.name) - self.assertTrue(isinstance(parser_instance, DrHEADerParser)) + self.assertIsInstance(parser_instance, DrHEADerParser) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("DrHeader", scanner) self.assertEqual(6, len(observations)) observation = observations[1] diff --git a/backend/unittests/import_observations/parsers/gitleaks/__init__.py b/backend/unittests/import_observations/parsers/gitleaks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/import_observations/parsers/gitleaks/files/gitleaks.dir.json b/backend/unittests/import_observations/parsers/gitleaks/files/gitleaks.dir.json new file mode 100644 index 000000000..194cc4ed6 --- /dev/null +++ b/backend/unittests/import_observations/parsers/gitleaks/files/gitleaks.dir.json @@ -0,0 +1,64 @@ +{ + "findings": [ + { + "RuleID": "generic-api-key", + "Description": "Detected a Generic API Key, potentially exposing access to various services and sensitive operations.", + "StartLine": 18, + "EndLine": 19, + "StartColumn": 9, + "EndColumn": 90, + "Match": "DJANGO_SECRET_KEY=NxYPEF5lNGgk3yonndjSbwP7\r\n7uNJxOvfKTjF5aVBqsHktNlf1wfJHHvJ8iifk32r", + "Secret": "NxYPEF5lNGgk3yonndjSbwP7\r\n7uNJxOvfKTjF5aVBqsHktNlf1wfJHHvJ8iifk32r", + "File": "backend/bin/run_pylint.sh", + "SymlinkFile": "", + "Commit": "", + "Entropy": 5.152115, + "Author": "", + "Email": "", + "Date": "", + "Message": "", + "Tags": [], + "Fingerprint": "backend/bin/run_pylint.sh:generic-api-key:18" + }, + { + "RuleID": "generic-api-key", + "Description": "Detected a Generic API Key, potentially exposing access to various services and sensitive operations.", + "StartLine": 19, + "EndLine": 19, + "StartColumn": 9, + "EndColumn": 73, + "Match": "FIELD_ENCRYPTION_KEY=DtlkqVb3wlaVdJK_BU-3mB4wwuuf8xx8YNInajiJ7GU=", + "Secret": "DtlkqVb3wlaVdJK_BU-3mB4wwuuf8xx8YNInajiJ7GU=", + "File": "backend/bin/run_pylint.sh", + "SymlinkFile": "", + "Commit": "", + "Entropy": 4.8968205, + "Author": "", + "Email": "", + "Date": "", + "Message": "", + "Tags": [], + "Fingerprint": "backend/bin/run_pylint.sh:generic-api-key:19" + }, + { + "RuleID": "generic-api-key", + "Description": "Detected a Generic API Key, potentially exposing access to various services and sensitive operations.", + "StartLine": 18, + "EndLine": 18, + "StartColumn": 9, + "EndColumn": 90, + "Match": "DJANGO_SECRET_KEY=NxYPEF5lNGgk3yonndjSbwP77uNJxOvfKTjF5aVBqsHktNlf1wfJHHvJ8iifk32r", + "Secret": "NxYPEF5lNGgk3yonndjSbwP77uNJxOvfKTjF5aVBqsHktNlf1wfJHHvJ8iifk32r", + "File": "backend/bin/run_mypy.sh", + "SymlinkFile": "", + "Commit": "", + "Entropy": 5.152115, + "Author": "", + "Email": "", + "Date": "", + "Message": "", + "Tags": [], + "Fingerprint": "backend/bin/run_mypy.sh:generic-api-key:18" + } + ] +} diff --git a/backend/unittests/import_observations/parsers/gitleaks/files/gitleaks.empty.json b/backend/unittests/import_observations/parsers/gitleaks/files/gitleaks.empty.json new file mode 100644 index 
000000000..282dba8bb --- /dev/null +++ b/backend/unittests/import_observations/parsers/gitleaks/files/gitleaks.empty.json @@ -0,0 +1,3 @@ +{ + "findings": [] +} diff --git a/backend/unittests/import_observations/parsers/gitleaks/files/gitleaks.git.json b/backend/unittests/import_observations/parsers/gitleaks/files/gitleaks.git.json new file mode 100644 index 000000000..936e5eabb --- /dev/null +++ b/backend/unittests/import_observations/parsers/gitleaks/files/gitleaks.git.json @@ -0,0 +1,67 @@ +{ + "findings": [ + { + "RuleID": "generic-api-key", + "Description": "Detected a Generic API Key, potentially exposing access to various services and sensitive operations.", + "StartLine": 42, + "EndLine": 43, + "StartColumn": 8, + "EndColumn": 90, + "Match": "DJANGO_SECRET_KEY: NxYPEF5lNGgk3yonndjSbwP77uNJxOvfKTjF5aVBqsHktNlf1wfJHHvJ8iifk32r", + "Secret": "NxYPEF5lNGgk3yonndjSbwP77uNJxOvfKTjF5aVBqsHktNlf1wfJHHvJ8iifk32r", + "File": "docker-compose-playwright.yml", + "SymlinkFile": "", + "Commit": "b718aa975b79d141225d928ba2822ea010b74d2b", + "Link": "https://github.com/SecObserve/SecObserve/blob/b718aa975b79d141225d928ba2822ea010b74d2b/docker-compose-playwright.yml#L42", + "Entropy": 5.152115, + "Author": "Stefan Fleckenstein", + "Email": "stefan.fleckenstein@example.org", + "Date": "2023-09-22T07:53:30Z", + "Message": "chore: switch from cypress to playwright (#568)\n\n* chore: switch from cypress to playwright\n\n* fix: database file for sqlite\n\n* fix: flake8", + "Tags": [], + "Fingerprint": "b718aa975b79d141225d928ba2822ea010b74d2b:docker-compose-playwright.yml:generic-api-key:42" + }, + { + "RuleID": "generic-api-key", + "Description": "Detected a Generic API Key, potentially exposing access to various services and sensitive operations.", + "StartLine": 43, + "EndLine": 43, + "StartColumn": 8, + "EndColumn": 73, + "Match": "FIELD_ENCRYPTION_KEY: DtlkqVb3wlaVdJK_BU-3mB4wwuuf8xx8YNInajiJ7GU=", + "Secret": "DtlkqVb3wlaVdJK_BU-3mB4wwuuf8xx8YNInajiJ7GU=", + "File": "docker-compose-playwright.yml", + "SymlinkFile": "", + "Commit": "b718aa975b79d141225d928ba2822ea010b74d2b", + "Link": "https://github.com/SecObserve/SecObserve/blob/b718aa975b79d141225d928ba2822ea010b74d2b/docker-compose-playwright.yml#L43", + "Entropy": 4.8968205, + "Author": "Stefan Fleckenstein", + "Email": "stefan.fleckenstein@example.org", + "Date": "2023-09-22T07:53:30Z", + "Message": "chore: switch from cypress to playwright (#568)\n\n* chore: switch from cypress to playwright\n\n* fix: database file for sqlite\n\n* fix: flake8", + "Tags": [], + "Fingerprint": "b718aa975b79d141225d928ba2822ea010b74d2b:docker-compose-playwright.yml:generic-api-key:43" + }, + { + "RuleID": "generic-api-key", + "Description": "Detected a Generic API Key, potentially exposing access to various services and sensitive operations.", + "StartLine": 47, + "EndLine": 47, + "StartColumn": 8, + "EndColumn": 90, + "Match": "DJANGO_SECRET_KEY: NxYPEF5lNGgk3yonndjSbwP77uNJxOvfKTjF5aVBqsHktNlf1wfJHHvJ8iifk32r", + "Secret": "NxYPEF5lNGgk3yonndjSbwP77uNJxOvfKTjF5aVBqsHktNlf1wfJHHvJ8iifk32r", + "File": "docker-compose-playwright.yml", + "SymlinkFile": "", + "Commit": "a22291a13233b1282ddd0e54e479385690c06fa5", + "Link": "https://github.com/SecObserve/SecObserve/blob/a22291a13233b1282ddd0e54e479385690c06fa5/docker-compose-playwright.yml#L47", + "Entropy": 5.152115, + "Author": "Stefan Fleckenstein", + "Email": "stefan.fleckenstein@example.org", + "Date": "2023-09-21T20:15:21Z", + "Message": "chore: switch from cypress to playwright", + "Tags": [], + "Fingerprint": 
"a22291a13233b1282ddd0e54e479385690c06fa5:docker-compose-playwright.yml:generic-api-key:47" + } + ] +} \ No newline at end of file diff --git a/backend/unittests/import_observations/parsers/gitleaks/test_parser.py b/backend/unittests/import_observations/parsers/gitleaks/test_parser.py new file mode 100644 index 000000000..4fb14aff6 --- /dev/null +++ b/backend/unittests/import_observations/parsers/gitleaks/test_parser.py @@ -0,0 +1,99 @@ +from json import load +from os import path +from unittest import TestCase + +from rest_framework.exceptions import ValidationError + +from application.core.models import Product +from application.core.types import Severity +from application.import_observations.parsers.gitleaks.parser import GitleaksParser +from application.import_observations.services.parser_detector import detect_parser + + +class TestGitleaksParser(TestCase): + def test_no_observations(self): + with open(path.dirname(__file__) + "/files/gitleaks.empty.json") as testfile: + parser = GitleaksParser() + data = load(testfile) + + self.assertTrue(parser.check_format(data)) + + def test_gitleaks_dir(self): + with open(path.dirname(__file__) + "/files/gitleaks.dir.json") as testfile: + parser, parser_instance, data = detect_parser(testfile) + self.assertEqual("Gitleaks", parser.name) + self.assertIsInstance(parser_instance, GitleaksParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("Gitleaks", scanner) + self.assertEqual(3, len(observations)) + + observation = observations[0] + self.assertEqual("generic-api-key", observation.title) + description = """Detected a Generic API Key, potentially exposing access to various services and sensitive operations. + +**Match:** `DJANGO_SECRET_KEY=REDACTED`""" + self.assertEqual(description, observation.description) + self.assertEqual(Severity.SEVERITY_MEDIUM, observation.parser_severity) + + self.assertEqual("backend/bin/run_pylint.sh", observation.origin_source_file) + self.assertEqual(18, observation.origin_source_line_start) + self.assertEqual(19, observation.origin_source_line_end) + + self.assertEqual("Entry", observation.unsaved_evidences[0][0]) + self.assertIn( + '"Entropy": 5.152115', + observation.unsaved_evidences[0][1], + ) + + def test_gitleaks_git(self): + with open(path.dirname(__file__) + "/files/gitleaks.git.json") as testfile: + parser, parser_instance, data = detect_parser(testfile) + self.assertEqual("Gitleaks", parser.name) + self.assertIsInstance(parser_instance, GitleaksParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("Gitleaks", scanner) + self.assertEqual(3, len(observations)) + + observation = observations[0] + self.assertEqual("generic-api-key", observation.title) + description = """Detected a Generic API Key, potentially exposing access to various services and sensitive operations. 
+ +**Match:** `DJANGO_SECRET_KEY: REDACTED` + +**Commit hash:** b718aa975b79d141225d928ba2822ea010b74d2b + +**Commit date:** 2023-09-22T07:53:30Z + +**Commit message:** chore: switch from cypress to playwright (#568) ...""" + self.assertEqual(description, observation.description) + self.assertEqual(Severity.SEVERITY_MEDIUM, observation.parser_severity) + + self.assertEqual("docker-compose-playwright.yml", observation.origin_source_file) + self.assertEqual(42, observation.origin_source_line_start) + self.assertEqual(43, observation.origin_source_line_end) + self.assertEqual( + "https://github.com/SecObserve/SecObserve/blob/b718aa975b79d141225d928ba2822ea010b74d2b/docker-compose-playwright.yml#L42", + observation.origin_source_file_link, + ) + + self.assertEqual("Entry", observation.unsaved_evidences[0][0]) + self.assertIn( + '"Entropy": 5.152115', + observation.unsaved_evidences[0][1], + ) + + observation = observations[2] + description = """Detected a Generic API Key, potentially exposing access to various services and sensitive operations. + +**Match:** `DJANGO_SECRET_KEY: REDACTED` + +**Commit hash:** a22291a13233b1282ddd0e54e479385690c06fa5 + +**Commit date:** 2023-09-21T20:15:21Z + +**Commit message:** chore: switch from cypress to playwright""" + self.assertEqual(description, observation.description) diff --git a/backend/unittests/import_observations/parsers/ocsf/files/other_finding.json b/backend/unittests/import_observations/parsers/ocsf/files/other_finding.json index 687f31b48..6d5c3cd65 100644 --- a/backend/unittests/import_observations/parsers/ocsf/files/other_finding.json +++ b/backend/unittests/import_observations/parsers/ocsf/files/other_finding.json @@ -13,7 +13,7 @@ "severity": "High", "status": "New", "status_code": "FAIL", - "status_detail": "Public access to nodes is enabled for cluster 'aks-dev' in subscription 'MaibornWolff - MAKS II'", + "status_detail": "Public access to nodes is enabled for cluster 'aks-dev' in subscription 'SecObserve'", "status_id": 1, "unmapped": { "check_type": "", @@ -60,14 +60,14 @@ "class_uid": 2002, "cloud": { "account": { - "name": "MaibornWolff - MAKS II", + "name": "SecObserve", "type": "Azure_AD_Account", "type_id": 6, "uid": "7c8df2de-ec2f-441a-b9be-1a8852acc9dd", "labels": [] }, "org": { - "name": "maiborn.net", + "name": "example.net", "uid": "b8d7ad48-53f4-4c29-a71c-0717f0d3a5d0" }, "provider": "azure", diff --git a/backend/unittests/import_observations/parsers/ocsf/files/prowler_kubernetes.ocsf.json b/backend/unittests/import_observations/parsers/ocsf/files/prowler_kubernetes.ocsf.json new file mode 100644 index 000000000..1affa7181 --- /dev/null +++ b/backend/unittests/import_observations/parsers/ocsf/files/prowler_kubernetes.ocsf.json @@ -0,0 +1,366 @@ +[ + { + "message": "Pod cert-manager does not use HostPorts. 
FAIL", + "metadata": { + "event_code": "core_minimize_admission_hostport_containers", + "product": { + "name": "Prowler", + "uid": "prowler", + "vendor_name": "Prowler", + "version": "5.16.1" + }, + "profiles": [ + "container", + "datetime" + ], + "version": "1.5.0" + }, + "severity_id": 4, + "severity": "High", + "status": "New", + "status_code": "FAIL", + "status_detail": "Pod cert-manager does not use HostPorts.", + "status_id": 1, + "unmapped": { + "related_url": "https://kubernetes.io/docs/concepts/security/pod-security-standards/", + "categories": [ + "internet-exposed" + ], + "depends_on": [], + "related_to": [], + "additional_urls": [], + "notes": "Carefully evaluate the need for HostPorts in container configurations and prefer network policies for secure communication.", + "compliance": { + "PCI-4.0": [ + "1.2.5.17", + "1.2.8.13", + "1.2.8.16", + "1.2.8.20", + "1.2.8.28", + "1.2.8.30", + "1.2.8.41", + "1.3.1.8", + "1.3.1.29", + "1.3.1.34", + "1.3.2.18", + "1.3.2.28", + "1.3.2.45", + "1.4.2.26", + "1.4.2.43", + "1.4.4.7", + "1.5.1.16", + "1.5.1.32", + "1.5.1.40", + "10.3.2.18", + "10.3.2.19", + "11.5.1.1.1", + "2.2.5.17", + "3.5.1.3.6", + "3.5.1.3.14", + "3.5.1.3.20", + "3.5.1.3.23", + "A1.1.3.26", + "A1.1.3.40", + "A3.4.1.8", + "A3.4.1.18" + ], + "CIS-1.11.1": [ + "5.2.13" + ], + "ProwlerThreatScore-1.0": [ + "2.1.2" + ], + "CIS-1.10": [ + "5.2.13" + ], + "CIS-1.8": [ + "5.2.13" + ] + } + }, + "activity_name": "Create", + "activity_id": 1, + "finding_info": { + "created_time": 1768388905, + "created_time_dt": "2026-01-14T11:08:25.883259", + "desc": "This check ensures that Kubernetes clusters are configured to minimize the admission of containers that require the use of HostPorts. This helps maintain network policy controls and reduce security risks.", + "title": "Minimize the admission of containers which use HostPorts", + "types": [], + "uid": "prowler-kubernetes-core_minimize_admission_hostport_containers-cluster_node-namespace: cert-manager-cert-manager" + }, + "resources": [ + { + "data": { + "details": "", + "metadata": { + "name": "cert-manager", + "uid": "aa8f6baf-1b52-4023-adca-4bef65a59e51", + "namespace": "cert-manager", + "labels": { + "app": "cert-manager", + "app.kubernetes.io/component": "controller", + "app.kubernetes.io/instance": "cert-manager", + "app.kubernetes.io/managed-by": "Helm", + "app.kubernetes.io/name": "cert-manager", + "app.kubernetes.io/version": "v1.7.0", + "helm.sh/chart": "cert-manager-v1.7.0", + "pod-template-hash": "67644fb9d8" + }, + "annotations": { + "prometheus.io/path": "/metrics", + "prometheus.io/port": "9402", + "prometheus.io/scrape": "true" + }, + "node_name": "cluster_node-jlze6bf4fi", + "service_account": "cert-manager", + "status_phase": "Running", + "pod_ip": "242.59.13.70", + "host_ip": "109.0.85.203", + "host_pid": null, + "host_ipc": null, + "host_network": null, + "security_context": { + "app_armor_profile": null, + "fs_group": null, + "fs_group_change_policy": null, + "run_as_group": null, + "run_as_non_root": true, + "run_as_user": null, + "se_linux_change_policy": null, + "se_linux_options": null, + "seccomp_profile": null, + "supplemental_groups": null, + "supplemental_groups_policy": null, + "sysctls": null, + "windows_options": null + }, + "containers": { + "cert-manager": { + "name": "cert-manager", + "image": "quay.io/jetstack/cert-manager-controller@sha256:d6d12274f4b9c9c9cae2bcdc837744006d5f301c1dfa3e50f4a67d08f3bf9589", + "command": null, + "ports": [ + { + "containerPort": 1234 + } + ], + "env": [ + { + "name": 
"POD_NAMESPACE", + "value": null + } + ], + "security_context": {} + } + } + } + }, + "group": { + "name": "core" + }, + "labels": [], + "name": "cert-manager", + "namespace": "cert-manager-namespace", + "type": "KubernetesPod", + "uid": "aa8f6baf-1b52-4023-adca-4bef65a59e51" + } + ], + "category_name": "Findings", + "class_name": "Detection Finding", + "remediation": { + "desc": "Limit the use of HostPorts in Kubernetes containers to maintain network security.", + "references": [ + "https://kubernetes.io/docs/concepts/security/pod-security-standards/" + ] + }, + "risk_details": "Permitting containers with HostPorts can bypass network policy controls, increasing the risk of unauthorized network access.", + "time": 1768388905, + "time_dt": "2026-01-14T11:08:25.883259", + "type_uid": 200401, + "type_name": "Detection Finding: Create", + "category_uid": 2, + "class_uid": 2004 + }, + { + "message": "Pod cert-manager does not use HostPorts. PASS", + "metadata": { + "event_code": "core_minimize_admission_hostport_containers", + "product": { + "name": "Prowler", + "uid": "prowler", + "vendor_name": "Prowler", + "version": "5.16.1" + }, + "profiles": [ + "container", + "datetime" + ], + "version": "1.5.0" + }, + "severity_id": 4, + "severity": "High", + "status": "New", + "status_code": "PASS", + "status_detail": "Pod cert-manager does not use HostPorts.", + "status_id": 1, + "unmapped": { + "related_url": "https://kubernetes.io/docs/concepts/security/pod-security-standards/", + "categories": [ + "internet-exposed" + ], + "depends_on": [], + "related_to": [], + "additional_urls": [], + "notes": "Carefully evaluate the need for HostPorts in container configurations and prefer network policies for secure communication.", + "compliance": { + "PCI-4.0": [ + "1.2.5.17", + "1.2.8.13", + "1.2.8.16", + "1.2.8.20", + "1.2.8.28", + "1.2.8.30", + "1.2.8.41", + "1.3.1.8", + "1.3.1.29", + "1.3.1.34", + "1.3.2.18", + "1.3.2.28", + "1.3.2.45", + "1.4.2.26", + "1.4.2.43", + "1.4.4.7", + "1.5.1.16", + "1.5.1.32", + "1.5.1.40", + "10.3.2.18", + "10.3.2.19", + "11.5.1.1.1", + "2.2.5.17", + "3.5.1.3.6", + "3.5.1.3.14", + "3.5.1.3.20", + "3.5.1.3.23", + "A1.1.3.26", + "A1.1.3.40", + "A3.4.1.8", + "A3.4.1.18" + ], + "CIS-1.11.1": [ + "5.2.13" + ], + "ProwlerThreatScore-1.0": [ + "2.1.2" + ], + "CIS-1.10": [ + "5.2.13" + ], + "CIS-1.8": [ + "5.2.13" + ] + } + }, + "activity_name": "Create", + "activity_id": 1, + "finding_info": { + "created_time": 1768388905, + "created_time_dt": "2026-01-14T11:08:25.883259", + "desc": "This check ensures that Kubernetes clusters are configured to minimize the admission of containers that require the use of HostPorts. 
This helps maintain network policy controls and reduce security risks.", + "title": "Minimize the admission of containers which use HostPorts", + "types": [], + "uid": "prowler-kubernetes-core_minimize_admission_hostport_containers-cluster_node-namespace: cert-manager-cert-manager" + }, + "resources": [ + { + "data": { + "details": "", + "metadata": { + "name": "cert-manager", + "uid": "aa8f6baf-1b52-4023-adca-4bef65a59e51", + "namespace": "cert-manager", + "labels": { + "app": "cert-manager", + "app.kubernetes.io/component": "controller", + "app.kubernetes.io/instance": "cert-manager", + "app.kubernetes.io/managed-by": "Helm", + "app.kubernetes.io/name": "cert-manager", + "app.kubernetes.io/version": "v1.7.0", + "helm.sh/chart": "cert-manager-v1.7.0", + "pod-template-hash": "67644fb9d8" + }, + "annotations": { + "prometheus.io/path": "/metrics", + "prometheus.io/port": "9402", + "prometheus.io/scrape": "true" + }, + "node_name": "cluster_node-jlze6bf4fi", + "service_account": "cert-manager", + "status_phase": "Running", + "pod_ip": "242.59.13.70", + "host_ip": "109.0.85.203", + "host_pid": null, + "host_ipc": null, + "host_network": null, + "security_context": { + "app_armor_profile": null, + "fs_group": null, + "fs_group_change_policy": null, + "run_as_group": null, + "run_as_non_root": true, + "run_as_user": null, + "se_linux_change_policy": null, + "se_linux_options": null, + "seccomp_profile": null, + "supplemental_groups": null, + "supplemental_groups_policy": null, + "sysctls": null, + "windows_options": null + }, + "containers": { + "cert-manager": { + "name": "cert-manager", + "image": "quay.io/jetstack/cert-manager-controller@sha256:d6d12274f4b9c9c9cae2bcdc837744006d5f301c1dfa3e50f4a67d08f3bf9589", + "command": null, + "ports": [ + { + "containerPort": 1234 + } + ], + "env": [ + { + "name": "POD_NAMESPACE", + "value": null + } + ], + "security_context": {} + } + } + } + }, + "group": { + "name": "core" + }, + "labels": [], + "name": "cert-manager", + "namespace": "cert-manager-namespace", + "type": "KubernetesPod", + "uid": "aa8f6baf-1b52-4023-adca-4bef65a59e51" + } + ], + "category_name": "Findings", + "class_name": "Detection Finding", + "remediation": { + "desc": "Limit the use of HostPorts in Kubernetes containers to maintain network security.", + "references": [ + "https://kubernetes.io/docs/concepts/security/pod-security-standards/" + ] + }, + "risk_details": "Permitting containers with HostPorts can bypass network policy controls, increasing the risk of unauthorized network access.", + "time": 1768388905, + "time_dt": "2026-01-14T11:08:25.883259", + "type_uid": 200401, + "type_name": "Detection Finding: Create", + "category_uid": 2, + "class_uid": 2004 + } +] \ No newline at end of file diff --git a/backend/unittests/import_observations/parsers/ocsf/test_parser.py b/backend/unittests/import_observations/parsers/ocsf/test_parser.py index 7ada14c4e..ab35b48d1 100644 --- a/backend/unittests/import_observations/parsers/ocsf/test_parser.py +++ b/backend/unittests/import_observations/parsers/ocsf/test_parser.py @@ -1,6 +1,7 @@ from os import path from unittest import TestCase +from application.core.models import Product from application.core.types import Severity from application.import_observations.parsers.ocsf.parser import OCSFParser from application.import_observations.services.parser_detector import detect_parser @@ -11,20 +12,22 @@ def test_other_finding(self): with open(path.dirname(__file__) + "/files/other_finding.json") as testfile: parser, parser_instance, data = 
detect_parser(testfile) self.assertEqual("OCSF (Open Cybersecurity Schema Framework)", parser.name) - self.assertTrue(isinstance(parser_instance, OCSFParser)) + self.assertIsInstance(parser_instance, OCSFParser) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("OCSF (Open Cybersecurity Schema Framework)", scanner) self.assertEqual(0, len(observations)) def test_prowler_multiple_findings(self): - with open( - path.dirname(__file__) + "/files/prowler_multiple_findings.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/prowler_multiple_findings.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("OCSF (Open Cybersecurity Schema Framework)", parser.name) - self.assertTrue(isinstance(parser_instance, OCSFParser)) + self.assertIsInstance(parser_instance, OCSFParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) - observations = parser_instance.get_observations(data) + self.assertEqual("Prowler / 4.5.0", scanner) self.assertEqual(2, len(observations)) observation = observations[0] @@ -69,7 +72,7 @@ def test_prowler_multiple_findings(self): ) self.assertEqual("OCSF Finding", observation.unsaved_evidences[0][0]) self.assertIn( - '"uid": "prowler-azure-aks_clusters_created_with_private_nodes-6c63340e-8a77-447a-9a9f-6c8277e6bc83-westeurope-aks-dev"', + '"uid":"prowler-azure-aks_clusters_created_with_private_nodes-6c63340e-8a77-447a-9a9f-6c8277e6bc83-westeurope-aks-dev"', observation.unsaved_evidences[0][1], ) @@ -107,6 +110,48 @@ def test_prowler_multiple_findings(self): self.assertEqual(2, len(observation.unsaved_references)) self.assertEqual("OCSF Finding", observation.unsaved_evidences[0][0]) self.assertIn( - '"uid": "prowler-azure-aks_clusters_public_access_disabled-6c63340e-8a77-447a-9a9f-6c8277e6bc83-westeurope-aks-prod"', + '"uid":"prowler-azure-aks_clusters_public_access_disabled-6c63340e-8a77-447a-9a9f-6c8277e6bc83-westeurope-aks-prod"', observation.unsaved_evidences[0][1], ) + + def test_prowler_kubernetes(self): + with open(path.dirname(__file__) + "/files/prowler_kubernetes.ocsf.json") as testfile: + + self.maxDiff = None + + parser, parser_instance, data = detect_parser(testfile) + self.assertEqual("OCSF (Open Cybersecurity Schema Framework)", parser.name) + self.assertIsInstance(parser_instance, OCSFParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("Prowler / 5.16.1", scanner) + self.assertEqual(1, len(observations)) + + observation = observations[0] + self.assertEqual("Prowler / 5.16.1", observation.scanner) + self.assertEqual( + "Minimize the admission of containers which use HostPorts", + observation.title, + ) + description = """This check ensures that Kubernetes clusters are configured to minimize the admission of containers that require the use of HostPorts. This helps maintain network policy controls and reduce security risks. + +**Status detail:** Pod cert-manager does not use HostPorts. + +**Risk details:** Permitting containers with HostPorts can bypass network policy controls, increasing the risk of unauthorized network access. 
+ +**Notes:** Carefully evaluate the need for HostPorts in container configurations and prefer network policies for secure communication.""" + self.assertEqual(description, observation.description) + self.assertEqual( + "Limit the use of HostPorts in Kubernetes containers to maintain network security.", + observation.recommendation, + ) + self.assertEqual(Severity.SEVERITY_HIGH, observation.parser_severity) + self.assertEqual("", observation.origin_cloud_provider) + self.assertEqual("", observation.origin_cloud_account_subscription_project) + self.assertEqual("", observation.origin_cloud_resource) + self.assertEqual("", observation.origin_cloud_resource_type) + self.assertEqual("", observation.origin_kubernetes_cluster) + self.assertEqual("cert-manager-namespace", observation.origin_kubernetes_namespace) + self.assertEqual("KubernetesPod", observation.origin_kubernetes_resource_type) + self.assertEqual("cert-manager", observation.origin_kubernetes_resource_name) diff --git a/backend/unittests/import_observations/parsers/osv/__init__.py b/backend/unittests/import_observations/parsers/osv/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_java_python.json b/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_java_python.json new file mode 100644 index 000000000..e50face91 --- /dev/null +++ b/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_java_python.json @@ -0,0 +1,29 @@ +[ + { + "model": "import_observations.osv_cache", + "pk": 1, + "fields": { + "osv_id": "GHSA-4jq9-2xhw-jpx7", + "data": "{\"id\":\"GHSA-4jq9-2xhw-jpx7\",\"summary\":\"Java: DoS Vulnerability in JSON-JAVA\",\"details\":\"### Summary\\nA denial of service vulnerability in JSON-Java was discovered by [ClusterFuzz](https://google.github.io/clusterfuzz/). A bug in the parser means that an input string of modest size can lead to indefinite amounts of memory being used. There are two issues: (1) the parser bug can be used to circumvent a check that is supposed to prevent the key in a JSON object from itself being another JSON object; (2) if a key does end up being a JSON object then it gets converted into a string, using `\\\\` to escape special characters, including `\\\\` itself. So by nesting JSON objects, with a key that is a JSON object that has a key that is a JSON object, and so on, we can get an exponential number of `\\\\` characters in the escaped string.\\n\\n### Severity\\nHigh - Because this is an already-fixed DoS vulnerability, the only remaining impact possible is for existing binaries that have not been updated yet.\\n\\n### Proof of Concept\\n```java\\npackage orgjsonbug;\\n\\nimport org.json.JSONObject;\\n\\n/**\\n * Illustrates a bug in JSON-Java.\\n */\\npublic class Bug {\\n private static String makeNested(int depth) {\\n if (depth == 0) {\\n return \\\"{\\\\\\\"a\\\\\\\":1}\\\";\\n }\\n return \\\"{\\\\\\\"a\\\\\\\":1;\\\\t\\\\0\\\" + makeNested(depth - 1) + \\\":1}\\\";\\n }\\n\\n public static void main(String[] args) {\\n String input = makeNested(30);\\n System.out.printf(\\\"Input string has length %d: %s\\\\n\\\", input.length(), input);\\n JSONObject output = new JSONObject(input);\\n System.out.printf(\\\"Output JSONObject has length %d: %s\\\\n\\\", output.toString().length(), output);\\n }\\n}\\n```\\nWhen run, this reports that the input string has length 367. 
Then, after a long pause, the program crashes inside new JSONObject with OutOfMemoryError.\\n\\n### Further Analysis\\nThe issue is fixed by [this PR](https://github.com/stleary/JSON-java/pull/759).\\n\\n### Timeline\\n**Date reported**: 07/14/2023\\n**Date fixed**: \\n**Date disclosed**: 10/12/2023\",\"aliases\":[\"CVE-2023-5072\"],\"modified\":\"2024-11-28T05:36:39.453647Z\",\"published\":\"2023-11-14T22:24:08Z\",\"related\":[\"CGA-7g9h-xgv7-r8j3\"],\"database_specific\":{\"github_reviewed_at\":\"2023-11-14T22:24:08Z\",\"github_reviewed\":true,\"severity\":\"HIGH\",\"cwe_ids\":[\"CWE-358\"],\"nvd_published_at\":null},\"references\":[{\"type\":\"WEB\",\"url\":\"https://github.com/google/security-research/security/advisories/GHSA-4jq9-2xhw-jpx7\"},{\"type\":\"ADVISORY\",\"url\":\"https://nvd.nist.gov/vuln/detail/CVE-2023-5072\"},{\"type\":\"WEB\",\"url\":\"https://github.com/stleary/JSON-java/issues/758\"},{\"type\":\"WEB\",\"url\":\"https://github.com/stleary/JSON-java/issues/771\"},{\"type\":\"WEB\",\"url\":\"https://github.com/stleary/JSON-java/pull/759\"},{\"type\":\"WEB\",\"url\":\"https://github.com/stleary/JSON-java/commit/60662e2f8384d3449822a3a1179bfe8de67b55bb\"},{\"type\":\"PACKAGE\",\"url\":\"https://github.com/stleary/JSON-java\"}],\"affected\":[{\"package\":{\"name\":\"org.json:json\",\"ecosystem\":\"Maven\",\"purl\":\"pkg:maven/org.json/json\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"20231013\"}]}],\"versions\":[\"20070829\",\"20080701\",\"20090211\",\"20131018\",\"20140107\",\"20141113\",\"20150729\",\"20151123\",\"20160212\",\"20160807\",\"20160810\",\"20170516\",\"20171018\",\"20180130\",\"20180813\",\"20190722\",\"20200518\",\"20201115\",\"20210307\",\"20211205\",\"20220320\",\"20220924\",\"20230227\",\"20230618\"],\"database_specific\":{\"source\":\"https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2023/11/GHSA-4jq9-2xhw-jpx7/GHSA-4jq9-2xhw-jpx7.json\",\"last_known_affected_version_range\":\"\\u003c= 20230618\"}}],\"schema_version\":\"1.6.0\"}", + "modified": "2025-10-30T19:23:43.662Z" + } + }, + { + "model": "import_observations.osv_cache", + "pk": 2, + "fields": { + "osv_id": "GHSA-3vqj-43w4-2q58", + "data": "{\"id\":\"GHSA-3vqj-43w4-2q58\",\"summary\":\"json stack overflow vulnerability\",\"details\":\"A stack overflow in the XML.toJSONObject component of hutool-json v5.8.10 and org.json:json before version 20230227 allows attackers to cause a Denial of Service (DoS) via crafted JSON or XML 
data.\",\"aliases\":[\"CVE-2022-45688\"],\"modified\":\"2024-04-15T20:32:09.965200Z\",\"published\":\"2022-12-13T15:30:26Z\",\"database_specific\":{\"github_reviewed_at\":\"2022-12-13T19:25:03Z\",\"github_reviewed\":true,\"severity\":\"HIGH\",\"cwe_ids\":[\"CWE-787\"],\"nvd_published_at\":\"2022-12-13T15:15:00Z\"},\"references\":[{\"type\":\"ADVISORY\",\"url\":\"https://nvd.nist.gov/vuln/detail/CVE-2022-45688\"},{\"type\":\"WEB\",\"url\":\"https://github.com/dromara/hutool/issues/2748\"},{\"type\":\"WEB\",\"url\":\"https://github.com/stleary/JSON-java/issues/708\"},{\"type\":\"WEB\",\"url\":\"https://github.com/dromara/hutool/commit/6a2b585de0a380e8c12016dbaa1620b69be11b8c\"},{\"type\":\"WEB\",\"url\":\"https://github.com/stleary/JSON-java/commit/a6e412bded7a0ad605adfeca029318f184c32102\"},{\"type\":\"WEB\",\"url\":\"https://github.com/dromara/hutool/releases/tag/5.8.25\"}],\"affected\":[{\"package\":{\"name\":\"cn.hutool:hutool-json\",\"ecosystem\":\"Maven\",\"purl\":\"pkg:maven/cn.hutool/hutool-json\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"5.8.25\"}]}],\"versions\":[\"4.0.0\",\"4.0.1\",\"4.0.10\",\"4.0.11\",\"4.0.12\",\"4.0.2\",\"4.0.3\",\"4.0.4\",\"4.0.5\",\"4.0.6\",\"4.0.7\",\"4.0.8\",\"4.0.9\",\"4.1.0\",\"4.1.1\",\"4.1.10\",\"4.1.11\",\"4.1.12\",\"4.1.13\",\"4.1.14\",\"4.1.15\",\"4.1.16\",\"4.1.17\",\"4.1.18\",\"4.1.19\",\"4.1.2\",\"4.1.20\",\"4.1.21\",\"4.1.3\",\"4.1.4\",\"4.1.5\",\"4.1.6\",\"4.1.7\",\"4.1.8\",\"4.1.9\",\"4.2.1\",\"4.3.0\",\"4.3.1\",\"4.3.2\",\"4.4.0\",\"4.4.1\",\"4.4.2\",\"4.4.3\",\"4.4.4\",\"4.4.5\",\"4.5.0\",\"4.5.1\",\"4.5.10\",\"4.5.11\",\"4.5.12\",\"4.5.13\",\"4.5.14\",\"4.5.15\",\"4.5.16\",\"4.5.17\",\"4.5.18\",\"4.5.2\",\"4.5.3\",\"4.5.4\",\"4.5.5\",\"4.5.6\",\"4.5.7\",\"4.5.8\",\"4.5.9\",\"4.6.0\",\"4.6.1\",\"4.6.10\",\"4.6.11\",\"4.6.12\",\"4.6.13\",\"4.6.14\",\"4.6.15\",\"4.6.16\",\"4.6.17\",\"4.6.2\",\"4.6.3\",\"4.6.4\",\"4.6.5\",\"4.6.6\",\"4.6.7\",\"4.6.8\",\"5.0.0\",\"5.0.1\",\"5.0.2\",\"5.0.3\",\"5.0.4\",\"5.0.5\",\"5.0.6\",\"5.0.7\",\"5.1.0\",\"5.1.1\",\"5.1.2\",\"5.1.3\",\"5.1.4\",\"5.1.5\",\"5.2.0\",\"5.2.1\",\"5.2.2\",\"5.2.3\",\"5.2.4\",\"5.2.5\",\"5.3.0\",\"5.3.1\",\"5.3.10\",\"5.3.2\",\"5.3.3\",\"5.3.4\",\"5.3.5\",\"5.3.6\",\"5.3.7\",\"5.3.8\",\"5.3.9\",\"5.4.0\",\"5.4.1\",\"5.4.2\",\"5.4.3\",\"5.4.4\",\"5.4.5\",\"5.4.6\",\"5.4.7\",\"5.5.0\",\"5.5.1\",\"5.5.2\",\"5.5.3\",\"5.5.4\",\"5.5.5\",\"5.5.6\",\"5.5.7\",\"5.5.8\",\"5.5.9\",\"5.6.0\",\"5.6.1\",\"5.6.2\",\"5.6.3\",\"5.6.4\",\"5.6.5\",\"5.6.6\",\"5.6.7\",\"5.7.0\",\"5.7.1\",\"5.7.10\",\"5.7.11\",\"5.7.12\",\"5.7.13\",\"5.7.14\",\"5.7.15\",\"5.7.16\",\"5.7.17\",\"5.7.18\",\"5.7.19\",\"5.7.2\",\"5.7.20\",\"5.7.21\",\"5.7.22\",\"5.7.3\",\"5.7.4\",\"5.7.5\",\"5.7.6\",\"5.7.7\",\"5.7.8\",\"5.7.9\",\"5.8.0\",\"5.8.0.M1\",\"5.8.0.M2\",\"5.8.0.M3\",\"5.8.0.M4\",\"5.8.1\",\"5.8.10\",\"5.8.11\",\"5.8.12\",\"5.8.13\",\"5.8.14\",\"5.8.15\",\"5.8.16\",\"5.8.17\",\"5.8.18\",\"5.8.19\",\"5.8.2\",\"5.8.20\",\"5.8.21\",\"5.8.22\",\"5.8.23\",\"5.8.24\",\"5.8.3\",\"5.8.4\",\"5.8.4.M1\",\"5.8.5\",\"5.8.6\",\"5.8.7\",\"5.8.8\",\"5.8.9\"],\"database_specific\":{\"source\":\"https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2022/12/GHSA-3vqj-43w4-2q58/GHSA-3vqj-43w4-2q58.json\"}},{\"package\":{\"name\":\"org.json:json\",\"ecosystem\":\"Maven\",\"purl\":\"pkg:maven/org.json/json\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"20230227\"}]}],\"versions\":[\"20070829\",\"20080701\",\"20090211\",\"201310
18\",\"20140107\",\"20141113\",\"20150729\",\"20151123\",\"20160212\",\"20160807\",\"20160810\",\"20170516\",\"20171018\",\"20180130\",\"20180813\",\"20190722\",\"20200518\",\"20201115\",\"20210307\",\"20211205\",\"20220320\",\"20220924\"],\"database_specific\":{\"source\":\"https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2022/12/GHSA-3vqj-43w4-2q58/GHSA-3vqj-43w4-2q58.json\"}}],\"schema_version\":\"1.6.0\",\"severity\":[{\"type\":\"CVSS_V3\",\"score\":\"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H\"}]}", + "modified": "2025-08-07T20:01:58.452Z" + } + }, + { + "model": "import_observations.osv_cache", + "pk": 3, + "fields": { + "osv_id": "GHSA-m9g8-fxxm-xg86", + "data": "{\"id\":\"GHSA-m9g8-fxxm-xg86\",\"summary\":\"Django SQL injection in HasKey(lhs, rhs) on Oracle\",\"details\":\"An issue was discovered in Django 5.1 before 5.1.4, 5.0 before 5.0.10, and 4.2 before 4.2.17. Direct usage of the django.db.models.fields.json.HasKey lookup, when an Oracle database is used, is subject to SQL injection if untrusted data is used as an lhs value. (Applications that use the jsonfield.has_key lookup via __ are unaffected.)\",\"aliases\":[\"CVE-2024-53908\",\"PYSEC-2024-157\"],\"modified\":\"2025-01-14T16:52:26.125663Z\",\"published\":\"2024-12-06T12:30:47Z\",\"related\":[\"CGA-x7qq-7cr6-xfq4\"],\"database_specific\":{\"github_reviewed_at\":\"2024-12-06T18:25:23Z\",\"github_reviewed\":true,\"severity\":\"HIGH\",\"cwe_ids\":[\"CWE-89\"],\"nvd_published_at\":\"2024-12-06T12:15:18Z\"},\"references\":[{\"type\":\"ADVISORY\",\"url\":\"https://nvd.nist.gov/vuln/detail/CVE-2024-53908\"},{\"type\":\"WEB\",\"url\":\"https://docs.djangoproject.com/en/dev/releases/security\"},{\"type\":\"PACKAGE\",\"url\":\"https://github.com/django/django\"},{\"type\":\"WEB\",\"url\":\"https://github.com/pypa/advisory-database/tree/main/vulns/django/PYSEC-2024-157.yaml\"},{\"type\":\"WEB\",\"url\":\"https://groups.google.com/g/django-announce\"},{\"type\":\"WEB\",\"url\":\"https://www.djangoproject.com/weblog/2024/dec/04/security-releases\"},{\"type\":\"WEB\",\"url\":\"https://www.openwall.com/lists/oss-security/2024/12/04/3\"}],\"affected\":[{\"package\":{\"name\":\"django\",\"ecosystem\":\"PyPI\",\"purl\":\"pkg:pypi/django\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"5.0.0\"},{\"fixed\":\"5.0.10\"}]}],\"versions\":[\"5.0\",\"5.0.1\",\"5.0.2\",\"5.0.3\",\"5.0.4\",\"5.0.5\",\"5.0.6\",\"5.0.7\",\"5.0.8\",\"5.0.9\"],\"database_specific\":{\"source\":\"https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2024/12/GHSA-m9g8-fxxm-xg86/GHSA-m9g8-fxxm-xg86.json\"}},{\"package\":{\"name\":\"django\",\"ecosystem\":\"PyPI\",\"purl\":\"pkg:pypi/django\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"5.1.0\"},{\"fixed\":\"5.1.4\"}]}],\"versions\":[\"5.1\",\"5.1.1\",\"5.1.2\",\"5.1.3\"],\"database_specific\":{\"source\":\"https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2024/12/GHSA-m9g8-fxxm-xg86/GHSA-m9g8-fxxm-xg86.json\"}},{\"package\":{\"name\":\"django\",\"ecosystem\":\"PyPI\",\"purl\":\"pkg:pypi/django\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"4.2.0\"},{\"fixed\":\"4.2.17\"}]}],\"versions\":[\"4.2\",\"4.2.1\",\"4.2.10\",\"4.2.11\",\"4.2.12\",\"4.2.13\",\"4.2.14\",\"4.2.15\",\"4.2.16\",\"4.2.2\",\"4.2.3\",\"4.2.4\",\"4.2.5\",\"4.2.6\",\"4.2.7\",\"4.2.8\",\"4.2.9\"],\"database_specific\":{\"source\":\"https://github.com/github/advisory-database/blob/main/advisories/github-revi
ewed/2024/12/GHSA-m9g8-fxxm-xg86/GHSA-m9g8-fxxm-xg86.json\"}},{\"package\":{\"name\":\"django\",\"ecosystem\":\"PyPI\",\"purl\":\"pkg:pypi/django\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"5.1\"},{\"fixed\":\"5.1.4\"}]}],\"versions\":[\"5.1\",\"5.1.1\",\"5.1.2\",\"5.1.3\"],\"database_specific\":{\"source\":\"https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2024/12/GHSA-m9g8-fxxm-xg86/GHSA-m9g8-fxxm-xg86.json\"}},{\"package\":{\"name\":\"django\",\"ecosystem\":\"PyPI\",\"purl\":\"pkg:pypi/django\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"5.0\"},{\"fixed\":\"5.0.10\"}]}],\"versions\":[\"5.0\",\"5.0.1\",\"5.0.2\",\"5.0.3\",\"5.0.4\",\"5.0.5\",\"5.0.6\",\"5.0.7\",\"5.0.8\",\"5.0.9\"],\"database_specific\":{\"source\":\"https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2024/12/GHSA-m9g8-fxxm-xg86/GHSA-m9g8-fxxm-xg86.json\"}},{\"package\":{\"name\":\"django\",\"ecosystem\":\"PyPI\",\"purl\":\"pkg:pypi/django\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"4.2\"},{\"fixed\":\"4.2.17\"}]}],\"versions\":[\"4.2\",\"4.2.1\",\"4.2.10\",\"4.2.11\",\"4.2.12\",\"4.2.13\",\"4.2.14\",\"4.2.15\",\"4.2.16\",\"4.2.2\",\"4.2.3\",\"4.2.4\",\"4.2.5\",\"4.2.6\",\"4.2.7\",\"4.2.8\",\"4.2.9\"],\"database_specific\":{\"source\":\"https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2024/12/GHSA-m9g8-fxxm-xg86/GHSA-m9g8-fxxm-xg86.json\"}}],\"schema_version\":\"1.6.0\",\"severity\":[{\"type\":\"CVSS_V3\",\"score\":\"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H\"},{\"type\":\"CVSS_V4\",\"score\":\"CVSS:4.0/AV:N/AC:L/AT:P/PR:N/UI:N/VC:H/VI:H/VA:H/SC:N/SI:N/SA:N/E:U\"}]}", + "modified": "2025-12-20T20:37:27Z" + } + } +] diff --git a/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_linux.json b/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_linux.json new file mode 100644 index 000000000..1830d0c08 --- /dev/null +++ b/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_linux.json @@ -0,0 +1,20 @@ +[ + { + "model": "import_observations.osv_cache", + "pk": 1, + "fields": { + "osv_id": "CVE-2024-32002", + "data": "{\"id\":\"CVE-2024-32002\",\"details\":\"Git is a revision control system. Prior to versions 2.45.1, 2.44.1, 2.43.4, 2.42.2, 2.41.1, 2.40.2, and 2.39.4, repositories with submodules can be crafted in a way that exploits a bug in Git whereby it can be fooled into writing files not into the submodule's worktree but into a `.git/` directory. This allows writing a hook that will be executed while the clone operation is still running, giving the user no opportunity to inspect the code that is being executed. The problem has been patched in versions 2.45.1, 2.44.1, 2.43.4, 2.42.2, 2.41.1, 2.40.2, and 2.39.4. If symbolic link support is disabled in Git (e.g. via `git config --global core.symlinks false`), the described attack won't work. 
As always, it is best to avoid cloning repositories from untrusted sources.\",\"aliases\":[\"BIT-git-2024-32002\"],\"modified\":\"2025-01-15T05:27:34.311573Z\",\"published\":\"2024-05-14T19:15:10Z\",\"related\":[\"ALSA-2024:4083\",\"ALSA-2024:4084\",\"CGA-5rcg-3vrp-xhg6\",\"DLA-3844-1\",\"DLA-3867-1\",\"DSA-5769-1\",\"GHSA-8h77-4q3w-gfgv\",\"RHSA-2024:4083\",\"RHSA-2024:4084\",\"RHSA-2024:4368\",\"RHSA-2024:4579\",\"RHSA-2024:6027\",\"RHSA-2024:6028\",\"RHSA-2024:6610\",\"RLSA-2024:4083\",\"RLSA-2024:4084\",\"SUSE-SU-2024:1807-1\",\"SUSE-SU-2024:1807-2\",\"SUSE-SU-2024:1854-1\",\"SUSE-SU-2024:2277-1\",\"SUSE-SU-2025:0197-1\",\"UBUNTU-CVE-2024-32002\",\"USN-6793-1\",\"USN-6793-2\",\"USN-7023-1\",\"openSUSE-SU-2024:13968-1\"],\"references\":[{\"type\":\"ADVISORY\",\"url\":\"https://github.com/git/git/security/advisories/GHSA-8h77-4q3w-gfgv\"},{\"type\":\"FIX\",\"url\":\"https://github.com/git/git/commit/97065761333fd62db1912d81b489db938d8c991d\"},{\"type\":\"WEB\",\"url\":\"http://www.openwall.com/lists/oss-security/2024/05/14/2\"},{\"type\":\"WEB\",\"url\":\"https://git-scm.com/docs/git-clone#Documentation/git-clone.txt---recurse-submodulesltpathspecgt\"},{\"type\":\"WEB\",\"url\":\"https://git-scm.com/docs/git-config#Documentation/git-config.txt-coresymlinks\"},{\"type\":\"WEB\",\"url\":\"https://lists.debian.org/debian-lts-announce/2024/06/msg00018.html\"},{\"type\":\"WEB\",\"url\":\"https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/S4CK4IYTXEOBZTEM5K3T6LWOIZ3S44AR/\"},{\"type\":\"ADVISORY\",\"url\":\"https://security.alpinelinux.org/vuln/CVE-2024-32002\"},{\"type\":\"ADVISORY\",\"url\":\"https://security-tracker.debian.org/tracker/CVE-2024-32002\"}],\"affected\":[{\"package\":{\"name\":\"git\",\"ecosystem\":\"Alpine:v3.17\",\"purl\":\"pkg:apk/alpine/git?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"2.39.5-r0\"}]}],\"versions\":[\"1.6.0.4-r1\",\"1.6.0.4-r2\",\"1.6.1-r0\",\"1.6.1.3-r0\",\"1.6.1.3-r1\",\"1.6.2.1-r0\",\"1.6.2.3-r0\",\"1.6.2.4-r0\",\"1.6.2.5-r0\",\"1.6.3.2-r0\",\"1.6.3.3-r0\",\"1.6.4-r0\",\"1.6.4.1-r0\",\"1.6.4.2-r0\",\"1.6.4.2-r1\",\"1.6.4.2-r2\",\"1.6.4.4-r0\",\"1.6.5-r0\",\"1.6.5.2-r0\",\"1.6.5.3-r0\",\"1.6.5.5-r0\",\"1.6.5.6-r0\",\"1.6.5.7-r0\",\"1.6.6-r0\",\"1.6.6.1-r0\",\"1.7.0.2-r0\",\"1.7.0.3-r0\",\"1.7.0.4-r0\",\"1.7.0.5-r0\",\"1.7.0.5-r1\",\"1.7.1-r0\",\"1.7.1-r1\",\"1.7.1-r2\",\"1.7.1.1-r0\",\"1.7.10-r0\",\"1.7.10.1-r0\",\"1.7.10.2-r0\",\"1.7.10.2-r1\",\"1.7.10.3-r0\",\"1.7.10.4-r0\",\"1.7.11-r0\",\"1.7.11.1-r0\",\"1.7.11.1-r1\",\"1.7.11.2-r0\",\"1.7.11.3-r0\",\"1.7.11.4-r0\",\"1.7.11.5-r0\",\"1.7.12-r0\",\"1.7.12.1-r0\",\"1.7.12.2-r0\",\"1.7.12.3-r0\",\"1.7.12.4-r0\",\"1.7.2-r0\",\"1.7.2.1-r0\",\"1.7.2.2-r0\",\"1.7.2.3-r0\",\"1.7.3-r0\",\"1.7.3.1-r0\",\"1.7.3.2-r0\",\"1.7.3.2-r1\",\"1.7.3.3-r0\",\"1.7.3.4-r0\",\"1.7.3.5-r0\",\"1.7.3.5-r1\",\"1.7.4-r0\",\"1.7.4-r1\",\"1.7.4.1-r0\",\"1.7.4.2-r0\",\"1.7.4.4-r0\",\"1.7.4.5-r0\",\"1.7.5.1-r0\",\"1.7.5.1-r1\",\"1.7.5.2-r0\",\"1.7.5.3-r0\",\"1.7.5.4-r0\",\"1.7.5.4-r1\",\"1.7.6-r0\",\"1.7.6.1-r0\",\"1.7.7-r0\",\"1.7.7.1-r0\",\"1.7.7.2-r0\",\"1.7.7.3-r0\",\"1.7.7.4-r0\",\"1.7.8-r0\",\"1.7.8.1-r0\",\"1.7.8.2-r0\",\"1.7.8.3-r0\",\"1.7.8.4-r0\",\"1.7.8.4-r1\",\"1.7.9-r0\",\"1.7.9.1-r0\",\"1.7.9.2-r0\",\"1.7.9.3-r0\",\"1.7.9.4-r0\",\"1.7.9.5-r0\",\"1.7.9.6-r0\",\"1.8.0-r0\",\"1.8.0.1-r0\",\"1.8.0.2-r0\",\"1.8.0.3-r0\",\"1.8.1-r0\",\"1.8.1.1-r0\",\"1.8.1.2-r0\",\"1.8.1.3-r0\",\"1.8.1.4-r0\",\"1.8.1.5-r0\",\"1.8.2-r0\",\"1.8.2.1-r0\"
,\"1.8.2.2-r0\",\"1.8.2.3-r0\",\"1.8.2.3-r1\",\"1.8.2.3-r2\",\"1.8.3-r0\",\"1.8.3.1-r0\",\"1.8.3.2-r0\",\"1.8.3.3-r0\",\"1.8.3.4-r0\",\"1.8.4-r0\",\"1.8.4.1-r0\",\"1.8.4.2-r0\",\"1.8.4.3-r0\",\"1.8.4.3-r1\",\"1.8.4.3-r2\",\"1.8.5.1-r0\",\"1.8.5.1-r1\",\"1.8.5.1-r2\",\"1.8.5.1-r3\",\"1.8.5.1-r4\",\"1.8.5.2-r0\",\"1.8.5.3-r0\",\"1.8.5.3-r1\",\"1.8.5.4-r0\",\"1.9.0-r0\",\"1.9.1-r0\",\"1.9.2-r0\",\"1.9.2-r1\",\"1.9.2-r2\",\"1.9.3-r0\",\"2.0.0-r0\",\"2.0.1-r0\",\"2.0.2-r0\",\"2.0.3-r0\",\"2.0.4-r0\",\"2.1.0-r0\",\"2.1.1-r0\",\"2.1.2-r0\",\"2.1.3-r0\",\"2.1.3-r1\",\"2.10.0-r0\",\"2.10.1-r0\",\"2.10.2-r0\",\"2.11.0-r0\",\"2.11.1-r0\",\"2.12.1-r0\",\"2.12.2-r0\",\"2.12.2-r1\",\"2.13.0-r0\",\"2.13.1-r0\",\"2.13.2-r0\",\"2.13.2-r1\",\"2.13.3-r0\",\"2.13.3-r1\",\"2.13.4-r0\",\"2.14.0-r0\",\"2.14.0-r1\",\"2.14.1-r0\",\"2.14.1-r1\",\"2.14.2-r0\",\"2.14.3-r0\",\"2.15.0-r0\",\"2.15.0-r1\",\"2.15.0-r2\",\"2.15.1-r0\",\"2.16.0-r0\",\"2.16.1-r0\",\"2.16.2-r0\",\"2.16.3-r0\",\"2.16.3-r1\",\"2.17.0-r0\",\"2.17.1-r0\",\"2.18.0-r0\",\"2.19.0-r0\",\"2.19.1-r0\",\"2.19.1-r1\",\"2.19.2-r0\",\"2.2.0-r0\",\"2.2.1-r0\",\"2.2.2-r0\",\"2.20.1-r0\",\"2.21.0-r0\",\"2.21.0-r1\",\"2.21.0-r2\",\"2.21.0-r3\",\"2.21.0-r4\",\"2.22.0-r0\",\"2.22.0-r1\",\"2.22.0-r2\",\"2.22.1-r0\",\"2.23.0-r0\",\"2.23.0-r1\",\"2.23.0-r2\",\"2.24.0-r0\",\"2.24.1-r0\",\"2.25.0-r0\",\"2.25.1-r0\",\"2.25.2-r0\",\"2.26.0-r0\",\"2.26.1-r0\",\"2.26.1-r1\",\"2.26.2-r0\",\"2.27.0-r0\",\"2.27.0-r1\",\"2.28.0-r0\",\"2.28.0-r1\",\"2.29.0-r0\",\"2.29.1-r0\",\"2.29.2-r0\",\"2.3.0-r0\",\"2.3.1-r0\",\"2.3.2-r0\",\"2.3.3-r0\",\"2.3.4-r0\",\"2.3.5-r0\",\"2.3.6-r0\",\"2.3.6-r1\",\"2.3.7-r0\",\"2.30.0-r0\",\"2.30.1-r0\",\"2.30.2-r0\",\"2.31.0-r0\",\"2.31.1-r0\",\"2.31.1-r1\",\"2.32.0-r0\",\"2.32.0-r1\",\"2.32.0-r2\",\"2.33.0-r0\",\"2.33.0-r1\",\"2.33.0-r2\",\"2.33.1-r0\",\"2.34.0-r0\",\"2.34.1-r0\",\"2.34.1-r1\",\"2.35.0-r0\",\"2.35.1-r0\",\"2.35.1-r1\",\"2.35.1-r2\",\"2.35.2-r0\",\"2.36.0-r0\",\"2.36.1-r0\",\"2.37.0-r0\",\"2.37.1-r0\",\"2.37.1-r1\",\"2.37.3-r0\",\"2.37.3-r1\",\"2.38.0-r0\",\"2.38.0-r1\",\"2.38.1-r0\",\"2.38.2-r0\",\"2.38.3-r0\",\"2.38.3-r1\",\"2.38.4-r0\",\"2.38.4-r1\",\"2.38.5-r0\",\"2.4.0-r0\",\"2.4.1-r0\",\"2.4.2-r0\",\"2.4.2-r1\",\"2.4.3-r0\",\"2.4.4-r0\",\"2.4.5-r0\",\"2.4.6-r0\",\"2.4.6-r1\",\"2.5.0-r0\",\"2.5.0-r1\",\"2.5.1-r0\",\"2.5.2-r0\",\"2.5.3-r0\",\"2.6.0-r0\",\"2.6.0-r1\",\"2.6.0-r2\",\"2.6.0-r3\",\"2.6.1-r0\",\"2.6.1-r1\",\"2.6.3-r0\",\"2.6.4-r0\",\"2.7.0-r0\",\"2.7.1-r0\",\"2.7.2-r0\",\"2.7.3-r0\",\"2.7.4-r0\",\"2.8.0-r0\",\"2.8.0-r1\",\"2.8.1-r0\",\"2.8.2-r0\",\"2.8.3-r0\",\"2.8.4-r0\",\"2.9.0-r0\",\"2.9.1-r0\",\"2.9.2-r0\",\"2.9.3-r0\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2024-32002.json\"}},{\"package\":{\"name\":\"git\",\"ecosystem\":\"Alpine:v3.18\",\"purl\":\"pkg:apk/alpine/git?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"2.40.3-r0\"}]}],\"versions\":[\"1.6.0.4-r1\",\"1.6.0.4-r2\",\"1.6.1-r0\",\"1.6.1.3-r0\",\"1.6.1.3-r1\",\"1.6.2.1-r0\",\"1.6.2.3-r0\",\"1.6.2.4-r0\",\"1.6.2.5-r0\",\"1.6.3.2-r0\",\"1.6.3.3-r0\",\"1.6.4-r0\",\"1.6.4.1-r0\",\"1.6.4.2-r0\",\"1.6.4.2-r1\",\"1.6.4.2-r2\",\"1.6.4.4-r0\",\"1.6.5-r0\",\"1.6.5.2-r0\",\"1.6.5.3-r0\",\"1.6.5.5-r0\",\"1.6.5.6-r0\",\"1.6.5.7-r0\",\"1.6.6-r0\",\"1.6.6.1-r0\",\"1.7.0.2-r0\",\"1.7.0.3-r0\",\"1.7.0.4-r0\",\"1.7.0.5-r0\",\"1.7.0.5-r1\",\"1.7.1-r0\",\"1.7.1-r1\",\"1.7.1-r2\",\"1.7.1.1-r0\",\"1.7.10-r0\",\"1.7.10.1-r0\",\"1.7.10.2-r0\",\"1.7.10.2-r1\",\"1.7.10.3
-r0\",\"1.7.10.4-r0\",\"1.7.11-r0\",\"1.7.11.1-r0\",\"1.7.11.1-r1\",\"1.7.11.2-r0\",\"1.7.11.3-r0\",\"1.7.11.4-r0\",\"1.7.11.5-r0\",\"1.7.12-r0\",\"1.7.12.1-r0\",\"1.7.12.2-r0\",\"1.7.12.3-r0\",\"1.7.12.4-r0\",\"1.7.2-r0\",\"1.7.2.1-r0\",\"1.7.2.2-r0\",\"1.7.2.3-r0\",\"1.7.3-r0\",\"1.7.3.1-r0\",\"1.7.3.2-r0\",\"1.7.3.2-r1\",\"1.7.3.3-r0\",\"1.7.3.4-r0\",\"1.7.3.5-r0\",\"1.7.3.5-r1\",\"1.7.4-r0\",\"1.7.4-r1\",\"1.7.4.1-r0\",\"1.7.4.2-r0\",\"1.7.4.4-r0\",\"1.7.4.5-r0\",\"1.7.5.1-r0\",\"1.7.5.1-r1\",\"1.7.5.2-r0\",\"1.7.5.3-r0\",\"1.7.5.4-r0\",\"1.7.5.4-r1\",\"1.7.6-r0\",\"1.7.6.1-r0\",\"1.7.7-r0\",\"1.7.7.1-r0\",\"1.7.7.2-r0\",\"1.7.7.3-r0\",\"1.7.7.4-r0\",\"1.7.8-r0\",\"1.7.8.1-r0\",\"1.7.8.2-r0\",\"1.7.8.3-r0\",\"1.7.8.4-r0\",\"1.7.8.4-r1\",\"1.7.9-r0\",\"1.7.9.1-r0\",\"1.7.9.2-r0\",\"1.7.9.3-r0\",\"1.7.9.4-r0\",\"1.7.9.5-r0\",\"1.7.9.6-r0\",\"1.8.0-r0\",\"1.8.0.1-r0\",\"1.8.0.2-r0\",\"1.8.0.3-r0\",\"1.8.1-r0\",\"1.8.1.1-r0\",\"1.8.1.2-r0\",\"1.8.1.3-r0\",\"1.8.1.4-r0\",\"1.8.1.5-r0\",\"1.8.2-r0\",\"1.8.2.1-r0\",\"1.8.2.2-r0\",\"1.8.2.3-r0\",\"1.8.2.3-r1\",\"1.8.2.3-r2\",\"1.8.3-r0\",\"1.8.3.1-r0\",\"1.8.3.2-r0\",\"1.8.3.3-r0\",\"1.8.3.4-r0\",\"1.8.4-r0\",\"1.8.4.1-r0\",\"1.8.4.2-r0\",\"1.8.4.3-r0\",\"1.8.4.3-r1\",\"1.8.4.3-r2\",\"1.8.5.1-r0\",\"1.8.5.1-r1\",\"1.8.5.1-r2\",\"1.8.5.1-r3\",\"1.8.5.1-r4\",\"1.8.5.2-r0\",\"1.8.5.3-r0\",\"1.8.5.3-r1\",\"1.8.5.4-r0\",\"1.9.0-r0\",\"1.9.1-r0\",\"1.9.2-r0\",\"1.9.2-r1\",\"1.9.2-r2\",\"1.9.3-r0\",\"2.0.0-r0\",\"2.0.1-r0\",\"2.0.2-r0\",\"2.0.3-r0\",\"2.0.4-r0\",\"2.1.0-r0\",\"2.1.1-r0\",\"2.1.2-r0\",\"2.1.3-r0\",\"2.1.3-r1\",\"2.10.0-r0\",\"2.10.1-r0\",\"2.10.2-r0\",\"2.11.0-r0\",\"2.11.1-r0\",\"2.12.1-r0\",\"2.12.2-r0\",\"2.12.2-r1\",\"2.13.0-r0\",\"2.13.1-r0\",\"2.13.2-r0\",\"2.13.2-r1\",\"2.13.3-r0\",\"2.13.3-r1\",\"2.13.4-r0\",\"2.14.0-r0\",\"2.14.0-r1\",\"2.14.1-r0\",\"2.14.1-r1\",\"2.14.2-r0\",\"2.14.3-r0\",\"2.15.0-r0\",\"2.15.0-r1\",\"2.15.0-r2\",\"2.15.1-r0\",\"2.16.0-r0\",\"2.16.1-r0\",\"2.16.2-r0\",\"2.16.3-r0\",\"2.16.3-r1\",\"2.17.0-r0\",\"2.17.1-r0\",\"2.18.0-r0\",\"2.19.0-r0\",\"2.19.1-r0\",\"2.19.1-r1\",\"2.19.2-r0\",\"2.2.0-r0\",\"2.2.1-r0\",\"2.2.2-r0\",\"2.20.1-r0\",\"2.21.0-r0\",\"2.21.0-r1\",\"2.21.0-r2\",\"2.21.0-r3\",\"2.21.0-r4\",\"2.22.0-r0\",\"2.22.0-r1\",\"2.22.0-r2\",\"2.22.1-r0\",\"2.23.0-r0\",\"2.23.0-r1\",\"2.23.0-r2\",\"2.24.0-r0\",\"2.24.1-r0\",\"2.25.0-r0\",\"2.25.1-r0\",\"2.25.2-r0\",\"2.26.0-r0\",\"2.26.1-r0\",\"2.26.1-r1\",\"2.26.2-r0\",\"2.27.0-r0\",\"2.27.0-r1\",\"2.28.0-r0\",\"2.28.0-r1\",\"2.29.0-r0\",\"2.29.1-r0\",\"2.29.2-r0\",\"2.3.0-r0\",\"2.3.1-r0\",\"2.3.2-r0\",\"2.3.3-r0\",\"2.3.4-r0\",\"2.3.5-r0\",\"2.3.6-r0\",\"2.3.6-r1\",\"2.3.7-r0\",\"2.30.0-r0\",\"2.30.1-r0\",\"2.30.2-r0\",\"2.31.0-r0\",\"2.31.1-r0\",\"2.31.1-r1\",\"2.32.0-r0\",\"2.32.0-r1\",\"2.32.0-r2\",\"2.33.0-r0\",\"2.33.0-r1\",\"2.33.0-r2\",\"2.33.1-r0\",\"2.34.0-r0\",\"2.34.1-r0\",\"2.34.1-r1\",\"2.35.0-r0\",\"2.35.1-r0\",\"2.35.1-r1\",\"2.35.1-r2\",\"2.35.2-r0\",\"2.36.0-r0\",\"2.36.1-r0\",\"2.37.0-r0\",\"2.37.1-r0\",\"2.37.1-r1\",\"2.37.3-r0\",\"2.37.3-r1\",\"2.38.0-r0\",\"2.38.0-r1\",\"2.38.1-r0\",\"2.38.2-r0\",\"2.39.0-r0\",\"2.39.1-r0\",\"2.39.1-r1\",\"2.39.1-r2\",\"2.39.1-r3\",\"2.39.1-r4\",\"2.39.2-r0\",\"2.4.0-r0\",\"2.4.1-r0\",\"2.4.2-r0\",\"2.4.2-r1\",\"2.4.3-r0\",\"2.4.4-r0\",\"2.4.5-r0\",\"2.4.6-r0\",\"2.4.6-r1\",\"2.40.0-r0\",\"2.40.0-r1\",\"2.40.1-r0\",\"2.5.0-r0\",\"2.5.0-r1\",\"2.5.1-r0\",\"2.5.2-r0\",\"2.5.3-r0\",\"2.6.0-r0\",\"2.6.0-r1\",\"2.6.0-r2\",\"2.6.0-r3\",\"2.6.1-r0\",\"2.6.1-r1\",\"2.6.3-r0\",\"2.6.4-r0\",\"2.7.0-r
0\",\"2.7.1-r0\",\"2.7.2-r0\",\"2.7.3-r0\",\"2.7.4-r0\",\"2.8.0-r0\",\"2.8.0-r1\",\"2.8.1-r0\",\"2.8.2-r0\",\"2.8.3-r0\",\"2.8.4-r0\",\"2.9.0-r0\",\"2.9.1-r0\",\"2.9.2-r0\",\"2.9.3-r0\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2024-32002.json\"}},{\"package\":{\"name\":\"git\",\"ecosystem\":\"Alpine:v3.19\",\"purl\":\"pkg:apk/alpine/git?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"2.43.4-r0\"}]}],\"versions\":[\"1.6.0.4-r1\",\"1.6.0.4-r2\",\"1.6.1-r0\",\"1.6.1.3-r0\",\"1.6.1.3-r1\",\"1.6.2.1-r0\",\"1.6.2.3-r0\",\"1.6.2.4-r0\",\"1.6.2.5-r0\",\"1.6.3.2-r0\",\"1.6.3.3-r0\",\"1.6.4-r0\",\"1.6.4.1-r0\",\"1.6.4.2-r0\",\"1.6.4.2-r1\",\"1.6.4.2-r2\",\"1.6.4.4-r0\",\"1.6.5-r0\",\"1.6.5.2-r0\",\"1.6.5.3-r0\",\"1.6.5.5-r0\",\"1.6.5.6-r0\",\"1.6.5.7-r0\",\"1.6.6-r0\",\"1.6.6.1-r0\",\"1.7.0.2-r0\",\"1.7.0.3-r0\",\"1.7.0.4-r0\",\"1.7.0.5-r0\",\"1.7.0.5-r1\",\"1.7.1-r0\",\"1.7.1-r1\",\"1.7.1-r2\",\"1.7.1.1-r0\",\"1.7.10-r0\",\"1.7.10.1-r0\",\"1.7.10.2-r0\",\"1.7.10.2-r1\",\"1.7.10.3-r0\",\"1.7.10.4-r0\",\"1.7.11-r0\",\"1.7.11.1-r0\",\"1.7.11.1-r1\",\"1.7.11.2-r0\",\"1.7.11.3-r0\",\"1.7.11.4-r0\",\"1.7.11.5-r0\",\"1.7.12-r0\",\"1.7.12.1-r0\",\"1.7.12.2-r0\",\"1.7.12.3-r0\",\"1.7.12.4-r0\",\"1.7.2-r0\",\"1.7.2.1-r0\",\"1.7.2.2-r0\",\"1.7.2.3-r0\",\"1.7.3-r0\",\"1.7.3.1-r0\",\"1.7.3.2-r0\",\"1.7.3.2-r1\",\"1.7.3.3-r0\",\"1.7.3.4-r0\",\"1.7.3.5-r0\",\"1.7.3.5-r1\",\"1.7.4-r0\",\"1.7.4-r1\",\"1.7.4.1-r0\",\"1.7.4.2-r0\",\"1.7.4.4-r0\",\"1.7.4.5-r0\",\"1.7.5.1-r0\",\"1.7.5.1-r1\",\"1.7.5.2-r0\",\"1.7.5.3-r0\",\"1.7.5.4-r0\",\"1.7.5.4-r1\",\"1.7.6-r0\",\"1.7.6.1-r0\",\"1.7.7-r0\",\"1.7.7.1-r0\",\"1.7.7.2-r0\",\"1.7.7.3-r0\",\"1.7.7.4-r0\",\"1.7.8-r0\",\"1.7.8.1-r0\",\"1.7.8.2-r0\",\"1.7.8.3-r0\",\"1.7.8.4-r0\",\"1.7.8.4-r1\",\"1.7.9-r0\",\"1.7.9.1-r0\",\"1.7.9.2-r0\",\"1.7.9.3-r0\",\"1.7.9.4-r0\",\"1.7.9.5-r0\",\"1.7.9.6-r0\",\"1.8.0-r0\",\"1.8.0.1-r0\",\"1.8.0.2-r0\",\"1.8.0.3-r0\",\"1.8.1-r0\",\"1.8.1.1-r0\",\"1.8.1.2-r0\",\"1.8.1.3-r0\",\"1.8.1.4-r0\",\"1.8.1.5-r0\",\"1.8.2-r0\",\"1.8.2.1-r0\",\"1.8.2.2-r0\",\"1.8.2.3-r0\",\"1.8.2.3-r1\",\"1.8.2.3-r2\",\"1.8.3-r0\",\"1.8.3.1-r0\",\"1.8.3.2-r0\",\"1.8.3.3-r0\",\"1.8.3.4-r0\",\"1.8.4-r0\",\"1.8.4.1-r0\",\"1.8.4.2-r0\",\"1.8.4.3-r0\",\"1.8.4.3-r1\",\"1.8.4.3-r2\",\"1.8.5.1-r0\",\"1.8.5.1-r1\",\"1.8.5.1-r2\",\"1.8.5.1-r3\",\"1.8.5.1-r4\",\"1.8.5.2-r0\",\"1.8.5.3-r0\",\"1.8.5.3-r1\",\"1.8.5.4-r0\",\"1.9.0-r0\",\"1.9.1-r0\",\"1.9.2-r0\",\"1.9.2-r1\",\"1.9.2-r2\",\"1.9.3-r0\",\"2.0.0-r0\",\"2.0.1-r0\",\"2.0.2-r0\",\"2.0.3-r0\",\"2.0.4-r0\",\"2.1.0-r0\",\"2.1.1-r0\",\"2.1.2-r0\",\"2.1.3-r0\",\"2.1.3-r1\",\"2.10.0-r0\",\"2.10.1-r0\",\"2.10.2-r0\",\"2.11.0-r0\",\"2.11.1-r0\",\"2.12.1-r0\",\"2.12.2-r0\",\"2.12.2-r1\",\"2.13.0-r0\",\"2.13.1-r0\",\"2.13.2-r0\",\"2.13.2-r1\",\"2.13.3-r0\",\"2.13.3-r1\",\"2.13.4-r0\",\"2.14.0-r0\",\"2.14.0-r1\",\"2.14.1-r0\",\"2.14.1-r1\",\"2.14.2-r0\",\"2.14.3-r0\",\"2.15.0-r0\",\"2.15.0-r1\",\"2.15.0-r2\",\"2.15.1-r0\",\"2.16.0-r0\",\"2.16.1-r0\",\"2.16.2-r0\",\"2.16.3-r0\",\"2.16.3-r1\",\"2.17.0-r0\",\"2.17.1-r0\",\"2.18.0-r0\",\"2.19.0-r0\",\"2.19.1-r0\",\"2.19.1-r1\",\"2.19.2-r0\",\"2.2.0-r0\",\"2.2.1-r0\",\"2.2.2-r0\",\"2.20.1-r0\",\"2.21.0-r0\",\"2.21.0-r1\",\"2.21.0-r2\",\"2.21.0-r3\",\"2.21.0-r4\",\"2.22.0-r0\",\"2.22.0-r1\",\"2.22.0-r2\",\"2.22.1-r0\",\"2.23.0-r0\",\"2.23.0-r1\",\"2.23.0-r2\",\"2.24.0-r0\",\"2.24.1-r0\",\"2.25.0-r0\",\"2.25.1-r0\",\"2.25.2-r0\",\"2.26.0-r0\",\"2.26.1-r0\",\"2.26.1-r1\",\"2.26.2-r0\",\
"2.27.0-r0\",\"2.27.0-r1\",\"2.28.0-r0\",\"2.28.0-r1\",\"2.29.0-r0\",\"2.29.1-r0\",\"2.29.2-r0\",\"2.3.0-r0\",\"2.3.1-r0\",\"2.3.2-r0\",\"2.3.3-r0\",\"2.3.4-r0\",\"2.3.5-r0\",\"2.3.6-r0\",\"2.3.6-r1\",\"2.3.7-r0\",\"2.30.0-r0\",\"2.30.1-r0\",\"2.30.2-r0\",\"2.31.0-r0\",\"2.31.1-r0\",\"2.31.1-r1\",\"2.32.0-r0\",\"2.32.0-r1\",\"2.32.0-r2\",\"2.33.0-r0\",\"2.33.0-r1\",\"2.33.0-r2\",\"2.33.1-r0\",\"2.34.0-r0\",\"2.34.1-r0\",\"2.34.1-r1\",\"2.35.0-r0\",\"2.35.1-r0\",\"2.35.1-r1\",\"2.35.1-r2\",\"2.35.2-r0\",\"2.36.0-r0\",\"2.36.1-r0\",\"2.37.0-r0\",\"2.37.1-r0\",\"2.37.1-r1\",\"2.37.3-r0\",\"2.37.3-r1\",\"2.38.0-r0\",\"2.38.0-r1\",\"2.38.1-r0\",\"2.38.2-r0\",\"2.39.0-r0\",\"2.39.1-r0\",\"2.39.1-r1\",\"2.39.1-r2\",\"2.39.1-r3\",\"2.39.1-r4\",\"2.39.2-r0\",\"2.4.0-r0\",\"2.4.1-r0\",\"2.4.2-r0\",\"2.4.2-r1\",\"2.4.3-r0\",\"2.4.4-r0\",\"2.4.5-r0\",\"2.4.6-r0\",\"2.4.6-r1\",\"2.40.0-r0\",\"2.40.0-r1\",\"2.40.1-r0\",\"2.41.0-r0\",\"2.41.0-r1\",\"2.41.0-r2\",\"2.42.0-r0\",\"2.42.1-r0\",\"2.43.0-r0\",\"2.5.0-r0\",\"2.5.0-r1\",\"2.5.1-r0\",\"2.5.2-r0\",\"2.5.3-r0\",\"2.6.0-r0\",\"2.6.0-r1\",\"2.6.0-r2\",\"2.6.0-r3\",\"2.6.1-r0\",\"2.6.1-r1\",\"2.6.3-r0\",\"2.6.4-r0\",\"2.7.0-r0\",\"2.7.1-r0\",\"2.7.2-r0\",\"2.7.3-r0\",\"2.7.4-r0\",\"2.8.0-r0\",\"2.8.0-r1\",\"2.8.1-r0\",\"2.8.2-r0\",\"2.8.3-r0\",\"2.8.4-r0\",\"2.9.0-r0\",\"2.9.1-r0\",\"2.9.2-r0\",\"2.9.3-r0\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2024-32002.json\"}},{\"package\":{\"name\":\"git\",\"ecosystem\":\"Alpine:v3.20\",\"purl\":\"pkg:apk/alpine/git?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"2.45.1-r0\"}]}],\"versions\":[\"1.6.0.4-r1\",\"1.6.0.4-r2\",\"1.6.1-r0\",\"1.6.1.3-r0\",\"1.6.1.3-r1\",\"1.6.2.1-r0\",\"1.6.2.3-r0\",\"1.6.2.4-r0\",\"1.6.2.5-r0\",\"1.6.3.2-r0\",\"1.6.3.3-r0\",\"1.6.4-r0\",\"1.6.4.1-r0\",\"1.6.4.2-r0\",\"1.6.4.2-r1\",\"1.6.4.2-r2\",\"1.6.4.4-r0\",\"1.6.5-r0\",\"1.6.5.2-r0\",\"1.6.5.3-r0\",\"1.6.5.5-r0\",\"1.6.5.6-r0\",\"1.6.5.7-r0\",\"1.6.6-r0\",\"1.6.6.1-r0\",\"1.7.0.2-r0\",\"1.7.0.3-r0\",\"1.7.0.4-r0\",\"1.7.0.5-r0\",\"1.7.0.5-r1\",\"1.7.1-r0\",\"1.7.1-r1\",\"1.7.1-r2\",\"1.7.1.1-r0\",\"1.7.10-r0\",\"1.7.10.1-r0\",\"1.7.10.2-r0\",\"1.7.10.2-r1\",\"1.7.10.3-r0\",\"1.7.10.4-r0\",\"1.7.11-r0\",\"1.7.11.1-r0\",\"1.7.11.1-r1\",\"1.7.11.2-r0\",\"1.7.11.3-r0\",\"1.7.11.4-r0\",\"1.7.11.5-r0\",\"1.7.12-r0\",\"1.7.12.1-r0\",\"1.7.12.2-r0\",\"1.7.12.3-r0\",\"1.7.12.4-r0\",\"1.7.2-r0\",\"1.7.2.1-r0\",\"1.7.2.2-r0\",\"1.7.2.3-r0\",\"1.7.3-r0\",\"1.7.3.1-r0\",\"1.7.3.2-r0\",\"1.7.3.2-r1\",\"1.7.3.3-r0\",\"1.7.3.4-r0\",\"1.7.3.5-r0\",\"1.7.3.5-r1\",\"1.7.4-r0\",\"1.7.4-r1\",\"1.7.4.1-r0\",\"1.7.4.2-r0\",\"1.7.4.4-r0\",\"1.7.4.5-r0\",\"1.7.5.1-r0\",\"1.7.5.1-r1\",\"1.7.5.2-r0\",\"1.7.5.3-r0\",\"1.7.5.4-r0\",\"1.7.5.4-r1\",\"1.7.6-r0\",\"1.7.6.1-r0\",\"1.7.7-r0\",\"1.7.7.1-r0\",\"1.7.7.2-r0\",\"1.7.7.3-r0\",\"1.7.7.4-r0\",\"1.7.8-r0\",\"1.7.8.1-r0\",\"1.7.8.2-r0\",\"1.7.8.3-r0\",\"1.7.8.4-r0\",\"1.7.8.4-r1\",\"1.7.9-r0\",\"1.7.9.1-r0\",\"1.7.9.2-r0\",\"1.7.9.3-r0\",\"1.7.9.4-r0\",\"1.7.9.5-r0\",\"1.7.9.6-r0\",\"1.8.0-r0\",\"1.8.0.1-r0\",\"1.8.0.2-r0\",\"1.8.0.3-r0\",\"1.8.1-r0\",\"1.8.1.1-r0\",\"1.8.1.2-r0\",\"1.8.1.3-r0\",\"1.8.1.4-r0\",\"1.8.1.5-r0\",\"1.8.2-r0\",\"1.8.2.1-r0\",\"1.8.2.2-r0\",\"1.8.2.3-r0\",\"1.8.2.3-r1\",\"1.8.2.3-r2\",\"1.8.3-r0\",\"1.8.3.1-r0\",\"1.8.3.2-r0\",\"1.8.3.3-r0\",\"1.8.3.4-r0\",\"1.8.4-r0\",\"1.8.4.1-r0\",\"1.8.4.2-r0\",\"1.8.4.3-r0\",\"1.8.4.3-r1\",\"1.8.4.3-r2\",\"1.8.5.1-r0\",\"1.8.5.1-r1\
",\"1.8.5.1-r2\",\"1.8.5.1-r3\",\"1.8.5.1-r4\",\"1.8.5.2-r0\",\"1.8.5.3-r0\",\"1.8.5.3-r1\",\"1.8.5.4-r0\",\"1.9.0-r0\",\"1.9.1-r0\",\"1.9.2-r0\",\"1.9.2-r1\",\"1.9.2-r2\",\"1.9.3-r0\",\"2.0.0-r0\",\"2.0.1-r0\",\"2.0.2-r0\",\"2.0.3-r0\",\"2.0.4-r0\",\"2.1.0-r0\",\"2.1.1-r0\",\"2.1.2-r0\",\"2.1.3-r0\",\"2.1.3-r1\",\"2.10.0-r0\",\"2.10.1-r0\",\"2.10.2-r0\",\"2.11.0-r0\",\"2.11.1-r0\",\"2.12.1-r0\",\"2.12.2-r0\",\"2.12.2-r1\",\"2.13.0-r0\",\"2.13.1-r0\",\"2.13.2-r0\",\"2.13.2-r1\",\"2.13.3-r0\",\"2.13.3-r1\",\"2.13.4-r0\",\"2.14.0-r0\",\"2.14.0-r1\",\"2.14.1-r0\",\"2.14.1-r1\",\"2.14.2-r0\",\"2.14.3-r0\",\"2.15.0-r0\",\"2.15.0-r1\",\"2.15.0-r2\",\"2.15.1-r0\",\"2.16.0-r0\",\"2.16.1-r0\",\"2.16.2-r0\",\"2.16.3-r0\",\"2.16.3-r1\",\"2.17.0-r0\",\"2.17.1-r0\",\"2.18.0-r0\",\"2.19.0-r0\",\"2.19.1-r0\",\"2.19.1-r1\",\"2.19.2-r0\",\"2.2.0-r0\",\"2.2.1-r0\",\"2.2.2-r0\",\"2.20.1-r0\",\"2.21.0-r0\",\"2.21.0-r1\",\"2.21.0-r2\",\"2.21.0-r3\",\"2.21.0-r4\",\"2.22.0-r0\",\"2.22.0-r1\",\"2.22.0-r2\",\"2.22.1-r0\",\"2.23.0-r0\",\"2.23.0-r1\",\"2.23.0-r2\",\"2.24.0-r0\",\"2.24.1-r0\",\"2.25.0-r0\",\"2.25.1-r0\",\"2.25.2-r0\",\"2.26.0-r0\",\"2.26.1-r0\",\"2.26.1-r1\",\"2.26.2-r0\",\"2.27.0-r0\",\"2.27.0-r1\",\"2.28.0-r0\",\"2.28.0-r1\",\"2.29.0-r0\",\"2.29.1-r0\",\"2.29.2-r0\",\"2.3.0-r0\",\"2.3.1-r0\",\"2.3.2-r0\",\"2.3.3-r0\",\"2.3.4-r0\",\"2.3.5-r0\",\"2.3.6-r0\",\"2.3.6-r1\",\"2.3.7-r0\",\"2.30.0-r0\",\"2.30.1-r0\",\"2.30.2-r0\",\"2.31.0-r0\",\"2.31.1-r0\",\"2.31.1-r1\",\"2.32.0-r0\",\"2.32.0-r1\",\"2.32.0-r2\",\"2.33.0-r0\",\"2.33.0-r1\",\"2.33.0-r2\",\"2.33.1-r0\",\"2.34.0-r0\",\"2.34.1-r0\",\"2.34.1-r1\",\"2.35.0-r0\",\"2.35.1-r0\",\"2.35.1-r1\",\"2.35.1-r2\",\"2.35.2-r0\",\"2.36.0-r0\",\"2.36.1-r0\",\"2.37.0-r0\",\"2.37.1-r0\",\"2.37.1-r1\",\"2.37.3-r0\",\"2.37.3-r1\",\"2.38.0-r0\",\"2.38.0-r1\",\"2.38.1-r0\",\"2.38.2-r0\",\"2.39.0-r0\",\"2.39.1-r0\",\"2.39.1-r1\",\"2.39.1-r2\",\"2.39.1-r3\",\"2.39.1-r4\",\"2.39.2-r0\",\"2.4.0-r0\",\"2.4.1-r0\",\"2.4.2-r0\",\"2.4.2-r1\",\"2.4.3-r0\",\"2.4.4-r0\",\"2.4.5-r0\",\"2.4.6-r0\",\"2.4.6-r1\",\"2.40.0-r0\",\"2.40.0-r1\",\"2.40.1-r0\",\"2.41.0-r0\",\"2.41.0-r1\",\"2.41.0-r2\",\"2.42.0-r0\",\"2.42.1-r0\",\"2.43.0-r0\",\"2.43.0-r1\",\"2.43.0-r2\",\"2.43.0-r3\",\"2.43.1-r0\",\"2.43.2-r0\",\"2.44.0-r0\",\"2.44.0-r1\",\"2.44.0-r2\",\"2.45.0-r0\",\"2.5.0-r0\",\"2.5.0-r1\",\"2.5.1-r0\",\"2.5.2-r0\",\"2.5.3-r0\",\"2.6.0-r0\",\"2.6.0-r1\",\"2.6.0-r2\",\"2.6.0-r3\",\"2.6.1-r0\",\"2.6.1-r1\",\"2.6.3-r0\",\"2.6.4-r0\",\"2.7.0-r0\",\"2.7.1-r0\",\"2.7.2-r0\",\"2.7.3-r0\",\"2.7.4-r0\",\"2.8.0-r0\",\"2.8.0-r1\",\"2.8.1-r0\",\"2.8.2-r0\",\"2.8.3-r0\",\"2.8.4-r0\",\"2.9.0-r0\",\"2.9.1-r0\",\"2.9.2-r0\",\"2.9.3-r0\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2024-32002.json\"}},{\"package\":{\"name\":\"git\",\"ecosystem\":\"Alpine:v3.21\",\"purl\":\"pkg:apk/alpine/git?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"2.45.1-r0\"}]}],\"versions\":[\"1.6.0.4-r1\",\"1.6.0.4-r2\",\"1.6.1-r0\",\"1.6.1.3-r0\",\"1.6.1.3-r1\",\"1.6.2.1-r0\",\"1.6.2.3-r0\",\"1.6.2.4-r0\",\"1.6.2.5-r0\",\"1.6.3.2-r0\",\"1.6.3.3-r0\",\"1.6.4-r0\",\"1.6.4.1-r0\",\"1.6.4.2-r0\",\"1.6.4.2-r1\",\"1.6.4.2-r2\",\"1.6.4.4-r0\",\"1.6.5-r0\",\"1.6.5.2-r0\",\"1.6.5.3-r0\",\"1.6.5.5-r0\",\"1.6.5.6-r0\",\"1.6.5.7-r0\",\"1.6.6-r0\",\"1.6.6.1-r0\",\"1.7.0.2-r0\",\"1.7.0.3-r0\",\"1.7.0.4-r0\",\"1.7.0.5-r0\",\"1.7.0.5-r1\",\"1.7.1-r0\",\"1.7.1-r1\",\"1.7.1-r2\",\"1.7.1.1-r0\",\"1.7.10-r0\",\"1.7.10.1-r0\",\"1.7.10.2-r
0\",\"1.7.10.2-r1\",\"1.7.10.3-r0\",\"1.7.10.4-r0\",\"1.7.11-r0\",\"1.7.11.1-r0\",\"1.7.11.1-r1\",\"1.7.11.2-r0\",\"1.7.11.3-r0\",\"1.7.11.4-r0\",\"1.7.11.5-r0\",\"1.7.12-r0\",\"1.7.12.1-r0\",\"1.7.12.2-r0\",\"1.7.12.3-r0\",\"1.7.12.4-r0\",\"1.7.2-r0\",\"1.7.2.1-r0\",\"1.7.2.2-r0\",\"1.7.2.3-r0\",\"1.7.3-r0\",\"1.7.3.1-r0\",\"1.7.3.2-r0\",\"1.7.3.2-r1\",\"1.7.3.3-r0\",\"1.7.3.4-r0\",\"1.7.3.5-r0\",\"1.7.3.5-r1\",\"1.7.4-r0\",\"1.7.4-r1\",\"1.7.4.1-r0\",\"1.7.4.2-r0\",\"1.7.4.4-r0\",\"1.7.4.5-r0\",\"1.7.5.1-r0\",\"1.7.5.1-r1\",\"1.7.5.2-r0\",\"1.7.5.3-r0\",\"1.7.5.4-r0\",\"1.7.5.4-r1\",\"1.7.6-r0\",\"1.7.6.1-r0\",\"1.7.7-r0\",\"1.7.7.1-r0\",\"1.7.7.2-r0\",\"1.7.7.3-r0\",\"1.7.7.4-r0\",\"1.7.8-r0\",\"1.7.8.1-r0\",\"1.7.8.2-r0\",\"1.7.8.3-r0\",\"1.7.8.4-r0\",\"1.7.8.4-r1\",\"1.7.9-r0\",\"1.7.9.1-r0\",\"1.7.9.2-r0\",\"1.7.9.3-r0\",\"1.7.9.4-r0\",\"1.7.9.5-r0\",\"1.7.9.6-r0\",\"1.8.0-r0\",\"1.8.0.1-r0\",\"1.8.0.2-r0\",\"1.8.0.3-r0\",\"1.8.1-r0\",\"1.8.1.1-r0\",\"1.8.1.2-r0\",\"1.8.1.3-r0\",\"1.8.1.4-r0\",\"1.8.1.5-r0\",\"1.8.2-r0\",\"1.8.2.1-r0\",\"1.8.2.2-r0\",\"1.8.2.3-r0\",\"1.8.2.3-r1\",\"1.8.2.3-r2\",\"1.8.3-r0\",\"1.8.3.1-r0\",\"1.8.3.2-r0\",\"1.8.3.3-r0\",\"1.8.3.4-r0\",\"1.8.4-r0\",\"1.8.4.1-r0\",\"1.8.4.2-r0\",\"1.8.4.3-r0\",\"1.8.4.3-r1\",\"1.8.4.3-r2\",\"1.8.5.1-r0\",\"1.8.5.1-r1\",\"1.8.5.1-r2\",\"1.8.5.1-r3\",\"1.8.5.1-r4\",\"1.8.5.2-r0\",\"1.8.5.3-r0\",\"1.8.5.3-r1\",\"1.8.5.4-r0\",\"1.9.0-r0\",\"1.9.1-r0\",\"1.9.2-r0\",\"1.9.2-r1\",\"1.9.2-r2\",\"1.9.3-r0\",\"2.0.0-r0\",\"2.0.1-r0\",\"2.0.2-r0\",\"2.0.3-r0\",\"2.0.4-r0\",\"2.1.0-r0\",\"2.1.1-r0\",\"2.1.2-r0\",\"2.1.3-r0\",\"2.1.3-r1\",\"2.10.0-r0\",\"2.10.1-r0\",\"2.10.2-r0\",\"2.11.0-r0\",\"2.11.1-r0\",\"2.12.1-r0\",\"2.12.2-r0\",\"2.12.2-r1\",\"2.13.0-r0\",\"2.13.1-r0\",\"2.13.2-r0\",\"2.13.2-r1\",\"2.13.3-r0\",\"2.13.3-r1\",\"2.13.4-r0\",\"2.14.0-r0\",\"2.14.0-r1\",\"2.14.1-r0\",\"2.14.1-r1\",\"2.14.2-r0\",\"2.14.3-r0\",\"2.15.0-r0\",\"2.15.0-r1\",\"2.15.0-r2\",\"2.15.1-r0\",\"2.16.0-r0\",\"2.16.1-r0\",\"2.16.2-r0\",\"2.16.3-r0\",\"2.16.3-r1\",\"2.17.0-r0\",\"2.17.1-r0\",\"2.18.0-r0\",\"2.19.0-r0\",\"2.19.1-r0\",\"2.19.1-r1\",\"2.19.2-r0\",\"2.2.0-r0\",\"2.2.1-r0\",\"2.2.2-r0\",\"2.20.1-r0\",\"2.21.0-r0\",\"2.21.0-r1\",\"2.21.0-r2\",\"2.21.0-r3\",\"2.21.0-r4\",\"2.22.0-r0\",\"2.22.0-r1\",\"2.22.0-r2\",\"2.22.1-r0\",\"2.23.0-r0\",\"2.23.0-r1\",\"2.23.0-r2\",\"2.24.0-r0\",\"2.24.1-r0\",\"2.25.0-r0\",\"2.25.1-r0\",\"2.25.2-r0\",\"2.26.0-r0\",\"2.26.1-r0\",\"2.26.1-r1\",\"2.26.2-r0\",\"2.27.0-r0\",\"2.27.0-r1\",\"2.28.0-r0\",\"2.28.0-r1\",\"2.29.0-r0\",\"2.29.1-r0\",\"2.29.2-r0\",\"2.3.0-r0\",\"2.3.1-r0\",\"2.3.2-r0\",\"2.3.3-r0\",\"2.3.4-r0\",\"2.3.5-r0\",\"2.3.6-r0\",\"2.3.6-r1\",\"2.3.7-r0\",\"2.30.0-r0\",\"2.30.1-r0\",\"2.30.2-r0\",\"2.31.0-r0\",\"2.31.1-r0\",\"2.31.1-r1\",\"2.32.0-r0\",\"2.32.0-r1\",\"2.32.0-r2\",\"2.33.0-r0\",\"2.33.0-r1\",\"2.33.0-r2\",\"2.33.1-r0\",\"2.34.0-r0\",\"2.34.1-r0\",\"2.34.1-r1\",\"2.35.0-r0\",\"2.35.1-r0\",\"2.35.1-r1\",\"2.35.1-r2\",\"2.35.2-r0\",\"2.36.0-r0\",\"2.36.1-r0\",\"2.37.0-r0\",\"2.37.1-r0\",\"2.37.1-r1\",\"2.37.3-r0\",\"2.37.3-r1\",\"2.38.0-r0\",\"2.38.0-r1\",\"2.38.1-r0\",\"2.38.2-r0\",\"2.39.0-r0\",\"2.39.1-r0\",\"2.39.1-r1\",\"2.39.1-r2\",\"2.39.1-r3\",\"2.39.1-r4\",\"2.39.2-r0\",\"2.4.0-r0\",\"2.4.1-r0\",\"2.4.2-r0\",\"2.4.2-r1\",\"2.4.3-r0\",\"2.4.4-r0\",\"2.4.5-r0\",\"2.4.6-r0\",\"2.4.6-r1\",\"2.40.0-r0\",\"2.40.0-r1\",\"2.40.1-r0\",\"2.41.0-r0\",\"2.41.0-r1\",\"2.41.0-r2\",\"2.42.0-r0\",\"2.42.1-r0\",\"2.43.0-r0\",\"2.43.0-r1\",\"2.43.0-r2\",\"2.43.0-r3\",\"2.43.1-r0\",\"2.43.2
-r0\",\"2.44.0-r0\",\"2.44.0-r1\",\"2.44.0-r2\",\"2.45.0-r0\",\"2.5.0-r0\",\"2.5.0-r1\",\"2.5.1-r0\",\"2.5.2-r0\",\"2.5.3-r0\",\"2.6.0-r0\",\"2.6.0-r1\",\"2.6.0-r2\",\"2.6.0-r3\",\"2.6.1-r0\",\"2.6.1-r1\",\"2.6.3-r0\",\"2.6.4-r0\",\"2.7.0-r0\",\"2.7.1-r0\",\"2.7.2-r0\",\"2.7.3-r0\",\"2.7.4-r0\",\"2.8.0-r0\",\"2.8.0-r1\",\"2.8.1-r0\",\"2.8.2-r0\",\"2.8.3-r0\",\"2.8.4-r0\",\"2.9.0-r0\",\"2.9.1-r0\",\"2.9.2-r0\",\"2.9.3-r0\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2024-32002.json\"}},{\"package\":{\"name\":\"git\",\"ecosystem\":\"Debian:11\",\"purl\":\"pkg:deb/debian/git?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:2.30.2-1+deb11u3\"}]}],\"versions\":[\"1:2.30.2-1\",\"1:2.30.2-1+deb11u1\",\"1:2.30.2-1+deb11u2\"],\"ecosystem_specific\":{\"urgency\":\"not yet assigned\"},\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2024-32002.json\"}},{\"package\":{\"name\":\"git\",\"ecosystem\":\"Debian:12\",\"purl\":\"pkg:deb/debian/git?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:2.39.5-0+deb12u1\"}]}],\"versions\":[\"1:2.39.2+next.20230215-1\",\"1:2.39.2-1.1\"],\"ecosystem_specific\":{\"urgency\":\"not yet assigned\"},\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2024-32002.json\"}},{\"package\":{\"name\":\"git\",\"ecosystem\":\"Debian:13\",\"purl\":\"pkg:deb/debian/git?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:2.45.1-1\"}]}],\"versions\":[\"1:2.39.2+next.20230215-1\",\"1:2.39.2-1.1\",\"1:2.40.0+next.20230313-1\",\"1:2.40.0+next.20230319-1\",\"1:2.40.0-1\",\"1:2.40.0-1+alpha.1\",\"1:2.40.1+next.20230424-1\",\"1:2.40.1+next.20230427-1\",\"1:2.40.1-1\",\"1:2.40.1-1+alpha.1\",\"1:2.42.0-1\",\"1:2.43.0+next.20231120-1\",\"1:2.43.0+next.20240104-1\",\"1:2.43.0-1\",\"1:2.43.0-1+alpha.2\"],\"ecosystem_specific\":{\"urgency\":\"not yet 
assigned\"},\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2024-32002.json\"}},{\"ranges\":[{\"type\":\"GIT\",\"repo\":\"https://github.com/git/git\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"97065761333fd62db1912d81b489db938d8c991d\"},{\"fixed\":\"97065761333fd62db1912d81b489db938d8c991d\"}]}],\"versions\":[\"gitgui-0.10.0\",\"gitgui-0.10.1\",\"gitgui-0.10.2\",\"gitgui-0.11.0\",\"gitgui-0.12.0\",\"gitgui-0.13.0\",\"gitgui-0.14.0\",\"gitgui-0.15.0\",\"gitgui-0.16.0\",\"gitgui-0.17.0\",\"gitgui-0.18.0\",\"gitgui-0.19.0\",\"gitgui-0.20.0\",\"gitgui-0.21.0\",\"gitgui-0.6.0\",\"gitgui-0.6.1\",\"gitgui-0.6.2\",\"gitgui-0.6.3\",\"gitgui-0.6.4\",\"gitgui-0.6.5\",\"gitgui-0.7.0\",\"gitgui-0.7.0-rc1\",\"gitgui-0.7.1\",\"gitgui-0.7.2\",\"gitgui-0.7.3\",\"gitgui-0.7.4\",\"gitgui-0.7.5\",\"gitgui-0.8.0\",\"gitgui-0.8.1\",\"gitgui-0.8.2\",\"gitgui-0.8.3\",\"gitgui-0.8.4\",\"gitgui-0.9.0\",\"gitgui-0.9.1\",\"gitgui-0.9.2\",\"gitgui-0.9.3\",\"v0.99\",\"v0.99.1\",\"v0.99.2\",\"v0.99.3\",\"v0.99.4\",\"v0.99.5\",\"v0.99.6\",\"v0.99.7\",\"v0.99.7a\",\"v0.99.7b\",\"v0.99.7c\",\"v0.99.7d\",\"v0.99.8\",\"v0.99.8a\",\"v0.99.8b\",\"v0.99.8c\",\"v0.99.8d\",\"v0.99.8e\",\"v0.99.8f\",\"v0.99.8g\",\"v0.99.9\",\"v0.99.9a\",\"v0.99.9b\",\"v0.99.9c\",\"v0.99.9d\",\"v0.99.9e\",\"v0.99.9f\",\"v0.99.9g\",\"v0.99.9h\",\"v0.99.9i\",\"v0.99.9j\",\"v0.99.9k\",\"v0.99.9l\",\"v0.99.9m\",\"v0.99.9n\",\"v1.0.0\",\"v1.0.0a\",\"v1.0.0b\",\"v1.0.1\",\"v1.0.10\",\"v1.0.11\",\"v1.0.12\",\"v1.0.13\",\"v1.0.2\",\"v1.0.3\",\"v1.0.4\",\"v1.0.5\",\"v1.0.6\",\"v1.0.7\",\"v1.0.8\",\"v1.0.9\",\"v1.0rc1\",\"v1.0rc2\",\"v1.0rc3\",\"v1.0rc4\",\"v1.0rc5\",\"v1.0rc6\",\"v1.1.0\",\"v1.1.1\",\"v1.1.2\",\"v1.1.3\",\"v1.1.4\",\"v1.1.5\",\"v1.1.6\",\"v1.2.0\",\"v1.2.1\",\"v1.2.2\",\"v1.2.3\",\"v1.2.4\",\"v1.2.5\",\"v1.2.6\",\"v1.3.0\",\"v1.3.0-rc1\",\"v1.3.0-rc2\",\"v1.3.0-rc3\",\"v1.3.0-rc4\",\"v1.3.1\",\"v1.3.2\",\"v1.3.3\",\"v1.4.0\",\"v1.4.0-rc1\",\"v1.4.0-rc2\",\"v1.4.1\",\"v1.4.1-rc1\",\"v1.4.1-rc2\",\"v1.4.1.1\",\"v1.4.2\",\"v1.4.2-rc1\",\"v1.4.2-rc2\",\"v1.4.2-rc3\",\"v1.4.2-rc4\",\"v1.4.2.1\",\"v1.4.2.2\",\"v1.4.2.3\",\"v1.4.2.4\",\"v1.4.3\",\"v1.4.3-rc1\",\"v1.4.3-rc2\",\"v1.4.3-rc3\",\"v1.4.3.1\",\"v1.4.3.2\",\"v1.4.3.3\",\"v1.4.3.4\",\"v1.4.3.5\",\"v1.4.4\",\"v1.4.4-rc1\",\"v1.4.4-rc2\",\"v1.4.4.1\",\"v1.4.4.2\",\"v1.4.4.3\",\"v1.4.4.4\",\"v1.5.0\",\"v1.5.0-rc0\",\"v1.5.0-rc1\",\"v1.5.0-rc2\",\"v1.5.0-rc3\",\"v1.5.0-rc4\",\"v1.5.0.1\",\"v1.5.0.2\",\"v1.5.0.3\",\"v1.5.0.4\",\"v1.5.0.5\",\"v1.5.0.6\",\"v1.5.0.7\",\"v1.5.1\",\"v1.5.1-rc1\",\"v1.5.1-rc2\",\"v1.5.1-rc3\",\"v1.5.1.1\",\"v1.5.1.2\",\"v1.5.1.3\",\"v1.5.1.4\",\"v1.5.1.5\",\"v1.5.1.6\",\"v1.5.2\",\"v1.5.2-rc0\",\"v1.5.2-rc1\",\"v1.5.2-rc2\",\"v1.5.2-rc3\",\"v1.5.2.1\",\"v1.5.2.2\",\"v1.5.2.3\",\"v1.5.2.4\",\"v1.5.2.5\",\"v1.5.3\",\"v1.5.3-rc0\",\"v1.5.3-rc1\",\"v1.5.3-rc2\",\"v1.5.3-rc3\",\"v1.5.3-rc4\",\"v1.5.3-rc5\",\"v1.5.3-rc6\",\"v1.5.3-rc7\",\"v1.5.3.1\",\"v1.5.3.2\",\"v1.5.3.3\",\"v1.5.3.4\",\"v1.5.3.5\",\"v1.5.3.6\",\"v1.5.3.7\",\"v1.5.3.8\",\"v1.5.4\",\"v1.5.4-rc0\",\"v1.5.4-rc1\",\"v1.5.4-rc2\",\"v1.5.4-rc3\",\"v1.5.4-rc4\",\"v1.5.4-rc5\",\"v1.5.4.1\",\"v1.5.4.2\",\"v1.5.4.3\",\"v1.5.4.4\",\"v1.5.4.5\",\"v1.5.4.6\",\"v1.5.4.7\",\"v1.5.5\",\"v1.5.5-rc0\",\"v1.5.5-rc1\",\"v1.5.5-rc2\",\"v1.5.5-rc3\",\"v1.5.5.1\",\"v1.5.5.2\",\"v1.5.5.3\",\"v1.5.5.4\",\"v1.5.5.5\",\"v1.5.5.6\",\"v1.5.6\",\"v1.5.6-rc0\",\"v1.5.6-rc1\",\"v1.5.6-rc2\",\"v1.5.6-rc3\",\"v1.5.6.1\",\"v1.5.6.2\",\"v1.5.6.3\",\"v1.5.6.4\",\"v1.5.6.5\",\"v1.5.6.
6\",\"v1.6.0\",\"v1.6.0-rc0\",\"v1.6.0-rc1\",\"v1.6.0-rc2\",\"v1.6.0-rc3\",\"v1.6.0.1\",\"v1.6.0.2\",\"v1.6.0.3\",\"v1.6.0.4\",\"v1.6.0.5\",\"v1.6.0.6\",\"v1.6.1\",\"v1.6.1-rc1\",\"v1.6.1-rc2\",\"v1.6.1-rc3\",\"v1.6.1-rc4\",\"v1.6.1.1\",\"v1.6.1.2\",\"v1.6.1.3\",\"v1.6.1.4\",\"v1.6.2\",\"v1.6.2-rc0\",\"v1.6.2-rc1\",\"v1.6.2-rc2\",\"v1.6.2.1\",\"v1.6.2.2\",\"v1.6.2.3\",\"v1.6.2.4\",\"v1.6.2.5\",\"v1.6.3\",\"v1.6.3-rc0\",\"v1.6.3-rc1\",\"v1.6.3-rc2\",\"v1.6.3-rc3\",\"v1.6.3-rc4\",\"v1.6.3.1\",\"v1.6.3.2\",\"v1.6.3.3\",\"v1.6.3.4\",\"v1.6.4\",\"v1.6.4-rc0\",\"v1.6.4-rc1\",\"v1.6.4-rc2\",\"v1.6.4-rc3\",\"v1.6.4.1\",\"v1.6.4.2\",\"v1.6.4.3\",\"v1.6.4.4\",\"v1.6.4.5\",\"v1.6.5\",\"v1.6.5-rc0\",\"v1.6.5-rc1\",\"v1.6.5-rc2\",\"v1.6.5-rc3\",\"v1.6.5.1\",\"v1.6.5.2\",\"v1.6.5.3\",\"v1.6.5.4\",\"v1.6.5.5\",\"v1.6.5.6\",\"v1.6.5.7\",\"v1.6.5.8\",\"v1.6.5.9\",\"v1.6.6\",\"v1.6.6-rc0\",\"v1.6.6-rc1\",\"v1.6.6-rc2\",\"v1.6.6-rc3\",\"v1.6.6-rc4\",\"v1.6.6.1\",\"v1.6.6.2\",\"v1.6.6.3\",\"v1.7.0\",\"v1.7.0-rc0\",\"v1.7.0-rc1\",\"v1.7.0-rc2\",\"v1.7.0.1\",\"v1.7.0.2\",\"v1.7.0.3\",\"v1.7.0.4\",\"v1.7.0.5\",\"v1.7.0.6\",\"v1.7.0.7\",\"v1.7.0.8\",\"v1.7.0.9\",\"v1.7.1\",\"v1.7.1-rc0\",\"v1.7.1-rc1\",\"v1.7.1-rc2\",\"v1.7.1.1\",\"v1.7.1.2\",\"v1.7.1.3\",\"v1.7.1.4\",\"v1.7.10\",\"v1.7.10-rc0\",\"v1.7.10-rc1\",\"v1.7.10-rc2\",\"v1.7.10-rc3\",\"v1.7.10-rc4\",\"v1.7.10.1\",\"v1.7.10.2\",\"v1.7.10.3\",\"v1.7.10.4\",\"v1.7.10.5\",\"v1.7.11\",\"v1.7.11-rc0\",\"v1.7.11-rc1\",\"v1.7.11-rc2\",\"v1.7.11-rc3\",\"v1.7.11.1\",\"v1.7.11.2\",\"v1.7.11.3\",\"v1.7.11.4\",\"v1.7.11.5\",\"v1.7.11.6\",\"v1.7.11.7\",\"v1.7.12\",\"v1.7.12-rc0\",\"v1.7.12-rc1\",\"v1.7.12-rc2\",\"v1.7.12-rc3\",\"v1.7.12.1\",\"v1.7.12.2\",\"v1.7.12.3\",\"v1.7.12.4\",\"v1.7.2\",\"v1.7.2-rc0\",\"v1.7.2-rc1\",\"v1.7.2-rc2\",\"v1.7.2-rc3\",\"v1.7.2.1\",\"v1.7.2.2\",\"v1.7.2.3\",\"v1.7.2.4\",\"v1.7.2.5\",\"v1.7.3\",\"v1.7.3-rc0\",\"v1.7.3-rc1\",\"v1.7.3-rc2\",\"v1.7.3.1\",\"v1.7.3.2\",\"v1.7.3.3\",\"v1.7.3.4\",\"v1.7.3.5\",\"v1.7.4\",\"v1.7.4-rc0\",\"v1.7.4-rc1\",\"v1.7.4-rc2\",\"v1.7.4-rc3\",\"v1.7.4.1\",\"v1.7.4.2\",\"v1.7.4.3\",\"v1.7.4.4\",\"v1.7.4.5\",\"v1.7.5\",\"v1.7.5-rc0\",\"v1.7.5-rc1\",\"v1.7.5-rc2\",\"v1.7.5-rc3\",\"v1.7.5.1\",\"v1.7.5.2\",\"v1.7.5.3\",\"v1.7.5.4\",\"v1.7.6\",\"v1.7.6-rc0\",\"v1.7.6-rc1\",\"v1.7.6-rc2\",\"v1.7.6-rc3\",\"v1.7.6.1\",\"v1.7.6.2\",\"v1.7.6.3\",\"v1.7.6.4\",\"v1.7.6.5\",\"v1.7.6.6\",\"v1.7.7\",\"v1.7.7-rc0\",\"v1.7.7-rc1\",\"v1.7.7-rc2\",\"v1.7.7-rc3\",\"v1.7.7.1\",\"v1.7.7.2\",\"v1.7.7.3\",\"v1.7.7.4\",\"v1.7.7.5\",\"v1.7.7.6\",\"v1.7.7.7\",\"v1.7.8\",\"v1.7.8-rc0\",\"v1.7.8-rc1\",\"v1.7.8-rc2\",\"v1.7.8-rc3\",\"v1.7.8-rc4\",\"v1.7.8.1\",\"v1.7.8.2\",\"v1.7.8.3\",\"v1.7.8.4\",\"v1.7.8.5\",\"v1.7.8.6\",\"v1.7.9\",\"v1.7.9-rc0\",\"v1.7.9-rc1\",\"v1.7.9-rc2\",\"v1.7.9.1\",\"v1.7.9.2\",\"v1.7.9.3\",\"v1.7.9.4\",\"v1.7.9.5\",\"v1.7.9.6\",\"v1.7.9.7\",\"v1.8.0\",\"v1.8.0-rc0\",\"v1.8.0-rc1\",\"v1.8.0-rc2\",\"v1.8.0-rc3\",\"v1.8.0.1\",\"v1.8.0.2\",\"v1.8.0.3\",\"v1.8.1\",\"v1.8.1-rc0\",\"v1.8.1-rc1\",\"v1.8.1-rc2\",\"v1.8.1-rc3\",\"v1.8.1.1\",\"v1.8.1.2\",\"v1.8.1.3\",\"v1.8.1.4\",\"v1.8.1.5\",\"v1.8.1.6\",\"v1.8.2\",\"v1.8.2-rc0\",\"v1.8.2-rc1\",\"v1.8.2-rc2\",\"v1.8.2-rc3\",\"v1.8.2.1\",\"v1.8.2.2\",\"v1.8.2.3\",\"v1.8.3\",\"v1.8.3-rc0\",\"v1.8.3-rc1\",\"v1.8.3-rc2\",\"v1.8.3-rc3\",\"v1.8.3.1\",\"v1.8.3.2\",\"v1.8.3.3\",\"v1.8.3.4\",\"v1.8.4\",\"v1.8.4-rc0\",\"v1.8.4-rc1\",\"v1.8.4-rc2\",\"v1.8.4-rc3\",\"v1.8.4-rc4\",\"v1.8.4.1\",\"v1.8.4.2\",\"v1.8.4.3\",\"v1.8.4.4\",\"v1.8.4.5\",\"v1.8.5\",\"v1.8.5-rc0\",\"v1.8.5-rc1\",
\"v1.8.5-rc2\",\"v1.8.5-rc3\",\"v1.8.5.1\",\"v1.8.5.2\",\"v1.8.5.3\",\"v1.8.5.4\",\"v1.8.5.5\",\"v1.8.5.6\",\"v1.9-rc0\",\"v1.9-rc1\",\"v1.9-rc2\",\"v1.9.0\",\"v1.9.0-rc3\",\"v1.9.1\",\"v1.9.2\",\"v1.9.3\",\"v1.9.4\",\"v1.9.5\",\"v2.0.0\",\"v2.0.0-rc0\",\"v2.0.0-rc1\",\"v2.0.0-rc2\",\"v2.0.0-rc3\",\"v2.0.0-rc4\",\"v2.0.1\",\"v2.0.2\",\"v2.0.3\",\"v2.0.4\",\"v2.0.5\",\"v2.1.0\",\"v2.1.0-rc0\",\"v2.1.0-rc1\",\"v2.1.0-rc2\",\"v2.1.1\",\"v2.1.2\",\"v2.1.3\",\"v2.1.4\",\"v2.10.0\",\"v2.10.0-rc0\",\"v2.10.0-rc1\",\"v2.10.0-rc2\",\"v2.10.1\",\"v2.10.2\",\"v2.10.3\",\"v2.10.4\",\"v2.10.5\",\"v2.11.0\",\"v2.11.0-rc0\",\"v2.11.0-rc1\",\"v2.11.0-rc2\",\"v2.11.0-rc3\",\"v2.11.1\",\"v2.11.2\",\"v2.11.3\",\"v2.11.4\",\"v2.12.0\",\"v2.12.0-rc0\",\"v2.12.0-rc1\",\"v2.12.0-rc2\",\"v2.12.1\",\"v2.12.2\",\"v2.12.3\",\"v2.12.4\",\"v2.12.5\",\"v2.13.0\",\"v2.13.0-rc0\",\"v2.13.0-rc1\",\"v2.13.0-rc2\",\"v2.13.1\",\"v2.13.2\",\"v2.13.3\",\"v2.13.4\",\"v2.13.5\",\"v2.13.6\",\"v2.13.7\",\"v2.14.0\",\"v2.14.0-rc0\",\"v2.14.0-rc1\",\"v2.14.1\",\"v2.14.2\",\"v2.14.3\",\"v2.14.4\",\"v2.14.5\",\"v2.14.6\",\"v2.15.0\",\"v2.15.0-rc0\",\"v2.15.0-rc1\",\"v2.15.0-rc2\",\"v2.15.1\",\"v2.15.2\",\"v2.15.3\",\"v2.15.4\",\"v2.16.0\",\"v2.16.0-rc0\",\"v2.16.0-rc1\",\"v2.16.0-rc2\",\"v2.16.1\",\"v2.16.2\",\"v2.16.3\",\"v2.16.4\",\"v2.16.5\",\"v2.16.6\",\"v2.17.0\",\"v2.17.0-rc0\",\"v2.17.0-rc1\",\"v2.17.0-rc2\",\"v2.17.1\",\"v2.17.2\",\"v2.17.3\",\"v2.17.4\",\"v2.17.5\",\"v2.17.6\",\"v2.18.0\",\"v2.18.0-rc0\",\"v2.18.0-rc1\",\"v2.18.0-rc2\",\"v2.18.1\",\"v2.18.2\",\"v2.18.3\",\"v2.18.4\",\"v2.18.5\",\"v2.19.0\",\"v2.19.0-rc0\",\"v2.19.0-rc1\",\"v2.19.0-rc2\",\"v2.19.1\",\"v2.19.2\",\"v2.19.3\",\"v2.19.4\",\"v2.19.5\",\"v2.19.6\",\"v2.2.0\",\"v2.2.0-rc0\",\"v2.2.0-rc1\",\"v2.2.0-rc2\",\"v2.2.0-rc3\",\"v2.2.1\",\"v2.2.2\",\"v2.2.3\",\"v2.20.0\",\"v2.20.0-rc0\",\"v2.20.0-rc1\",\"v2.20.0-rc2\",\"v2.20.1\",\"v2.20.2\",\"v2.20.3\",\"v2.20.4\",\"v2.20.5\",\"v2.21.0\",\"v2.21.0-rc0\",\"v2.21.0-rc1\",\"v2.21.0-rc2\",\"v2.21.1\",\"v2.21.2\",\"v2.21.3\",\"v2.21.4\",\"v2.22.0\",\"v2.22.0-rc0\",\"v2.22.0-rc1\",\"v2.22.0-rc2\",\"v2.22.0-rc3\",\"v2.22.1\",\"v2.22.2\",\"v2.22.3\",\"v2.22.4\",\"v2.22.5\",\"v2.23.0\",\"v2.23.0-rc0\",\"v2.23.0-rc1\",\"v2.23.0-rc2\",\"v2.23.1\",\"v2.23.2\",\"v2.23.3\",\"v2.23.4\",\"v2.24.0\",\"v2.24.0-rc0\",\"v2.24.0-rc1\",\"v2.24.0-rc2\",\"v2.24.1\",\"v2.24.2\",\"v2.24.3\",\"v2.24.4\",\"v2.25.0\",\"v2.25.0-rc0\",\"v2.25.0-rc1\",\"v2.25.0-rc2\",\"v2.25.1\",\"v2.25.2\",\"v2.25.3\",\"v2.25.4\",\"v2.25.5\",\"v2.26.0\",\"v2.26.0-rc0\",\"v2.26.0-rc1\",\"v2.26.0-rc2\",\"v2.26.1\",\"v2.26.2\",\"v2.26.3\",\"v2.27.0\",\"v2.27.0-rc0\",\"v2.27.0-rc1\",\"v2.27.0-rc2\",\"v2.27.1\",\"v2.28.0\",\"v2.28.0-rc0\",\"v2.28.0-rc1\",\"v2.28.0-rc2\",\"v2.28.1\",\"v2.29.0\",\"v2.29.0-rc0\",\"v2.29.0-rc1\",\"v2.29.0-rc2\",\"v2.29.1\",\"v2.29.2\",\"v2.29.3\",\"v2.3.0\",\"v2.3.0-rc0\",\"v2.3.0-rc1\",\"v2.3.0-rc2\",\"v2.3.1\",\"v2.3.10\",\"v2.3.2\",\"v2.3.3\",\"v2.3.4\",\"v2.3.5\",\"v2.3.6\",\"v2.3.7\",\"v2.3.8\",\"v2.3.9\",\"v2.30.0\",\"v2.30.0-rc0\",\"v2.30.0-rc1\",\"v2.30.0-rc2\",\"v2.30.1\",\"v2.30.2\",\"v2.30.3\",\"v2.30.4\",\"v2.30.5\",\"v2.30.6\",\"v2.30.7\",\"v2.30.8\",\"v2.30.9\",\"v2.31.0\",\"v2.31.0-rc0\",\"v2.31.0-rc1\",\"v2.31.0-rc2\",\"v2.31.1\",\"v2.31.2\",\"v2.31.3\",\"v2.31.4\",\"v2.31.5\",\"v2.31.6\",\"v2.31.7\",\"v2.31.8\",\"v2.32.0\",\"v2.32.0-rc0\",\"v2.32.0-rc1\",\"v2.32.0-rc2\",\"v2.32.0-rc3\",\"v2.32.1\",\"v2.32.2\",\"v2.32.3\",\"v2.32.4\",\"v2.32.5\",\"v2.32.6\",\"v2.32.7\",\"v2.33.0\",\"v2.33.0-rc0\",\"v2.33.0-rc1\",\"v
2.33.0-rc2\",\"v2.33.1\",\"v2.33.2\",\"v2.33.3\",\"v2.33.4\",\"v2.33.5\",\"v2.33.6\",\"v2.33.7\",\"v2.33.8\",\"v2.34.0\",\"v2.34.0-rc0\",\"v2.34.0-rc1\",\"v2.34.0-rc2\",\"v2.34.1\",\"v2.34.2\",\"v2.34.3\",\"v2.34.4\",\"v2.34.5\",\"v2.34.6\",\"v2.34.7\",\"v2.34.8\",\"v2.35.0\",\"v2.35.0-rc0\",\"v2.35.0-rc1\",\"v2.35.0-rc2\",\"v2.35.1\",\"v2.35.2\",\"v2.35.3\",\"v2.35.4\",\"v2.35.5\",\"v2.35.6\",\"v2.35.7\",\"v2.35.8\",\"v2.36.0\",\"v2.36.0-rc0\",\"v2.36.0-rc1\",\"v2.36.0-rc2\",\"v2.36.1\",\"v2.36.2\",\"v2.36.3\",\"v2.36.4\",\"v2.36.5\",\"v2.36.6\",\"v2.37.0\",\"v2.37.0-rc0\",\"v2.37.0-rc1\",\"v2.37.0-rc2\",\"v2.37.1\",\"v2.37.2\",\"v2.37.3\",\"v2.37.4\",\"v2.37.5\",\"v2.37.6\",\"v2.37.7\",\"v2.38.0\",\"v2.38.0-rc0\",\"v2.38.0-rc1\",\"v2.38.0-rc2\",\"v2.38.1\",\"v2.38.2\",\"v2.38.3\",\"v2.38.4\",\"v2.38.5\",\"v2.39.0\",\"v2.39.0-rc0\",\"v2.39.0-rc1\",\"v2.39.0-rc2\",\"v2.39.1\",\"v2.39.2\",\"v2.39.3\",\"v2.4.0\",\"v2.4.0-rc0\",\"v2.4.0-rc1\",\"v2.4.0-rc2\",\"v2.4.0-rc3\",\"v2.4.1\",\"v2.4.10\",\"v2.4.11\",\"v2.4.12\",\"v2.4.2\",\"v2.4.3\",\"v2.4.4\",\"v2.4.5\",\"v2.4.6\",\"v2.4.7\",\"v2.4.8\",\"v2.4.9\",\"v2.5.0\",\"v2.5.0-rc0\",\"v2.5.0-rc1\",\"v2.5.0-rc2\",\"v2.5.0-rc3\",\"v2.5.1\",\"v2.5.2\",\"v2.5.3\",\"v2.5.4\",\"v2.5.5\",\"v2.5.6\",\"v2.6.0\",\"v2.6.0-rc0\",\"v2.6.0-rc1\",\"v2.6.0-rc2\",\"v2.6.0-rc3\",\"v2.6.1\",\"v2.6.2\",\"v2.6.3\",\"v2.6.4\",\"v2.6.5\",\"v2.6.6\",\"v2.6.7\",\"v2.7.0\",\"v2.7.0-rc0\",\"v2.7.0-rc1\",\"v2.7.0-rc2\",\"v2.7.0-rc3\",\"v2.7.1\",\"v2.7.2\",\"v2.7.3\",\"v2.7.4\",\"v2.7.5\",\"v2.7.6\",\"v2.8.0\",\"v2.8.0-rc0\",\"v2.8.0-rc1\",\"v2.8.0-rc2\",\"v2.8.0-rc3\",\"v2.8.0-rc4\",\"v2.8.1\",\"v2.8.2\",\"v2.8.3\",\"v2.8.4\",\"v2.8.5\",\"v2.8.6\",\"v2.9.0\",\"v2.9.0-rc0\",\"v2.9.0-rc1\",\"v2.9.0-rc2\",\"v2.9.1\",\"v2.9.2\",\"v2.9.3\",\"v2.9.4\",\"v2.9.5\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2024-32002.json\"}}],\"schema_version\":\"1.6.0\",\"severity\":[{\"type\":\"CVSS_V3\",\"score\":\"CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:C/C:H/I:H/A:H\"}]}", + "modified": "2025-08-07T20:01:58.452Z" + } + }, + { + "model": "import_observations.osv_cache", + "pk": 2, + "fields": { + "osv_id": "CVE-2017-6349", + "data": "{\"id\":\"ALPINE-CVE-2017-6349\",\"details\":\"An integer overflow at a u_read_undo memory allocation site would occur for vim before patch 8.0.0377, if it does not properly validate values for tree length when reading a corrupted undo file, which may lead to resultant buffer 
overflows.\",\"modified\":\"2025-01-15T01:28:27.183903Z\",\"published\":\"2017-02-27T07:59:00Z\",\"upstream\":[\"CVE-2017-6349\"],\"related\":[\"DLA-850-1\",\"SUSE-SU-2017:1712-1\",\"SUSE-SU-2022:4619-1\",\"UBUNTU-CVE-2017-6349\",\"USN-4309-1\",\"openSUSE-SU-2024:11081-1\",\"openSUSE-SU-2024:11497-1\"],\"references\":[{\"type\":\"ADVISORY\",\"url\":\"https://security.gentoo.org/glsa/201706-26\"},{\"type\":\"FIX\",\"url\":\"https://github.com/vim/vim/commit/3eb1637b1bba19519885dd6d377bd5596e91d22c\"},{\"type\":\"WEB\",\"url\":\"http://www.securityfocus.com/bid/96451\"},{\"type\":\"WEB\",\"url\":\"http://www.securitytracker.com/id/1037949\"},{\"type\":\"WEB\",\"url\":\"https://groups.google.com/forum/#%21topic/vim_dev/LAgsTcdSfNA\"},{\"type\":\"WEB\",\"url\":\"https://groups.google.com/forum/#%21topic/vim_dev/QPZc0CY9j3Y\"},{\"type\":\"WEB\",\"url\":\"https://usn.ubuntu.com/4309-1/\"},{\"type\":\"ADVISORY\",\"url\":\"https://security.alpinelinux.org/vuln/CVE-2017-6349\"},{\"type\":\"ADVISORY\",\"url\":\"https://security-tracker.debian.org/tracker/CVE-2017-6349\"}],\"affected\":[{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.10\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.11\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0
\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.12\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.13\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.14\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r
0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.15\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.16\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"
8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.17\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.18\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.19\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.
401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.20\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.21\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1
359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.8\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Alpine:v3.9\",\"purl\":\"pkg:apk/alpine/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"8.0.1521-r0\"}]}],\"versions\":[\"7.2.284-r0\",\"7.2.394-r0\",\"7.2.394-r1\",\"7.2.411-r0\",\"7.3-r0\",\"7.3.003-r0\",\"7.3.1070-r0\",\"7.3.112-r0\",\"7.3.112-r1\",\"7.3.1136-r0\",\"7.3.154-r0\",\"7.3.198-r0\",\"7.3.206-r0\",\"7.3.206-r1\",\"7.3.266-r0\",\"7.3.333-r0\",\"7.3.364-r0\",\"7.3.401-r0\",\"7.3.434-r0\",\"7.3.495-r0\",\"7.3.515-r0\",\"7.3.547-r0\",\"7.3.600-r0\",\"7.3.659-r0\",\"7.3.661-r0\",\"7.3.692-r0\",\"7.3.712-r0\",\"7.3.754-r0\",\"7.3.82-r0\",\"7.4-r0\",\"7.4-r1\",\"7.4-r2\",\"7.4-r3\",\"7.4.1225-r0\",\"7.4.1225-r1\",\"7.4.1591-r0\",\"7.4.1831-r0\",\"7.4.1831-r1\",\"7.4.2028-r0\",\"7.4.712-r0\",\"7.4.712-r1\",\"7.4.861-r0\",\"7.4.861-r1\",\"7.4.943-r0\",\"7.4.943-r1\",\"7.4.943-r2\",\"8.0.0003-r0\",\"8.0.0008-r0\",\"8.0.0027-r0\",\"8.0.0056-r0\",\"8.0.0178-r0\",\"8.0.0187-r0\",\"8.0.0329-r0\",\"8.0.0348-r0\",\"8.0.0349-r0\",\"8.0.0460-r0\",\"8.0.0559-r0\",\"8.0.0594-r0\",\"8.0.0595-r0\",\"8.0.0642-r0\",\"8.0.0972-r0\",\"8.0.0974-r0\",\"8.0.1137-r0\",\"8.0.1171-r0\",\"8.0.1240-r0\",\"8.0.1300-r0\",\"8.0.1359-r0\",\"8.0.1367-r0\",\"8.0.1424-r0\",\"8.0.1424-r1\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"neovim\",\"ecosystem\":\"Debian:11\",\"purl\":\"pkg:deb/debian/neovim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0.1.7-4\"}]}],\"ecosystem_specific\":{\"urgency\":\"not yet 
assigned\"},\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"neovim\",\"ecosystem\":\"Debian:12\",\"purl\":\"pkg:deb/debian/neovim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0.1.7-4\"}]}],\"ecosystem_specific\":{\"urgency\":\"not yet assigned\"},\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"neovim\",\"ecosystem\":\"Debian:13\",\"purl\":\"pkg:deb/debian/neovim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0.1.7-4\"}]}],\"ecosystem_specific\":{\"urgency\":\"not yet assigned\"},\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Debian:11\",\"purl\":\"pkg:deb/debian/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"2:8.0.0197-3\"}]}],\"ecosystem_specific\":{\"urgency\":\"not yet assigned\"},\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Debian:12\",\"purl\":\"pkg:deb/debian/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"2:8.0.0197-3\"}]}],\"ecosystem_specific\":{\"urgency\":\"not yet assigned\"},\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"package\":{\"name\":\"vim\",\"ecosystem\":\"Debian:13\",\"purl\":\"pkg:deb/debian/vim?arch=source\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"2:8.0.0197-3\"}]}],\"ecosystem_specific\":{\"urgency\":\"not yet 
assigned\"},\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}},{\"ranges\":[{\"type\":\"GIT\",\"repo\":\"https://github.com/vim/vim\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"3eb1637b1bba19519885dd6d377bd5596e91d22c\"},{\"fixed\":\"3eb1637b1bba19519885dd6d377bd5596e91d22c\"}]}],\"versions\":[\"v7.0\",\"v7.0.001\",\"v7.0.002\",\"v7.0.003\",\"v7.0.004\",\"v7.0.005\",\"v7.0.006\",\"v7.0.007\",\"v7.0.008\",\"v7.0.009\",\"v7.0.010\",\"v7.0.011\",\"v7.0.012\",\"v7.0.013\",\"v7.0.014\",\"v7.0.015\",\"v7.0.016\",\"v7.0.017\",\"v7.0.018\",\"v7.0.019\",\"v7.0.020\",\"v7.0.021\",\"v7.0.022\",\"v7.0.023\",\"v7.0.024\",\"v7.0.025\",\"v7.0.026\",\"v7.0.027\",\"v7.0.028\",\"v7.0.029\",\"v7.0.030\",\"v7.0.031\",\"v7.0.032\",\"v7.0.033\",\"v7.0.034\",\"v7.0.035\",\"v7.0.036\",\"v7.0.037\",\"v7.0.038\",\"v7.0.039\",\"v7.0.040\",\"v7.0.041\",\"v7.0.042\",\"v7.0.043\",\"v7.0.044\",\"v7.0.045\",\"v7.0.046\",\"v7.0.047\",\"v7.0.048\",\"v7.0.049\",\"v7.0.050\",\"v7.0.051\",\"v7.0.052\",\"v7.0.053\",\"v7.0.054\",\"v7.0.055\",\"v7.0.056\",\"v7.0.057\",\"v7.0.058\",\"v7.0.059\",\"v7.0.060\",\"v7.0.061\",\"v7.0.062\",\"v7.0.063\",\"v7.0.064\",\"v7.0.065\",\"v7.0.066\",\"v7.0.067\",\"v7.0.068\",\"v7.0.069\",\"v7.0.070\",\"v7.0.071\",\"v7.0.072\",\"v7.0.073\",\"v7.0.074\",\"v7.0.075\",\"v7.0.076\",\"v7.0.077\",\"v7.0.078\",\"v7.0.079\",\"v7.0.080\",\"v7.0.081\",\"v7.0.082\",\"v7.0.083\",\"v7.0.084\",\"v7.0.085\",\"v7.0.086\",\"v7.0.087\",\"v7.0.088\",\"v7.0.089\",\"v7.0.090\",\"v7.0.091\",\"v7.0.092\",\"v7.0.093\",\"v7.0.094\",\"v7.0.095\",\"v7.0.096\",\"v7.0.097\",\"v7.0.098\",\"v7.0.099\",\"v7.0.100\",\"v7.0.101\",\"v7.0.102\",\"v7.0.103\",\"v7.0.104\",\"v7.0.105\",\"v7.0.106\",\"v7.0.107\",\"v7.0.108\",\"v7.0.109\",\"v7.0.110\",\"v7.0.111\",\"v7.0.112\",\"v7.0.113\",\"v7.0.114\",\"v7.0.115\",\"v7.0.116\",\"v7.0.117\",\"v7.0.118\",\"v7.0.119\",\"v7.0.120\",\"v7.0.121\",\"v7.0.122\",\"v7.0.123\",\"v7.0.124\",\"v7.0.125\",\"v7.0.126\",\"v7.0.127\",\"v7.0.128\",\"v7.0.129\",\"v7.0.130\",\"v7.0.131\",\"v7.0.132\",\"v7.0.133\",\"v7.0.134\",\"v7.0.135\",\"v7.0.136\",\"v7.0.137\",\"v7.0.138\",\"v7.0.139\",\"v7.0.140\",\"v7.0.141\",\"v7.0.142\",\"v7.0.143\",\"v7.0.144\",\"v7.0.145\",\"v7.0.146\",\"v7.0.147\",\"v7.0.148\",\"v7.0.149\",\"v7.0.150\",\"v7.0.151\",\"v7.0.152\",\"v7.0.153\",\"v7.0.154\",\"v7.0.155\",\"v7.0.156\",\"v7.0.157\",\"v7.0.158\",\"v7.0.159\",\"v7.0.160\",\"v7.0.161\",\"v7.0.162\",\"v7.0.163\",\"v7.0.164\",\"v7.0.165\",\"v7.0.166\",\"v7.0.167\",\"v7.0.168\",\"v7.0.169\",\"v7.0.170\",\"v7.0.171\",\"v7.0.172\",\"v7.0.173\",\"v7.0.174\",\"v7.0.175\",\"v7.0.176\",\"v7.0.177\",\"v7.0.178\",\"v7.0.179\",\"v7.0.180\",\"v7.0.181\",\"v7.0.182\",\"v7.0.183\",\"v7.0.184\",\"v7.0.185\",\"v7.0.186\",\"v7.0.187\",\"v7.0.188\",\"v7.0.189\",\"v7.0.190\",\"v7.0.191\",\"v7.0.192\",\"v7.0.193\",\"v7.0.194\",\"v7.0.195\",\"v7.0.196\",\"v7.0.197\",\"v7.0.198\",\"v7.0.199\",\"v7.0.200\",\"v7.0.201\",\"v7.0.202\",\"v7.0.203\",\"v7.0.204\",\"v7.0.205\",\"v7.0.206\",\"v7.0.207\",\"v7.0.208\",\"v7.0.209\",\"v7.0.210\",\"v7.0.211\",\"v7.0.212\",\"v7.0.213\",\"v7.0.214\",\"v7.0.215\",\"v7.0.216\",\"v7.0.217\",\"v7.0.218\",\"v7.0.219\",\"v7.0.220\",\"v7.0.221\",\"v7.0.222\",\"v7.0.223\",\"v7.0.224\",\"v7.0.225\",\"v7.0.226\",\"v7.0.227\",\"v7.0.228\",\"v7.0.229\",\"v7.0.230\",\"v7.0.231\",\"v7.0.232\",\"v7.0.233\",\"v7.0.234\",\"v7.0.235\",\"v7.0.236\",\"v7.0.237\",\"v7.0.238\",\"v7.0.239\",\"v7.0.240\",\"v7.0.241\",\"v7.0.242\",\"v7.0.243\",\"v7.0001\",\"v7.0
002\",\"v7.0007\",\"v7.0008\",\"v7.0009\",\"v7.0010\",\"v7.0011\",\"v7.0012\",\"v7.0015\",\"v7.0016\",\"v7.0017\",\"v7.0018\",\"v7.0019\",\"v7.0020\",\"v7.0021\",\"v7.0022\",\"v7.0023\",\"v7.0024\",\"v7.0025\",\"v7.0026\",\"v7.0027\",\"v7.0028\",\"v7.0029\",\"v7.0030\",\"v7.0031\",\"v7.0032\",\"v7.0033\",\"v7.0034\",\"v7.0035\",\"v7.0036\",\"v7.0037\",\"v7.0038\",\"v7.0039\",\"v7.0040\",\"v7.0041\",\"v7.0042\",\"v7.0043\",\"v7.0044\",\"v7.0045\",\"v7.0046\",\"v7.0047\",\"v7.0048\",\"v7.0049\",\"v7.0050\",\"v7.0051\",\"v7.0052\",\"v7.0053\",\"v7.0054\",\"v7.0055\",\"v7.0056\",\"v7.0057\",\"v7.0058\",\"v7.0059\",\"v7.0060\",\"v7.0061\",\"v7.0062\",\"v7.0063\",\"v7.0064\",\"v7.0065\",\"v7.0066\",\"v7.0067\",\"v7.0068\",\"v7.0069\",\"v7.0070\",\"v7.0071\",\"v7.0072\",\"v7.0073\",\"v7.0074\",\"v7.0075\",\"v7.0076\",\"v7.0077\",\"v7.0078\",\"v7.0079\",\"v7.0080\",\"v7.0081\",\"v7.0082\",\"v7.0083\",\"v7.0084\",\"v7.0085\",\"v7.0086\",\"v7.0087\",\"v7.0088\",\"v7.0089\",\"v7.0090\",\"v7.0091\",\"v7.0092\",\"v7.0093\",\"v7.0094\",\"v7.0095\",\"v7.0096\",\"v7.0097\",\"v7.0098\",\"v7.0099\",\"v7.0100\",\"v7.0101\",\"v7.0102\",\"v7.0103\",\"v7.0104\",\"v7.0105\",\"v7.0106\",\"v7.0107\",\"v7.0108\",\"v7.0109\",\"v7.0110\",\"v7.0111\",\"v7.0112\",\"v7.0113\",\"v7.0114\",\"v7.0115\",\"v7.0116\",\"v7.0117\",\"v7.0118\",\"v7.0119\",\"v7.0120\",\"v7.0121\",\"v7.0122\",\"v7.0123\",\"v7.0124\",\"v7.0125\",\"v7.0126\",\"v7.0127\",\"v7.0128\",\"v7.0129\",\"v7.0130\",\"v7.0131\",\"v7.0132\",\"v7.0133\",\"v7.0134\",\"v7.0135\",\"v7.0136\",\"v7.0137\",\"v7.0138\",\"v7.0139\",\"v7.0140\",\"v7.0141\",\"v7.0142\",\"v7.0143\",\"v7.0144\",\"v7.0145\",\"v7.0146\",\"v7.0147\",\"v7.0148\",\"v7.0149\",\"v7.0150\",\"v7.0151\",\"v7.0152\",\"v7.0153\",\"v7.0154\",\"v7.0155\",\"v7.0156\",\"v7.0157\",\"v7.0158\",\"v7.0159\",\"v7.0160\",\"v7.0161\",\"v7.0162\",\"v7.0163\",\"v7.0164\",\"v7.0165\",\"v7.0166\",\"v7.0167\",\"v7.0168\",\"v7.0169\",\"v7.0170\",\"v7.0171\",\"v7.0172\",\"v7.0173\",\"v7.0174\",\"v7.0175\",\"v7.0176\",\"v7.0177\",\"v7.0178\",\"v7.0179\",\"v7.0180\",\"v7.0181\",\"v7.0182\",\"v7.0183\",\"v7.0184\",\"v7.0185\",\"v7.0186\",\"v7.0187\",\"v7.0188\",\"v7.0189\",\"v7.0191\",\"v7.0192\",\"v7.0193\",\"v7.0194\",\"v7.0195\",\"v7.0196\",\"v7.0197\",\"v7.0198\",\"v7.0199\",\"v7.0200\",\"v7.0201\",\"v7.0202\",\"v7.0203\",\"v7.0204\",\"v7.0205\",\"v7.0206\",\"v7.0207\",\"v7.0208\",\"v7.0209\",\"v7.0210\",\"v7.0211\",\"v7.0212\",\"v7.0213\",\"v7.0214\",\"v7.0216\",\"v7.0217\",\"v7.0218\",\"v7.0219\",\"v7.0220\",\"v7.0221\",\"v7.0222\",\"v7.0223\",\"v7.0224\",\"v7.0225\",\"v7.0226\",\"v7.0227\",\"v7.0228\",\"v7.0229\",\"v7.0230\",\"v7.0231\",\"v7.0232\",\"v7.0b\",\"v7.0b01\",\"v7.0b02\",\"v7.0c\",\"v7.0c01\",\"v7.0c02\",\"v7.0c03\",\"v7.0c10\",\"v7.0c11\",\"v7.0c12\",\"v7.0c13\",\"v7.0d\",\"v7.0d01\",\"v7.0d02\",\"v7.0d03\",\"v7.0d04\",\"v7.0d05\",\"v7.0e\",\"v7.0e01\",\"v7.0e02\",\"v7.0e03\",\"v7.0e04\",\"v7.0e05\",\"v7.0e06\",\"v7.0e07\",\"v7.0f\",\"v7.0f01\",\"v7.0f02\",\"v7.0f03\",\"v7.0f04\",\"v7.0f05\",\"v7.0g\",\"v7.0g01\",\"v7.0g02\",\"v7.0g03\",\"v7.0g04\",\"v7.0g05\",\"v7.1\",\"v7.1.001\",\"v7.1.002\",\"v7.1.003\",\"v7.1.004\",\"v7.1.005\",\"v7.1.006\",\"v7.1.007\",\"v7.1.008\",\"v7.1.009\",\"v7.1.010\",\"v7.1.011\",\"v7.1.012\",\"v7.1.013\",\"v7.1.014\",\"v7.1.015\",\"v7.1.016\",\"v7.1.017\",\"v7.1.018\",\"v7.1.019\",\"v7.1.020\",\"v7.1.021\",\"v7.1.022\",\"v7.1.023\",\"v7.1.024\",\"v7.1.025\",\"v7.1.026\",\"v7.1.027\",\"v7.1.028\",\"v7.1.029\",\"v7.1.030\",\"v7.1.031\",\"v7.1.032\",\"v7.1.033\",\"v7.1.034\",\"
v7.1.035\",\"v7.1.036\",\"v7.1.037\",\"v7.1.038\",\"v7.1.039\",\"v7.1.040\",\"v7.1.041\",\"v7.1.042\",\"v7.1.043\",\"v7.1.044\",\"v7.1.045\",\"v7.1.046\",\"v7.1.047\",\"v7.1.048\",\"v7.1.049\",\"v7.1.050\",\"v7.1.051\",\"v7.1.052\",\"v7.1.053\",\"v7.1.054\",\"v7.1.055\",\"v7.1.056\",\"v7.1.057\",\"v7.1.058\",\"v7.1.059\",\"v7.1.060\",\"v7.1.061\",\"v7.1.062\",\"v7.1.063\",\"v7.1.064\",\"v7.1.065\",\"v7.1.066\",\"v7.1.067\",\"v7.1.068\",\"v7.1.069\",\"v7.1.070\",\"v7.1.071\",\"v7.1.072\",\"v7.1.073\",\"v7.1.074\",\"v7.1.075\",\"v7.1.076\",\"v7.1.077\",\"v7.1.078\",\"v7.1.079\",\"v7.1.080\",\"v7.1.081\",\"v7.1.082\",\"v7.1.084\",\"v7.1.085\",\"v7.1.086\",\"v7.1.087\",\"v7.1.088\",\"v7.1.089\",\"v7.1.090\",\"v7.1.091\",\"v7.1.092\",\"v7.1.093\",\"v7.1.094\",\"v7.1.095\",\"v7.1.096\",\"v7.1.097\",\"v7.1.098\",\"v7.1.099\",\"v7.1.100\",\"v7.1.101\",\"v7.1.102\",\"v7.1.103\",\"v7.1.104\",\"v7.1.105\",\"v7.1.106\",\"v7.1.107\",\"v7.1.108\",\"v7.1.109\",\"v7.1.110\",\"v7.1.111\",\"v7.1.112\",\"v7.1.113\",\"v7.1.114\",\"v7.1.115\",\"v7.1.116\",\"v7.1.117\",\"v7.1.118\",\"v7.1.119\",\"v7.1.120\",\"v7.1.121\",\"v7.1.122\",\"v7.1.123\",\"v7.1.124\",\"v7.1.125\",\"v7.1.126\",\"v7.1.127\",\"v7.1.128\",\"v7.1.129\",\"v7.1.130\",\"v7.1.131\",\"v7.1.132\",\"v7.1.133\",\"v7.1.134\",\"v7.1.135\",\"v7.1.136\",\"v7.1.137\",\"v7.1.138\",\"v7.1.139\",\"v7.1.140\",\"v7.1.141\",\"v7.1.142\",\"v7.1.143\",\"v7.1.144\",\"v7.1.145\",\"v7.1.146\",\"v7.1.147\",\"v7.1.148\",\"v7.1.149\",\"v7.1.150\",\"v7.1.151\",\"v7.1.152\",\"v7.1.153\",\"v7.1.154\",\"v7.1.155\",\"v7.1.156\",\"v7.1.157\",\"v7.1.158\",\"v7.1.159\",\"v7.1.160\",\"v7.1.161\",\"v7.1.162\",\"v7.1.163\",\"v7.1.164\",\"v7.1.165\",\"v7.1.166\",\"v7.1.167\",\"v7.1.168\",\"v7.1.169\",\"v7.1.170\",\"v7.1.171\",\"v7.1.172\",\"v7.1.173\",\"v7.1.174\",\"v7.1.175\",\"v7.1.176\",\"v7.1.177\",\"v7.1.178\",\"v7.1.179\",\"v7.1.180\",\"v7.1.181\",\"v7.1.182\",\"v7.1.183\",\"v7.1.184\",\"v7.1.185\",\"v7.1.186\",\"v7.1.187\",\"v7.1.188\",\"v7.1.189\",\"v7.1.190\",\"v7.1.191\",\"v7.1.192\",\"v7.1.193\",\"v7.1.194\",\"v7.1.195\",\"v7.1.196\",\"v7.1.197\",\"v7.1.198\",\"v7.1.199\",\"v7.1.200\",\"v7.1.201\",\"v7.1.202\",\"v7.1.203\",\"v7.1.204\",\"v7.1.205\",\"v7.1.206\",\"v7.1.207\",\"v7.1.208\",\"v7.1.209\",\"v7.1.210\",\"v7.1.211\",\"v7.1.212\",\"v7.1.213\",\"v7.1.214\",\"v7.1.215\",\"v7.1.216\",\"v7.1.217\",\"v7.1.218\",\"v7.1.219\",\"v7.1.220\",\"v7.1.221\",\"v7.1.222\",\"v7.1.223\",\"v7.1.224\",\"v7.1.225\",\"v7.1.226\",\"v7.1.227\",\"v7.1.228\",\"v7.1.229\",\"v7.1.230\",\"v7.1.231\",\"v7.1.232\",\"v7.1.233\",\"v7.1.234\",\"v7.1.235\",\"v7.1.236\",\"v7.1.237\",\"v7.1.238\",\"v7.1.239\",\"v7.1.240\",\"v7.1.241\",\"v7.1.242\",\"v7.1.243\",\"v7.1.244\",\"v7.1.245\",\"v7.1.246\",\"v7.1.247\",\"v7.1.248\",\"v7.1.249\",\"v7.1.250\",\"v7.1.251\",\"v7.1.252\",\"v7.1.253\",\"v7.1.254\",\"v7.1.255\",\"v7.1.256\",\"v7.1.257\",\"v7.1.258\",\"v7.1.259\",\"v7.1.260\",\"v7.1.261\",\"v7.1.262\",\"v7.1.263\",\"v7.1.264\",\"v7.1.265\",\"v7.1.266\",\"v7.1.267\",\"v7.1.268\",\"v7.1.269\",\"v7.1.270\",\"v7.1.271\",\"v7.1.272\",\"v7.1.273\",\"v7.1.274\",\"v7.1.275\",\"v7.1.276\",\"v7.1.277\",\"v7.1.278\",\"v7.1.279\",\"v7.1.280\",\"v7.1.281\",\"v7.1.282\",\"v7.1.283\",\"v7.1.284\",\"v7.1.285\",\"v7.1.286\",\"v7.1.287\",\"v7.1.288\",\"v7.1.289\",\"v7.1.290\",\"v7.1.291\",\"v7.1.292\",\"v7.1.293\",\"v7.1.294\",\"v7.1.295\",\"v7.1.296\",\"v7.1.297\",\"v7.1.298\",\"v7.1.299\",\"v7.1.300\",\"v7.1.301\",\"v7.1.302\",\"v7.1.303\",\"v7.1.304\",\"v7.1.305\",\"v7.1.306\",\"v7.1.307\",\"v7.1.308\",\"v7.1.3
09\",\"v7.1.310\",\"v7.1.311\",\"v7.1.312\",\"v7.1.313\",\"v7.1.314\",\"v7.1.315\",\"v7.1.316\",\"v7.1.317\",\"v7.1.318\",\"v7.1.319\",\"v7.1.320\",\"v7.1.321\",\"v7.1.322\",\"v7.1.323\",\"v7.1.324\",\"v7.1.325\",\"v7.1.326\",\"v7.1.327\",\"v7.1.328\",\"v7.1.329\",\"v7.1.330\",\"v7.1a\",\"v7.1a.001\",\"v7.1b\",\"v7.1b.001\",\"v7.1b.002\",\"v7.2\",\"v7.2.000\",\"v7.2.001\",\"v7.2.002\",\"v7.2.003\",\"v7.2.004\",\"v7.2.005\",\"v7.2.006\",\"v7.2.007\",\"v7.2.008\",\"v7.2.009\",\"v7.2.010\",\"v7.2.011\",\"v7.2.012\",\"v7.2.013\",\"v7.2.014\",\"v7.2.015\",\"v7.2.016\",\"v7.2.017\",\"v7.2.018\",\"v7.2.019\",\"v7.2.020\",\"v7.2.021\",\"v7.2.022\",\"v7.2.023\",\"v7.2.024\",\"v7.2.025\",\"v7.2.026\",\"v7.2.027\",\"v7.2.028\",\"v7.2.029\",\"v7.2.030\",\"v7.2.031\",\"v7.2.032\",\"v7.2.033\",\"v7.2.034\",\"v7.2.035\",\"v7.2.036\",\"v7.2.037\",\"v7.2.038\",\"v7.2.039\",\"v7.2.040\",\"v7.2.041\",\"v7.2.042\",\"v7.2.043\",\"v7.2.044\",\"v7.2.045\",\"v7.2.046\",\"v7.2.047\",\"v7.2.048\",\"v7.2.049\",\"v7.2.050\",\"v7.2.051\",\"v7.2.052\",\"v7.2.053\",\"v7.2.055\",\"v7.2.056\",\"v7.2.057\",\"v7.2.058\",\"v7.2.059\",\"v7.2.060\",\"v7.2.061\",\"v7.2.062\",\"v7.2.063\",\"v7.2.064\",\"v7.2.065\",\"v7.2.066\",\"v7.2.067\",\"v7.2.068\",\"v7.2.069\",\"v7.2.070\",\"v7.2.071\",\"v7.2.072\",\"v7.2.073\",\"v7.2.074\",\"v7.2.075\",\"v7.2.076\",\"v7.2.077\",\"v7.2.078\",\"v7.2.079\",\"v7.2.080\",\"v7.2.081\",\"v7.2.082\",\"v7.2.083\",\"v7.2.084\",\"v7.2.085\",\"v7.2.086\",\"v7.2.087\",\"v7.2.088\",\"v7.2.089\",\"v7.2.090\",\"v7.2.091\",\"v7.2.092\",\"v7.2.093\",\"v7.2.094\",\"v7.2.095\",\"v7.2.096\",\"v7.2.097\",\"v7.2.098\",\"v7.2.099\",\"v7.2.100\",\"v7.2.101\",\"v7.2.102\",\"v7.2.103\",\"v7.2.104\",\"v7.2.105\",\"v7.2.106\",\"v7.2.107\",\"v7.2.108\",\"v7.2.109\",\"v7.2.110\",\"v7.2.111\",\"v7.2.112\",\"v7.2.113\",\"v7.2.114\",\"v7.2.115\",\"v7.2.116\",\"v7.2.117\",\"v7.2.118\",\"v7.2.119\",\"v7.2.120\",\"v7.2.121\",\"v7.2.122\",\"v7.2.123\",\"v7.2.124\",\"v7.2.125\",\"v7.2.126\",\"v7.2.127\",\"v7.2.128\",\"v7.2.129\",\"v7.2.130\",\"v7.2.131\",\"v7.2.132\",\"v7.2.133\",\"v7.2.134\",\"v7.2.135\",\"v7.2.136\",\"v7.2.137\",\"v7.2.138\",\"v7.2.139\",\"v7.2.140\",\"v7.2.141\",\"v7.2.142\",\"v7.2.143\",\"v7.2.144\",\"v7.2.145\",\"v7.2.146\",\"v7.2.147\",\"v7.2.148\",\"v7.2.149\",\"v7.2.150\",\"v7.2.151\",\"v7.2.152\",\"v7.2.153\",\"v7.2.154\",\"v7.2.155\",\"v7.2.156\",\"v7.2.157\",\"v7.2.158\",\"v7.2.159\",\"v7.2.160\",\"v7.2.161\",\"v7.2.162\",\"v7.2.163\",\"v7.2.164\",\"v7.2.165\",\"v7.2.166\",\"v7.2.167\",\"v7.2.168\",\"v7.2.169\",\"v7.2.170\",\"v7.2.171\",\"v7.2.172\",\"v7.2.173\",\"v7.2.174\",\"v7.2.175\",\"v7.2.176\",\"v7.2.177\",\"v7.2.178\",\"v7.2.179\",\"v7.2.180\",\"v7.2.181\",\"v7.2.182\",\"v7.2.183\",\"v7.2.184\",\"v7.2.185\",\"v7.2.186\",\"v7.2.187\",\"v7.2.188\",\"v7.2.189\",\"v7.2.190\",\"v7.2.191\",\"v7.2.192\",\"v7.2.193\",\"v7.2.194\",\"v7.2.195\",\"v7.2.196\",\"v7.2.197\",\"v7.2.198\",\"v7.2.199\",\"v7.2.200\",\"v7.2.201\",\"v7.2.202\",\"v7.2.203\",\"v7.2.204\",\"v7.2.205\",\"v7.2.206\",\"v7.2.207\",\"v7.2.208\",\"v7.2.209\",\"v7.2.210\",\"v7.2.211\",\"v7.2.212\",\"v7.2.213\",\"v7.2.214\",\"v7.2.215\",\"v7.2.216\",\"v7.2.217\",\"v7.2.218\",\"v7.2.219\",\"v7.2.220\",\"v7.2.221\",\"v7.2.222\",\"v7.2.223\",\"v7.2.224\",\"v7.2.225\",\"v7.2.226\",\"v7.2.227\",\"v7.2.228\",\"v7.2.229\",\"v7.2.230\",\"v7.2.231\",\"v7.2.232\",\"v7.2.233\",\"v7.2.234\",\"v7.2.235\",\"v7.2.236\",\"v7.2.237\",\"v7.2.238\",\"v7.2.239\",\"v7.2.240\",\"v7.2.241\",\"v7.2.242\",\"v7.2.243\",\"v7.2.244\",\"v7.2.245\",\"v7.2.246\",\"v7.2.2
47\",\"v7.2.248\",\"v7.2.249\",\"v7.2.250\",\"v7.2.251\",\"v7.2.252\",\"v7.2.253\",\"v7.2.254\",\"v7.2.255\",\"v7.2.256\",\"v7.2.257\",\"v7.2.258\",\"v7.2.259\",\"v7.2.260\",\"v7.2.261\",\"v7.2.262\",\"v7.2.263\",\"v7.2.264\",\"v7.2.265\",\"v7.2.266\",\"v7.2.267\",\"v7.2.268\",\"v7.2.269\",\"v7.2.270\",\"v7.2.271\",\"v7.2.272\",\"v7.2.273\",\"v7.2.274\",\"v7.2.275\",\"v7.2.276\",\"v7.2.277\",\"v7.2.278\",\"v7.2.279\",\"v7.2.280\",\"v7.2.281\",\"v7.2.282\",\"v7.2.283\",\"v7.2.284\",\"v7.2.285\",\"v7.2.286\",\"v7.2.287\",\"v7.2.288\",\"v7.2.289\",\"v7.2.290\",\"v7.2.291\",\"v7.2.292\",\"v7.2.293\",\"v7.2.294\",\"v7.2.295\",\"v7.2.296\",\"v7.2.297\",\"v7.2.298\",\"v7.2.299\",\"v7.2.300\",\"v7.2.301\",\"v7.2.302\",\"v7.2.303\",\"v7.2.304\",\"v7.2.305\",\"v7.2.306\",\"v7.2.307\",\"v7.2.308\",\"v7.2.309\",\"v7.2.310\",\"v7.2.311\",\"v7.2.312\",\"v7.2.313\",\"v7.2.314\",\"v7.2.315\",\"v7.2.316\",\"v7.2.317\",\"v7.2.318\",\"v7.2.319\",\"v7.2.320\",\"v7.2.321\",\"v7.2.322\",\"v7.2.323\",\"v7.2.324\",\"v7.2.325\",\"v7.2.326\",\"v7.2.327\",\"v7.2.328\",\"v7.2.329\",\"v7.2.330\",\"v7.2.331\",\"v7.2.332\",\"v7.2.333\",\"v7.2.334\",\"v7.2.335\",\"v7.2.336\",\"v7.2.337\",\"v7.2.338\",\"v7.2.339\",\"v7.2.340\",\"v7.2.341\",\"v7.2.342\",\"v7.2.343\",\"v7.2.344\",\"v7.2.345\",\"v7.2.346\",\"v7.2.347\",\"v7.2.348\",\"v7.2.349\",\"v7.2.350\",\"v7.2.351\",\"v7.2.352\",\"v7.2.353\",\"v7.2.354\",\"v7.2.355\",\"v7.2.356\",\"v7.2.357\",\"v7.2.358\",\"v7.2.359\",\"v7.2.360\",\"v7.2.361\",\"v7.2.362\",\"v7.2.363\",\"v7.2.364\",\"v7.2.365\",\"v7.2.366\",\"v7.2.367\",\"v7.2.368\",\"v7.2.369\",\"v7.2.370\",\"v7.2.371\",\"v7.2.372\",\"v7.2.373\",\"v7.2.374\",\"v7.2.375\",\"v7.2.376\",\"v7.2.377\",\"v7.2.378\",\"v7.2.379\",\"v7.2.380\",\"v7.2.381\",\"v7.2.382\",\"v7.2.383\",\"v7.2.384\",\"v7.2.385\",\"v7.2.386\",\"v7.2.387\",\"v7.2.388\",\"v7.2.389\",\"v7.2.390\",\"v7.2.391\",\"v7.2.392\",\"v7.2.393\",\"v7.2.394\",\"v7.2.395\",\"v7.2.396\",\"v7.2.397\",\"v7.2.398\",\"v7.2.399\",\"v7.2.400\",\"v7.2.401\",\"v7.2.402\",\"v7.2.403\",\"v7.2.404\",\"v7.2.405\",\"v7.2.406\",\"v7.2.407\",\"v7.2.408\",\"v7.2.409\",\"v7.2.410\",\"v7.2.411\",\"v7.2.412\",\"v7.2.413\",\"v7.2.414\",\"v7.2.415\",\"v7.2.416\",\"v7.2.417\",\"v7.2.418\",\"v7.2.419\",\"v7.2.420\",\"v7.2.421\",\"v7.2.422\",\"v7.2.423\",\"v7.2.424\",\"v7.2.425\",\"v7.2.426\",\"v7.2.427\",\"v7.2.428\",\"v7.2.429\",\"v7.2.430\",\"v7.2.431\",\"v7.2.432\",\"v7.2.433\",\"v7.2.434\",\"v7.2.435\",\"v7.2.436\",\"v7.2.437\",\"v7.2.438\",\"v7.2.439\",\"v7.2.440\",\"v7.2.441\",\"v7.2.442\",\"v7.2.443\",\"v7.2.444\",\"v7.2.445\",\"v7.2.446\",\"v7.2a\",\"v7.2a.00\",\"v7.2a.001\",\"v7.2a.002\",\"v7.2a.003\",\"v7.2a.004\",\"v7.2a.005\",\"v7.2a.006\",\"v7.2a.007\",\"v7.2a.008\",\"v7.2a.009\",\"v7.2a.010\",\"v7.2a.011\",\"v7.2a.012\",\"v7.2a.013\",\"v7.2a.014\",\"v7.2a.015\",\"v7.2a.016\",\"v7.2a.017\",\"v7.2a.018\",\"v7.2a.019\",\"v7.2b.000\",\"v7.2b.001\",\"v7.2b.002\",\"v7.2b.003\",\"v7.2b.004\",\"v7.2b.005\",\"v7.2b.006\",\"v7.2b.007\",\"v7.2b.008\",\"v7.2b.009\",\"v7.2b.010\",\"v7.2b.011\",\"v7.2b.012\",\"v7.2b.014\",\"v7.2b.015\",\"v7.2b.016\",\"v7.2b.017\",\"v7.2b.018\",\"v7.2b.019\",\"v7.2b.020\",\"v7.2b.021\",\"v7.2b.022\",\"v7.2b.023\",\"v7.2b.024\",\"v7.2b.025\",\"v7.2b.026\",\"v7.2b.027\",\"v7.2b.028\",\"v7.2b.029\",\"v7.2b.030\",\"v7.2c.000\",\"v7.2c.001\",\"v7.2c.002\",\"v7.2c.003\",\"v7.3\",\"v7.3.001\",\"v7.3.002\",\"v7.3.003\",\"v7.3.004\",\"v7.3.005\",\"v7.3.006\",\"v7.3.007\",\"v7.3.008\",\"v7.3.009\",\"v7.3.010\",\"v7.3.011\",\"v7.3.012\",\"v7.3.013\",\"v7.3.014\",\"v7.3.
015\",\"v7.3.016\",\"v7.3.017\",\"v7.3.018\",\"v7.3.019\",\"v7.3.020\",\"v7.3.021\",\"v7.3.022\",\"v7.3.023\",\"v7.3.024\",\"v7.3.025\",\"v7.3.026\",\"v7.3.027\",\"v7.3.028\",\"v7.3.029\",\"v7.3.030\",\"v7.3.031\",\"v7.3.032\",\"v7.3.033\",\"v7.3.034\",\"v7.3.035\",\"v7.3.036\",\"v7.3.037\",\"v7.3.038\",\"v7.3.039\",\"v7.3.040\",\"v7.3.041\",\"v7.3.042\",\"v7.3.043\",\"v7.3.044\",\"v7.3.045\",\"v7.3.046\",\"v7.3.047\",\"v7.3.048\",\"v7.3.049\",\"v7.3.050\",\"v7.3.051\",\"v7.3.052\",\"v7.3.053\",\"v7.3.054\",\"v7.3.055\",\"v7.3.056\",\"v7.3.057\",\"v7.3.058\",\"v7.3.059\",\"v7.3.060\",\"v7.3.061\",\"v7.3.062\",\"v7.3.063\",\"v7.3.064\",\"v7.3.065\",\"v7.3.066\",\"v7.3.067\",\"v7.3.068\",\"v7.3.069\",\"v7.3.070\",\"v7.3.071\",\"v7.3.072\",\"v7.3.073\",\"v7.3.074\",\"v7.3.075\",\"v7.3.076\",\"v7.3.077\",\"v7.3.078\",\"v7.3.079\",\"v7.3.080\",\"v7.3.081\",\"v7.3.082\",\"v7.3.083\",\"v7.3.084\",\"v7.3.085\",\"v7.3.086\",\"v7.3.087\",\"v7.3.088\",\"v7.3.089\",\"v7.3.090\",\"v7.3.091\",\"v7.3.092\",\"v7.3.093\",\"v7.3.094\",\"v7.3.095\",\"v7.3.096\",\"v7.3.097\",\"v7.3.098\",\"v7.3.099\",\"v7.3.100\",\"v7.3.1000\",\"v7.3.1001\",\"v7.3.1002\",\"v7.3.1003\",\"v7.3.1004\",\"v7.3.1005\",\"v7.3.1006\",\"v7.3.1007\",\"v7.3.1008\",\"v7.3.1009\",\"v7.3.101\",\"v7.3.1010\",\"v7.3.1011\",\"v7.3.1012\",\"v7.3.1013\",\"v7.3.1014\",\"v7.3.1015\",\"v7.3.1016\",\"v7.3.1017\",\"v7.3.1018\",\"v7.3.1019\",\"v7.3.102\",\"v7.3.1020\",\"v7.3.1021\",\"v7.3.1022\",\"v7.3.1023\",\"v7.3.1024\",\"v7.3.1025\",\"v7.3.1026\",\"v7.3.1027\",\"v7.3.1028\",\"v7.3.1029\",\"v7.3.103\",\"v7.3.1030\",\"v7.3.1031\",\"v7.3.1032\",\"v7.3.1033\",\"v7.3.1034\",\"v7.3.1035\",\"v7.3.1036\",\"v7.3.1037\",\"v7.3.1038\",\"v7.3.1039\",\"v7.3.104\",\"v7.3.1040\",\"v7.3.1041\",\"v7.3.1042\",\"v7.3.1043\",\"v7.3.1044\",\"v7.3.1045\",\"v7.3.1046\",\"v7.3.1047\",\"v7.3.1048\",\"v7.3.1049\",\"v7.3.105\",\"v7.3.1050\",\"v7.3.1051\",\"v7.3.1052\",\"v7.3.1053\",\"v7.3.1054\",\"v7.3.1055\",\"v7.3.1056\",\"v7.3.1057\",\"v7.3.1058\",\"v7.3.1059\",\"v7.3.106\",\"v7.3.1060\",\"v7.3.1061\",\"v7.3.1062\",\"v7.3.1063\",\"v7.3.1064\",\"v7.3.1065\",\"v7.3.1066\",\"v7.3.1067\",\"v7.3.1068\",\"v7.3.1069\",\"v7.3.107\",\"v7.3.1070\",\"v7.3.1071\",\"v7.3.1072\",\"v7.3.1073\",\"v7.3.1074\",\"v7.3.1075\",\"v7.3.1076\",\"v7.3.1077\",\"v7.3.1078\",\"v7.3.1079\",\"v7.3.108\",\"v7.3.1080\",\"v7.3.1081\",\"v7.3.1082\",\"v7.3.1083\",\"v7.3.1084\",\"v7.3.1085\",\"v7.3.1086\",\"v7.3.1087\",\"v7.3.1088\",\"v7.3.1089\",\"v7.3.109\",\"v7.3.1090\",\"v7.3.1091\",\"v7.3.1092\",\"v7.3.1093\",\"v7.3.1094\",\"v7.3.1095\",\"v7.3.1096\",\"v7.3.1097\",\"v7.3.1098\",\"v7.3.1099\",\"v7.3.110\",\"v7.3.1100\",\"v7.3.1101\",\"v7.3.1102\",\"v7.3.1103\",\"v7.3.1104\",\"v7.3.1105\",\"v7.3.1106\",\"v7.3.1107\",\"v7.3.1108\",\"v7.3.1109\",\"v7.3.111\",\"v7.3.1110\",\"v7.3.1111\",\"v7.3.1112\",\"v7.3.1113\",\"v7.3.1114\",\"v7.3.1115\",\"v7.3.1116\",\"v7.3.1117\",\"v7.3.1118\",\"v7.3.1119\",\"v7.3.112\",\"v7.3.1120\",\"v7.3.1121\",\"v7.3.1122\",\"v7.3.1123\",\"v7.3.1124\",\"v7.3.1125\",\"v7.3.1126\",\"v7.3.1127\",\"v7.3.1128\",\"v7.3.1129\",\"v7.3.113\",\"v7.3.1130\",\"v7.3.1131\",\"v7.3.1132\",\"v7.3.1133\",\"v7.3.1134\",\"v7.3.1135\",\"v7.3.1136\",\"v7.3.1137\",\"v7.3.1138\",\"v7.3.1139\",\"v7.3.114\",\"v7.3.1140\",\"v7.3.1141\",\"v7.3.1142\",\"v7.3.1143\",\"v7.3.1144\",\"v7.3.1145\",\"v7.3.1146\",\"v7.3.1147\",\"v7.3.1148\",\"v7.3.1149\",\"v7.3.115\",\"v7.3.1150\",\"v7.3.1151\",\"v7.3.1152\",\"v7.3.1153\",\"v7.3.1154\",\"v7.3.1155\",\"v7.3.1156\",\"v7.3.1157\",\"v7.3.1158\",\"v7.3.1159\",\"v7.3.11
6\",\"v7.3.1160\",\"v7.3.1161\",\"v7.3.1162\",\"v7.3.1163\",\"v7.3.1164\",\"v7.3.1165\",\"v7.3.1166\",\"v7.3.1167\",\"v7.3.1168\",\"v7.3.1169\",\"v7.3.117\",\"v7.3.1170\",\"v7.3.1171\",\"v7.3.1172\",\"v7.3.1173\",\"v7.3.1174\",\"v7.3.1175\",\"v7.3.1176\",\"v7.3.1177\",\"v7.3.1178\",\"v7.3.1179\",\"v7.3.118\",\"v7.3.1180\",\"v7.3.1181\",\"v7.3.1182\",\"v7.3.1183\",\"v7.3.1184\",\"v7.3.1185\",\"v7.3.1186\",\"v7.3.1187\",\"v7.3.1188\",\"v7.3.1189\",\"v7.3.119\",\"v7.3.1190\",\"v7.3.1191\",\"v7.3.1192\",\"v7.3.1193\",\"v7.3.1194\",\"v7.3.1195\",\"v7.3.1196\",\"v7.3.1197\",\"v7.3.1198\",\"v7.3.1199\",\"v7.3.120\",\"v7.3.1200\",\"v7.3.1201\",\"v7.3.1202\",\"v7.3.1203\",\"v7.3.1204\",\"v7.3.1205\",\"v7.3.1206\",\"v7.3.1207\",\"v7.3.1208\",\"v7.3.1209\",\"v7.3.121\",\"v7.3.1210\",\"v7.3.1211\",\"v7.3.1212\",\"v7.3.1213\",\"v7.3.1214\",\"v7.3.1215\",\"v7.3.1216\",\"v7.3.1217\",\"v7.3.1218\",\"v7.3.1219\",\"v7.3.122\",\"v7.3.1220\",\"v7.3.1221\",\"v7.3.1222\",\"v7.3.1223\",\"v7.3.1224\",\"v7.3.1225\",\"v7.3.1226\",\"v7.3.1227\",\"v7.3.1228\",\"v7.3.1229\",\"v7.3.123\",\"v7.3.1230\",\"v7.3.1231\",\"v7.3.1232\",\"v7.3.1233\",\"v7.3.1234\",\"v7.3.1235\",\"v7.3.1236\",\"v7.3.1237\",\"v7.3.1238\",\"v7.3.1239\",\"v7.3.124\",\"v7.3.1240\",\"v7.3.1241\",\"v7.3.1242\",\"v7.3.1243\",\"v7.3.1244\",\"v7.3.1245\",\"v7.3.1246\",\"v7.3.1247\",\"v7.3.1248\",\"v7.3.1249\",\"v7.3.125\",\"v7.3.1250\",\"v7.3.1251\",\"v7.3.1252\",\"v7.3.1253\",\"v7.3.1254\",\"v7.3.1255\",\"v7.3.1256\",\"v7.3.1257\",\"v7.3.1258\",\"v7.3.1259\",\"v7.3.126\",\"v7.3.1260\",\"v7.3.1261\",\"v7.3.1262\",\"v7.3.1263\",\"v7.3.1264\",\"v7.3.1265\",\"v7.3.1266\",\"v7.3.1267\",\"v7.3.1268\",\"v7.3.1269\",\"v7.3.127\",\"v7.3.1270\",\"v7.3.1271\",\"v7.3.1272\",\"v7.3.1273\",\"v7.3.1274\",\"v7.3.1275\",\"v7.3.1276\",\"v7.3.1277\",\"v7.3.1278\",\"v7.3.1279\",\"v7.3.128\",\"v7.3.1280\",\"v7.3.1281\",\"v7.3.1282\",\"v7.3.1283\",\"v7.3.1284\",\"v7.3.1285\",\"v7.3.1286\",\"v7.3.1287\",\"v7.3.1288\",\"v7.3.1289\",\"v7.3.129\",\"v7.3.1290\",\"v7.3.1291\",\"v7.3.1292\",\"v7.3.1293\",\"v7.3.1294\",\"v7.3.1295\",\"v7.3.1296\",\"v7.3.1297\",\"v7.3.1298\",\"v7.3.1299\",\"v7.3.130\",\"v7.3.1300\",\"v7.3.1301\",\"v7.3.1302\",\"v7.3.1303\",\"v7.3.1304\",\"v7.3.1305\",\"v7.3.1306\",\"v7.3.1307\",\"v7.3.1308\",\"v7.3.1309\",\"v7.3.131\",\"v7.3.1310\",\"v7.3.1311\",\"v7.3.1312\",\"v7.3.1313\",\"v7.3.1314\",\"v7.3.132\",\"v7.3.133\",\"v7.3.134\",\"v7.3.135\",\"v7.3.136\",\"v7.3.137\",\"v7.3.138\",\"v7.3.139\",\"v7.3.140\",\"v7.3.141\",\"v7.3.142\",\"v7.3.143\",\"v7.3.144\",\"v7.3.145\",\"v7.3.146\",\"v7.3.147\",\"v7.3.148\",\"v7.3.149\",\"v7.3.150\",\"v7.3.151\",\"v7.3.152\",\"v7.3.153\",\"v7.3.154\",\"v7.3.155\",\"v7.3.156\",\"v7.3.157\",\"v7.3.158\",\"v7.3.159\",\"v7.3.160\",\"v7.3.161\",\"v7.3.162\",\"v7.3.163\",\"v7.3.164\",\"v7.3.165\",\"v7.3.166\",\"v7.3.167\",\"v7.3.168\",\"v7.3.169\",\"v7.3.170\",\"v7.3.171\",\"v7.3.172\",\"v7.3.173\",\"v7.3.174\",\"v7.3.175\",\"v7.3.176\",\"v7.3.177\",\"v7.3.178\",\"v7.3.179\",\"v7.3.180\",\"v7.3.181\",\"v7.3.182\",\"v7.3.183\",\"v7.3.184\",\"v7.3.185\",\"v7.3.186\",\"v7.3.187\",\"v7.3.188\",\"v7.3.189\",\"v7.3.190\",\"v7.3.191\",\"v7.3.192\",\"v7.3.193\",\"v7.3.194\",\"v7.3.195\",\"v7.3.196\",\"v7.3.197\",\"v7.3.198\",\"v7.3.199\",\"v7.3.200\",\"v7.3.201\",\"v7.3.202\",\"v7.3.203\",\"v7.3.204\",\"v7.3.205\",\"v7.3.206\",\"v7.3.207\",\"v7.3.208\",\"v7.3.209\",\"v7.3.210\",\"v7.3.211\",\"v7.3.212\",\"v7.3.213\",\"v7.3.214\",\"v7.3.215\",\"v7.3.216\",\"v7.3.217\",\"v7.3.218\",\"v7.3.219\",\"v7.3.220\",\"v7.3.221\",\"v7.3.222\",\"v
7.3.223\",\"v7.3.224\",\"v7.3.225\",\"v7.3.226\",\"v7.3.227\",\"v7.3.228\",\"v7.3.229\",\"v7.3.230\",\"v7.3.231\",\"v7.3.232\",\"v7.3.233\",\"v7.3.234\",\"v7.3.235\",\"v7.3.236\",\"v7.3.237\",\"v7.3.238\",\"v7.3.239\",\"v7.3.240\",\"v7.3.241\",\"v7.3.242\",\"v7.3.243\",\"v7.3.244\",\"v7.3.245\",\"v7.3.246\",\"v7.3.247\",\"v7.3.248\",\"v7.3.249\",\"v7.3.250\",\"v7.3.251\",\"v7.3.252\",\"v7.3.253\",\"v7.3.254\",\"v7.3.255\",\"v7.3.256\",\"v7.3.257\",\"v7.3.258\",\"v7.3.259\",\"v7.3.260\",\"v7.3.261\",\"v7.3.262\",\"v7.3.263\",\"v7.3.264\",\"v7.3.265\",\"v7.3.266\",\"v7.3.267\",\"v7.3.268\",\"v7.3.269\",\"v7.3.270\",\"v7.3.271\",\"v7.3.272\",\"v7.3.273\",\"v7.3.274\",\"v7.3.275\",\"v7.3.276\",\"v7.3.277\",\"v7.3.278\",\"v7.3.279\",\"v7.3.280\",\"v7.3.281\",\"v7.3.282\",\"v7.3.283\",\"v7.3.284\",\"v7.3.285\",\"v7.3.286\",\"v7.3.287\",\"v7.3.288\",\"v7.3.289\",\"v7.3.290\",\"v7.3.291\",\"v7.3.292\",\"v7.3.293\",\"v7.3.294\",\"v7.3.295\",\"v7.3.296\",\"v7.3.297\",\"v7.3.298\",\"v7.3.299\",\"v7.3.300\",\"v7.3.301\",\"v7.3.302\",\"v7.3.303\",\"v7.3.304\",\"v7.3.305\",\"v7.3.306\",\"v7.3.307\",\"v7.3.308\",\"v7.3.309\",\"v7.3.310\",\"v7.3.311\",\"v7.3.312\",\"v7.3.313\",\"v7.3.314\",\"v7.3.315\",\"v7.3.316\",\"v7.3.317\",\"v7.3.318\",\"v7.3.319\",\"v7.3.320\",\"v7.3.321\",\"v7.3.322\",\"v7.3.323\",\"v7.3.324\",\"v7.3.325\",\"v7.3.326\",\"v7.3.327\",\"v7.3.328\",\"v7.3.329\",\"v7.3.330\",\"v7.3.331\",\"v7.3.332\",\"v7.3.333\",\"v7.3.334\",\"v7.3.335\",\"v7.3.336\",\"v7.3.337\",\"v7.3.338\",\"v7.3.339\",\"v7.3.340\",\"v7.3.341\",\"v7.3.342\",\"v7.3.343\",\"v7.3.344\",\"v7.3.345\",\"v7.3.346\",\"v7.3.347\",\"v7.3.348\",\"v7.3.349\",\"v7.3.350\",\"v7.3.351\",\"v7.3.352\",\"v7.3.353\",\"v7.3.354\",\"v7.3.355\",\"v7.3.356\",\"v7.3.357\",\"v7.3.358\",\"v7.3.359\",\"v7.3.360\",\"v7.3.361\",\"v7.3.362\",\"v7.3.363\",\"v7.3.364\",\"v7.3.365\",\"v7.3.366\",\"v7.3.367\",\"v7.3.368\",\"v7.3.369\",\"v7.3.370\",\"v7.3.371\",\"v7.3.372\",\"v7.3.373\",\"v7.3.374\",\"v7.3.375\",\"v7.3.376\",\"v7.3.377\",\"v7.3.378\",\"v7.3.379\",\"v7.3.380\",\"v7.3.381\",\"v7.3.382\",\"v7.3.383\",\"v7.3.384\",\"v7.3.385\",\"v7.3.386\",\"v7.3.387\",\"v7.3.388\",\"v7.3.389\",\"v7.3.390\",\"v7.3.391\",\"v7.3.392\",\"v7.3.393\",\"v7.3.394\",\"v7.3.395\",\"v7.3.396\",\"v7.3.397\",\"v7.3.398\",\"v7.3.399\",\"v7.3.400\",\"v7.3.401\",\"v7.3.402\",\"v7.3.403\",\"v7.3.404\",\"v7.3.405\",\"v7.3.406\",\"v7.3.407\",\"v7.3.408\",\"v7.3.409\",\"v7.3.410\",\"v7.3.411\",\"v7.3.412\",\"v7.3.413\",\"v7.3.414\",\"v7.3.415\",\"v7.3.416\",\"v7.3.417\",\"v7.3.418\",\"v7.3.419\",\"v7.3.420\",\"v7.3.421\",\"v7.3.422\",\"v7.3.423\",\"v7.3.424\",\"v7.3.425\",\"v7.3.426\",\"v7.3.427\",\"v7.3.428\",\"v7.3.429\",\"v7.3.430\",\"v7.3.431\",\"v7.3.432\",\"v7.3.433\",\"v7.3.434\",\"v7.3.435\",\"v7.3.436\",\"v7.3.437\",\"v7.3.438\",\"v7.3.439\",\"v7.3.440\",\"v7.3.441\",\"v7.3.442\",\"v7.3.443\",\"v7.3.444\",\"v7.3.445\",\"v7.3.446\",\"v7.3.447\",\"v7.3.448\",\"v7.3.449\",\"v7.3.450\",\"v7.3.451\",\"v7.3.452\",\"v7.3.453\",\"v7.3.454\",\"v7.3.455\",\"v7.3.456\",\"v7.3.457\",\"v7.3.458\",\"v7.3.459\",\"v7.3.460\",\"v7.3.461\",\"v7.3.462\",\"v7.3.463\",\"v7.3.464\",\"v7.3.465\",\"v7.3.466\",\"v7.3.467\",\"v7.3.468\",\"v7.3.469\",\"v7.3.470\",\"v7.3.471\",\"v7.3.472\",\"v7.3.473\",\"v7.3.474\",\"v7.3.475\",\"v7.3.476\",\"v7.3.477\",\"v7.3.478\",\"v7.3.479\",\"v7.3.480\",\"v7.3.481\",\"v7.3.482\",\"v7.3.483\",\"v7.3.484\",\"v7.3.485\",\"v7.3.486\",\"v7.3.487\",\"v7.3.488\",\"v7.3.489\",\"v7.3.490\",\"v7.3.491\",\"v7.3.492\",\"v7.3.493\",\"v7.3.494\",\"v7.3.495\",\"v7.3.49
6\",\"v7.3.497\",\"v7.3.498\",\"v7.3.499\",\"v7.3.500\",\"v7.3.501\",\"v7.3.502\",\"v7.3.503\",\"v7.3.504\",\"v7.3.505\",\"v7.3.506\",\"v7.3.507\",\"v7.3.508\",\"v7.3.509\",\"v7.3.510\",\"v7.3.511\",\"v7.3.512\",\"v7.3.513\",\"v7.3.514\",\"v7.3.515\",\"v7.3.516\",\"v7.3.517\",\"v7.3.518\",\"v7.3.519\",\"v7.3.520\",\"v7.3.521\",\"v7.3.522\",\"v7.3.523\",\"v7.3.524\",\"v7.3.525\",\"v7.3.526\",\"v7.3.527\",\"v7.3.528\",\"v7.3.529\",\"v7.3.530\",\"v7.3.531\",\"v7.3.532\",\"v7.3.533\",\"v7.3.534\",\"v7.3.535\",\"v7.3.536\",\"v7.3.537\",\"v7.3.538\",\"v7.3.539\",\"v7.3.540\",\"v7.3.541\",\"v7.3.542\",\"v7.3.543\",\"v7.3.544\",\"v7.3.545\",\"v7.3.546\",\"v7.3.547\",\"v7.3.548\",\"v7.3.549\",\"v7.3.550\",\"v7.3.551\",\"v7.3.552\",\"v7.3.553\",\"v7.3.554\",\"v7.3.555\",\"v7.3.556\",\"v7.3.557\",\"v7.3.558\",\"v7.3.559\",\"v7.3.560\",\"v7.3.561\",\"v7.3.562\",\"v7.3.563\",\"v7.3.564\",\"v7.3.565\",\"v7.3.566\",\"v7.3.567\",\"v7.3.568\",\"v7.3.569\",\"v7.3.570\",\"v7.3.571\",\"v7.3.572\",\"v7.3.573\",\"v7.3.574\",\"v7.3.575\",\"v7.3.576\",\"v7.3.577\",\"v7.3.578\",\"v7.3.579\",\"v7.3.580\",\"v7.3.581\",\"v7.3.582\",\"v7.3.583\",\"v7.3.584\",\"v7.3.585\",\"v7.3.586\",\"v7.3.587\",\"v7.3.588\",\"v7.3.589\",\"v7.3.590\",\"v7.3.591\",\"v7.3.592\",\"v7.3.593\",\"v7.3.594\",\"v7.3.595\",\"v7.3.596\",\"v7.3.597\",\"v7.3.598\",\"v7.3.599\",\"v7.3.600\",\"v7.3.601\",\"v7.3.602\",\"v7.3.603\",\"v7.3.604\",\"v7.3.605\",\"v7.3.606\",\"v7.3.607\",\"v7.3.608\",\"v7.3.609\",\"v7.3.610\",\"v7.3.611\",\"v7.3.612\",\"v7.3.613\",\"v7.3.614\",\"v7.3.615\",\"v7.3.616\",\"v7.3.617\",\"v7.3.618\",\"v7.3.619\",\"v7.3.620\",\"v7.3.621\",\"v7.3.622\",\"v7.3.623\",\"v7.3.624\",\"v7.3.625\",\"v7.3.626\",\"v7.3.627\",\"v7.3.628\",\"v7.3.629\",\"v7.3.630\",\"v7.3.631\",\"v7.3.632\",\"v7.3.633\",\"v7.3.634\",\"v7.3.635\",\"v7.3.636\",\"v7.3.637\",\"v7.3.638\",\"v7.3.639\",\"v7.3.640\",\"v7.3.641\",\"v7.3.642\",\"v7.3.643\",\"v7.3.644\",\"v7.3.645\",\"v7.3.646\",\"v7.3.647\",\"v7.3.648\",\"v7.3.649\",\"v7.3.650\",\"v7.3.651\",\"v7.3.652\",\"v7.3.653\",\"v7.3.654\",\"v7.3.655\",\"v7.3.656\",\"v7.3.657\",\"v7.3.658\",\"v7.3.659\",\"v7.3.660\",\"v7.3.661\",\"v7.3.662\",\"v7.3.663\",\"v7.3.664\",\"v7.3.665\",\"v7.3.666\",\"v7.3.667\",\"v7.3.668\",\"v7.3.669\",\"v7.3.670\",\"v7.3.671\",\"v7.3.672\",\"v7.3.673\",\"v7.3.674\",\"v7.3.675\",\"v7.3.676\",\"v7.3.677\",\"v7.3.678\",\"v7.3.679\",\"v7.3.680\",\"v7.3.681\",\"v7.3.682\",\"v7.3.683\",\"v7.3.684\",\"v7.3.685\",\"v7.3.686\",\"v7.3.687\",\"v7.3.688\",\"v7.3.689\",\"v7.3.690\",\"v7.3.691\",\"v7.3.692\",\"v7.3.693\",\"v7.3.694\",\"v7.3.695\",\"v7.3.696\",\"v7.3.697\",\"v7.3.698\",\"v7.3.699\",\"v7.3.700\",\"v7.3.701\",\"v7.3.702\",\"v7.3.703\",\"v7.3.704\",\"v7.3.705\",\"v7.3.706\",\"v7.3.707\",\"v7.3.708\",\"v7.3.709\",\"v7.3.710\",\"v7.3.711\",\"v7.3.712\",\"v7.3.713\",\"v7.3.714\",\"v7.3.715\",\"v7.3.716\",\"v7.3.717\",\"v7.3.718\",\"v7.3.719\",\"v7.3.720\",\"v7.3.721\",\"v7.3.722\",\"v7.3.723\",\"v7.3.724\",\"v7.3.725\",\"v7.3.726\",\"v7.3.727\",\"v7.3.728\",\"v7.3.729\",\"v7.3.730\",\"v7.3.731\",\"v7.3.732\",\"v7.3.733\",\"v7.3.734\",\"v7.3.735\",\"v7.3.736\",\"v7.3.737\",\"v7.3.738\",\"v7.3.739\",\"v7.3.740\",\"v7.3.741\",\"v7.3.742\",\"v7.3.743\",\"v7.3.744\",\"v7.3.745\",\"v7.3.746\",\"v7.3.747\",\"v7.3.748\",\"v7.3.749\",\"v7.3.750\",\"v7.3.751\",\"v7.3.752\",\"v7.3.753\",\"v7.3.754\",\"v7.3.755\",\"v7.3.756\",\"v7.3.757\",\"v7.3.758\",\"v7.3.759\",\"v7.3.760\",\"v7.3.761\",\"v7.3.762\",\"v7.3.763\",\"v7.3.764\",\"v7.3.765\",\"v7.3.766\",\"v7.3.767\",\"v7.3.768\",\"v7.3.769\",\"
v7.3.770\",\"v7.3.771\",\"v7.3.772\",\"v7.3.773\",\"v7.3.774\",\"v7.3.775\",\"v7.3.776\",\"v7.3.777\",\"v7.3.778\",\"v7.3.779\",\"v7.3.780\",\"v7.3.781\",\"v7.3.782\",\"v7.3.783\",\"v7.3.784\",\"v7.3.785\",\"v7.3.786\",\"v7.3.787\",\"v7.3.788\",\"v7.3.789\",\"v7.3.790\",\"v7.3.791\",\"v7.3.792\",\"v7.3.793\",\"v7.3.794\",\"v7.3.795\",\"v7.3.796\",\"v7.3.797\",\"v7.3.798\",\"v7.3.799\",\"v7.3.800\",\"v7.3.801\",\"v7.3.802\",\"v7.3.803\",\"v7.3.804\",\"v7.3.805\",\"v7.3.806\",\"v7.3.807\",\"v7.3.808\",\"v7.3.809\",\"v7.3.810\",\"v7.3.811\",\"v7.3.812\",\"v7.3.813\",\"v7.3.814\",\"v7.3.815\",\"v7.3.816\",\"v7.3.817\",\"v7.3.818\",\"v7.3.819\",\"v7.3.820\",\"v7.3.821\",\"v7.3.822\",\"v7.3.823\",\"v7.3.824\",\"v7.3.825\",\"v7.3.826\",\"v7.3.827\",\"v7.3.828\",\"v7.3.829\",\"v7.3.830\",\"v7.3.831\",\"v7.3.832\",\"v7.3.833\",\"v7.3.834\",\"v7.3.835\",\"v7.3.836\",\"v7.3.837\",\"v7.3.838\",\"v7.3.839\",\"v7.3.840\",\"v7.3.841\",\"v7.3.842\",\"v7.3.843\",\"v7.3.844\",\"v7.3.845\",\"v7.3.846\",\"v7.3.847\",\"v7.3.848\",\"v7.3.849\",\"v7.3.850\",\"v7.3.851\",\"v7.3.852\",\"v7.3.853\",\"v7.3.854\",\"v7.3.855\",\"v7.3.856\",\"v7.3.857\",\"v7.3.858\",\"v7.3.859\",\"v7.3.860\",\"v7.3.861\",\"v7.3.862\",\"v7.3.863\",\"v7.3.864\",\"v7.3.865\",\"v7.3.866\",\"v7.3.867\",\"v7.3.868\",\"v7.3.869\",\"v7.3.870\",\"v7.3.871\",\"v7.3.872\",\"v7.3.873\",\"v7.3.874\",\"v7.3.875\",\"v7.3.876\",\"v7.3.877\",\"v7.3.878\",\"v7.3.879\",\"v7.3.880\",\"v7.3.881\",\"v7.3.882\",\"v7.3.883\",\"v7.3.884\",\"v7.3.885\",\"v7.3.886\",\"v7.3.887\",\"v7.3.888\",\"v7.3.889\",\"v7.3.890\",\"v7.3.891\",\"v7.3.892\",\"v7.3.893\",\"v7.3.894\",\"v7.3.895\",\"v7.3.896\",\"v7.3.897\",\"v7.3.898\",\"v7.3.899\",\"v7.3.900\",\"v7.3.901\",\"v7.3.902\",\"v7.3.903\",\"v7.3.904\",\"v7.3.905\",\"v7.3.906\",\"v7.3.907\",\"v7.3.908\",\"v7.3.909\",\"v7.3.910\",\"v7.3.911\",\"v7.3.912\",\"v7.3.913\",\"v7.3.914\",\"v7.3.915\",\"v7.3.916\",\"v7.3.917\",\"v7.3.918\",\"v7.3.919\",\"v7.3.920\",\"v7.3.921\",\"v7.3.922\",\"v7.3.923\",\"v7.3.924\",\"v7.3.925\",\"v7.3.926\",\"v7.3.927\",\"v7.3.928\",\"v7.3.929\",\"v7.3.930\",\"v7.3.931\",\"v7.3.932\",\"v7.3.933\",\"v7.3.934\",\"v7.3.935\",\"v7.3.936\",\"v7.3.937\",\"v7.3.938\",\"v7.3.939\",\"v7.3.940\",\"v7.3.941\",\"v7.3.942\",\"v7.3.943\",\"v7.3.944\",\"v7.3.945\",\"v7.3.946\",\"v7.3.947\",\"v7.3.948\",\"v7.3.949\",\"v7.3.950\",\"v7.3.951\",\"v7.3.952\",\"v7.3.953\",\"v7.3.954\",\"v7.3.955\",\"v7.3.956\",\"v7.3.957\",\"v7.3.958\",\"v7.3.959\",\"v7.3.960\",\"v7.3.961\",\"v7.3.962\",\"v7.3.963\",\"v7.3.964\",\"v7.3.965\",\"v7.3.966\",\"v7.3.967\",\"v7.3.968\",\"v7.3.969\",\"v7.3.970\",\"v7.3.971\",\"v7.3.972\",\"v7.3.973\",\"v7.3.974\",\"v7.3.975\",\"v7.3.976\",\"v7.3.977\",\"v7.3.978\",\"v7.3.979\",\"v7.3.980\",\"v7.3.981\",\"v7.3.982\",\"v7.3.983\",\"v7.3.984\",\"v7.3.985\",\"v7.3.986\",\"v7.3.987\",\"v7.3.988\",\"v7.3.989\",\"v7.3.990\",\"v7.3.991\",\"v7.3.992\",\"v7.3.993\",\"v7.3.994\",\"v7.3.995\",\"v7.3.996\",\"v7.3.997\",\"v7.3.998\",\"v7.3.999\",\"v7.4\",\"v7.4.001\",\"v7.4.002\",\"v7.4.003\",\"v7.4.004\",\"v7.4.005\",\"v7.4.006\",\"v7.4.007\",\"v7.4.008\",\"v7.4.009\",\"v7.4.010\",\"v7.4.011\",\"v7.4.012\",\"v7.4.013\",\"v7.4.014\",\"v7.4.015\",\"v7.4.016\",\"v7.4.017\",\"v7.4.018\",\"v7.4.019\",\"v7.4.020\",\"v7.4.021\",\"v7.4.022\",\"v7.4.023\",\"v7.4.024\",\"v7.4.025\",\"v7.4.026\",\"v7.4.027\",\"v7.4.028\",\"v7.4.029\",\"v7.4.030\",\"v7.4.031\",\"v7.4.032\",\"v7.4.033\",\"v7.4.034\",\"v7.4.035\",\"v7.4.036\",\"v7.4.037\",\"v7.4.038\",\"v7.4.039\",\"v7.4.040\",\"v7.4.041\",\"v7.4.042\",\"v7.4.043\"
,\"v7.4.044\",\"v7.4.045\",\"v7.4.046\",\"v7.4.047\",\"v7.4.048\",\"v7.4.049\",\"v7.4.050\",\"v7.4.051\",\"v7.4.052\",\"v7.4.053\",\"v7.4.054\",\"v7.4.055\",\"v7.4.056\",\"v7.4.057\",\"v7.4.058\",\"v7.4.059\",\"v7.4.060\",\"v7.4.061\",\"v7.4.062\",\"v7.4.063\",\"v7.4.064\",\"v7.4.065\",\"v7.4.066\",\"v7.4.067\",\"v7.4.068\",\"v7.4.069\",\"v7.4.070\",\"v7.4.071\",\"v7.4.072\",\"v7.4.073\",\"v7.4.074\",\"v7.4.075\",\"v7.4.076\",\"v7.4.077\",\"v7.4.078\",\"v7.4.079\",\"v7.4.080\",\"v7.4.081\",\"v7.4.082\",\"v7.4.083\",\"v7.4.084\",\"v7.4.085\",\"v7.4.086\",\"v7.4.087\",\"v7.4.088\",\"v7.4.089\",\"v7.4.090\",\"v7.4.091\",\"v7.4.092\",\"v7.4.093\",\"v7.4.094\",\"v7.4.095\",\"v7.4.096\",\"v7.4.097\",\"v7.4.098\",\"v7.4.099\",\"v7.4.100\",\"v7.4.1000\",\"v7.4.1001\",\"v7.4.1002\",\"v7.4.1003\",\"v7.4.1004\",\"v7.4.1005\",\"v7.4.1006\",\"v7.4.1007\",\"v7.4.1008\",\"v7.4.1009\",\"v7.4.101\",\"v7.4.1010\",\"v7.4.1011\",\"v7.4.1012\",\"v7.4.1013\",\"v7.4.1014\",\"v7.4.1015\",\"v7.4.1016\",\"v7.4.1017\",\"v7.4.1018\",\"v7.4.1019\",\"v7.4.102\",\"v7.4.1020\",\"v7.4.1021\",\"v7.4.1022\",\"v7.4.1023\",\"v7.4.1024\",\"v7.4.1025\",\"v7.4.1026\",\"v7.4.1027\",\"v7.4.1028\",\"v7.4.1029\",\"v7.4.103\",\"v7.4.1030\",\"v7.4.1031\",\"v7.4.1032\",\"v7.4.1033\",\"v7.4.1034\",\"v7.4.1035\",\"v7.4.1036\",\"v7.4.1037\",\"v7.4.1038\",\"v7.4.1039\",\"v7.4.104\",\"v7.4.1040\",\"v7.4.1041\",\"v7.4.1042\",\"v7.4.1043\",\"v7.4.1044\",\"v7.4.1045\",\"v7.4.1046\",\"v7.4.1047\",\"v7.4.1048\",\"v7.4.1049\",\"v7.4.105\",\"v7.4.1050\",\"v7.4.1051\",\"v7.4.1052\",\"v7.4.1053\",\"v7.4.1054\",\"v7.4.1055\",\"v7.4.1056\",\"v7.4.1057\",\"v7.4.1058\",\"v7.4.1059\",\"v7.4.106\",\"v7.4.1060\",\"v7.4.1061\",\"v7.4.1062\",\"v7.4.1063\",\"v7.4.1064\",\"v7.4.1065\",\"v7.4.1066\",\"v7.4.1067\",\"v7.4.1068\",\"v7.4.1069\",\"v7.4.107\",\"v7.4.1070\",\"v7.4.1071\",\"v7.4.1072\",\"v7.4.1073\",\"v7.4.1074\",\"v7.4.1075\",\"v7.4.1076\",\"v7.4.1077\",\"v7.4.1078\",\"v7.4.1079\",\"v7.4.108\",\"v7.4.1080\",\"v7.4.1081\",\"v7.4.1082\",\"v7.4.1083\",\"v7.4.1084\",\"v7.4.1085\",\"v7.4.1086\",\"v7.4.1087\",\"v7.4.1088\",\"v7.4.1089\",\"v7.4.109\",\"v7.4.1090\",\"v7.4.1091\",\"v7.4.1092\",\"v7.4.1093\",\"v7.4.1094\",\"v7.4.1095\",\"v7.4.1096\",\"v7.4.1097\",\"v7.4.1098\",\"v7.4.1099\",\"v7.4.110\",\"v7.4.1100\",\"v7.4.1101\",\"v7.4.1102\",\"v7.4.1103\",\"v7.4.1104\",\"v7.4.1105\",\"v7.4.1106\",\"v7.4.1107\",\"v7.4.1108\",\"v7.4.1109\",\"v7.4.111\",\"v7.4.1110\",\"v7.4.1111\",\"v7.4.1112\",\"v7.4.1113\",\"v7.4.1114\",\"v7.4.1115\",\"v7.4.1116\",\"v7.4.1117\",\"v7.4.1118\",\"v7.4.1119\",\"v7.4.112\",\"v7.4.1120\",\"v7.4.1121\",\"v7.4.1122\",\"v7.4.1123\",\"v7.4.1124\",\"v7.4.1125\",\"v7.4.1126\",\"v7.4.1127\",\"v7.4.1128\",\"v7.4.1129\",\"v7.4.113\",\"v7.4.1130\",\"v7.4.1131\",\"v7.4.1132\",\"v7.4.1133\",\"v7.4.1134\",\"v7.4.1135\",\"v7.4.1136\",\"v7.4.1137\",\"v7.4.1138\",\"v7.4.1139\",\"v7.4.114\",\"v7.4.1140\",\"v7.4.1141\",\"v7.4.1142\",\"v7.4.1143\",\"v7.4.1144\",\"v7.4.1145\",\"v7.4.1146\",\"v7.4.1147\",\"v7.4.1148\",\"v7.4.1149\",\"v7.4.115\",\"v7.4.1150\",\"v7.4.1151\",\"v7.4.1152\",\"v7.4.1153\",\"v7.4.1154\",\"v7.4.1155\",\"v7.4.1156\",\"v7.4.1157\",\"v7.4.1158\",\"v7.4.1159\",\"v7.4.116\",\"v7.4.1160\",\"v7.4.1161\",\"v7.4.1162\",\"v7.4.1163\",\"v7.4.1164\",\"v7.4.1165\",\"v7.4.1166\",\"v7.4.1167\",\"v7.4.1168\",\"v7.4.1169\",\"v7.4.117\",\"v7.4.1170\",\"v7.4.1171\",\"v7.4.1172\",\"v7.4.1173\",\"v7.4.1174\",\"v7.4.1175\",\"v7.4.1176\",\"v7.4.1177\",\"v7.4.1178\",\"v7.4.1179\",\"v7.4.118\",\"v7.4.1180\",\"v7.4.1181\",\"v7.4.1182\",\"v7.4.1183\",\"v
7.4.1184\",\"v7.4.1185\",\"v7.4.1186\",\"v7.4.1187\",\"v7.4.1188\",\"v7.4.1189\",\"v7.4.119\",\"v7.4.1190\",\"v7.4.1191\",\"v7.4.1192\",\"v7.4.1193\",\"v7.4.1194\",\"v7.4.1195\",\"v7.4.1196\",\"v7.4.1197\",\"v7.4.1198\",\"v7.4.1199\",\"v7.4.120\",\"v7.4.1200\",\"v7.4.1201\",\"v7.4.1202\",\"v7.4.1203\",\"v7.4.1204\",\"v7.4.1205\",\"v7.4.1206\",\"v7.4.1207\",\"v7.4.1208\",\"v7.4.1209\",\"v7.4.121\",\"v7.4.1210\",\"v7.4.1211\",\"v7.4.1212\",\"v7.4.1213\",\"v7.4.1214\",\"v7.4.1215\",\"v7.4.1216\",\"v7.4.1217\",\"v7.4.1218\",\"v7.4.1219\",\"v7.4.122\",\"v7.4.1220\",\"v7.4.1221\",\"v7.4.1222\",\"v7.4.1223\",\"v7.4.1224\",\"v7.4.1225\",\"v7.4.1226\",\"v7.4.1227\",\"v7.4.1228\",\"v7.4.1229\",\"v7.4.123\",\"v7.4.1230\",\"v7.4.1231\",\"v7.4.1232\",\"v7.4.1233\",\"v7.4.1234\",\"v7.4.1235\",\"v7.4.1236\",\"v7.4.1237\",\"v7.4.1238\",\"v7.4.1239\",\"v7.4.124\",\"v7.4.1240\",\"v7.4.1241\",\"v7.4.1242\",\"v7.4.1243\",\"v7.4.1244\",\"v7.4.1245\",\"v7.4.1246\",\"v7.4.1247\",\"v7.4.1248\",\"v7.4.1249\",\"v7.4.125\",\"v7.4.1250\",\"v7.4.1251\",\"v7.4.1252\",\"v7.4.1253\",\"v7.4.1254\",\"v7.4.1255\",\"v7.4.1256\",\"v7.4.1257\",\"v7.4.1258\",\"v7.4.1259\",\"v7.4.126\",\"v7.4.1260\",\"v7.4.1261\",\"v7.4.1262\",\"v7.4.1263\",\"v7.4.1264\",\"v7.4.1265\",\"v7.4.1266\",\"v7.4.1267\",\"v7.4.1268\",\"v7.4.1269\",\"v7.4.127\",\"v7.4.1270\",\"v7.4.1271\",\"v7.4.1272\",\"v7.4.1273\",\"v7.4.1274\",\"v7.4.1275\",\"v7.4.1276\",\"v7.4.1277\",\"v7.4.1278\",\"v7.4.1279\",\"v7.4.128\",\"v7.4.1280\",\"v7.4.1281\",\"v7.4.1282\",\"v7.4.1283\",\"v7.4.1284\",\"v7.4.1285\",\"v7.4.1286\",\"v7.4.1287\",\"v7.4.1288\",\"v7.4.1289\",\"v7.4.129\",\"v7.4.1290\",\"v7.4.1291\",\"v7.4.1292\",\"v7.4.1293\",\"v7.4.1294\",\"v7.4.1295\",\"v7.4.1296\",\"v7.4.1297\",\"v7.4.1298\",\"v7.4.1299\",\"v7.4.130\",\"v7.4.1300\",\"v7.4.1301\",\"v7.4.1302\",\"v7.4.1303\",\"v7.4.1304\",\"v7.4.1305\",\"v7.4.1306\",\"v7.4.1307\",\"v7.4.1308\",\"v7.4.1309\",\"v7.4.131\",\"v7.4.1310\",\"v7.4.1311\",\"v7.4.1312\",\"v7.4.1313\",\"v7.4.1314\",\"v7.4.1315\",\"v7.4.1316\",\"v7.4.1317\",\"v7.4.1318\",\"v7.4.1319\",\"v7.4.132\",\"v7.4.1320\",\"v7.4.1321\",\"v7.4.1322\",\"v7.4.1323\",\"v7.4.1324\",\"v7.4.1325\",\"v7.4.1326\",\"v7.4.1327\",\"v7.4.1328\",\"v7.4.1329\",\"v7.4.133\",\"v7.4.1330\",\"v7.4.1331\",\"v7.4.1332\",\"v7.4.1333\",\"v7.4.1334\",\"v7.4.1335\",\"v7.4.1336\",\"v7.4.1337\",\"v7.4.1338\",\"v7.4.1339\",\"v7.4.134\",\"v7.4.1340\",\"v7.4.1341\",\"v7.4.1342\",\"v7.4.1343\",\"v7.4.1344\",\"v7.4.1345\",\"v7.4.1346\",\"v7.4.1347\",\"v7.4.1348\",\"v7.4.1349\",\"v7.4.135\",\"v7.4.1350\",\"v7.4.1351\",\"v7.4.1352\",\"v7.4.1353\",\"v7.4.1354\",\"v7.4.1355\",\"v7.4.1356\",\"v7.4.1357\",\"v7.4.1358\",\"v7.4.1359\",\"v7.4.136\",\"v7.4.1360\",\"v7.4.1361\",\"v7.4.1362\",\"v7.4.1363\",\"v7.4.1364\",\"v7.4.1365\",\"v7.4.1366\",\"v7.4.1367\",\"v7.4.1368\",\"v7.4.1369\",\"v7.4.137\",\"v7.4.1370\",\"v7.4.1371\",\"v7.4.1372\",\"v7.4.1373\",\"v7.4.1374\",\"v7.4.1375\",\"v7.4.1376\",\"v7.4.1377\",\"v7.4.1378\",\"v7.4.1379\",\"v7.4.138\",\"v7.4.1380\",\"v7.4.1381\",\"v7.4.1382\",\"v7.4.1383\",\"v7.4.1384\",\"v7.4.1385\",\"v7.4.1386\",\"v7.4.1387\",\"v7.4.1388\",\"v7.4.1389\",\"v7.4.139\",\"v7.4.1390\",\"v7.4.1391\",\"v7.4.1392\",\"v7.4.1393\",\"v7.4.1394\",\"v7.4.1395\",\"v7.4.1396\",\"v7.4.1397\",\"v7.4.1398\",\"v7.4.1399\",\"v7.4.140\",\"v7.4.1400\",\"v7.4.1401\",\"v7.4.1402\",\"v7.4.1403\",\"v7.4.1404\",\"v7.4.1405\",\"v7.4.1406\",\"v7.4.1407\",\"v7.4.1408\",\"v7.4.1409\",\"v7.4.141\",\"v7.4.1410\",\"v7.4.1411\",\"v7.4.1412\",\"v7.4.1413\",\"v7.4.1414\",\"v7.4.1415\",\"v7.4.1416
\",\"v7.4.1417\",\"v7.4.1418\",\"v7.4.1419\",\"v7.4.142\",\"v7.4.1420\",\"v7.4.1421\",\"v7.4.1422\",\"v7.4.1423\",\"v7.4.1424\",\"v7.4.1425\",\"v7.4.1426\",\"v7.4.1427\",\"v7.4.1428\",\"v7.4.1429\",\"v7.4.143\",\"v7.4.1430\",\"v7.4.1431\",\"v7.4.1432\",\"v7.4.1433\",\"v7.4.1434\",\"v7.4.1435\",\"v7.4.1436\",\"v7.4.1437\",\"v7.4.1438\",\"v7.4.1439\",\"v7.4.144\",\"v7.4.1440\",\"v7.4.1441\",\"v7.4.1442\",\"v7.4.1443\",\"v7.4.1444\",\"v7.4.1445\",\"v7.4.1446\",\"v7.4.1447\",\"v7.4.1448\",\"v7.4.1449\",\"v7.4.145\",\"v7.4.1450\",\"v7.4.1451\",\"v7.4.1452\",\"v7.4.1453\",\"v7.4.1454\",\"v7.4.1455\",\"v7.4.1456\",\"v7.4.1457\",\"v7.4.1458\",\"v7.4.1459\",\"v7.4.146\",\"v7.4.1460\",\"v7.4.1461\",\"v7.4.1462\",\"v7.4.1463\",\"v7.4.1464\",\"v7.4.1465\",\"v7.4.1466\",\"v7.4.1467\",\"v7.4.1468\",\"v7.4.1469\",\"v7.4.147\",\"v7.4.1470\",\"v7.4.1471\",\"v7.4.1472\",\"v7.4.1473\",\"v7.4.1474\",\"v7.4.1475\",\"v7.4.1476\",\"v7.4.1477\",\"v7.4.1478\",\"v7.4.1479\",\"v7.4.148\",\"v7.4.1480\",\"v7.4.1481\",\"v7.4.1482\",\"v7.4.1483\",\"v7.4.1484\",\"v7.4.1485\",\"v7.4.1486\",\"v7.4.1487\",\"v7.4.1488\",\"v7.4.1489\",\"v7.4.149\",\"v7.4.1490\",\"v7.4.1491\",\"v7.4.1492\",\"v7.4.1493\",\"v7.4.1494\",\"v7.4.1495\",\"v7.4.1496\",\"v7.4.1497\",\"v7.4.1498\",\"v7.4.1499\",\"v7.4.150\",\"v7.4.1500\",\"v7.4.1501\",\"v7.4.1502\",\"v7.4.1503\",\"v7.4.1504\",\"v7.4.1505\",\"v7.4.1506\",\"v7.4.1507\",\"v7.4.1508\",\"v7.4.1509\",\"v7.4.151\",\"v7.4.1510\",\"v7.4.1511\",\"v7.4.1512\",\"v7.4.1513\",\"v7.4.1514\",\"v7.4.1515\",\"v7.4.1516\",\"v7.4.1517\",\"v7.4.1518\",\"v7.4.1519\",\"v7.4.152\",\"v7.4.1520\",\"v7.4.1521\",\"v7.4.1522\",\"v7.4.1523\",\"v7.4.1524\",\"v7.4.1525\",\"v7.4.1526\",\"v7.4.1527\",\"v7.4.1528\",\"v7.4.1529\",\"v7.4.153\",\"v7.4.1530\",\"v7.4.1531\",\"v7.4.1532\",\"v7.4.1533\",\"v7.4.1534\",\"v7.4.1535\",\"v7.4.1536\",\"v7.4.1537\",\"v7.4.1538\",\"v7.4.1539\",\"v7.4.154\",\"v7.4.1540\",\"v7.4.1541\",\"v7.4.1542\",\"v7.4.1543\",\"v7.4.1544\",\"v7.4.1545\",\"v7.4.1546\",\"v7.4.1547\",\"v7.4.1548\",\"v7.4.1549\",\"v7.4.155\",\"v7.4.1550\",\"v7.4.1551\",\"v7.4.1552\",\"v7.4.1553\",\"v7.4.1554\",\"v7.4.1555\",\"v7.4.1556\",\"v7.4.1557\",\"v7.4.1558\",\"v7.4.1559\",\"v7.4.156\",\"v7.4.1560\",\"v7.4.1561\",\"v7.4.1562\",\"v7.4.1563\",\"v7.4.1564\",\"v7.4.1565\",\"v7.4.1566\",\"v7.4.1567\",\"v7.4.1568\",\"v7.4.1569\",\"v7.4.157\",\"v7.4.1570\",\"v7.4.1571\",\"v7.4.1572\",\"v7.4.1573\",\"v7.4.1574\",\"v7.4.1575\",\"v7.4.1576\",\"v7.4.1577\",\"v7.4.1578\",\"v7.4.1579\",\"v7.4.158\",\"v7.4.1580\",\"v7.4.1581\",\"v7.4.1582\",\"v7.4.1583\",\"v7.4.1584\",\"v7.4.1585\",\"v7.4.1586\",\"v7.4.1587\",\"v7.4.1588\",\"v7.4.1589\",\"v7.4.159\",\"v7.4.1590\",\"v7.4.1591\",\"v7.4.1592\",\"v7.4.1593\",\"v7.4.1594\",\"v7.4.1595\",\"v7.4.1596\",\"v7.4.1597\",\"v7.4.1598\",\"v7.4.1599\",\"v7.4.160\",\"v7.4.1600\",\"v7.4.1601\",\"v7.4.1602\",\"v7.4.1603\",\"v7.4.1604\",\"v7.4.1605\",\"v7.4.1606\",\"v7.4.1607\",\"v7.4.1608\",\"v7.4.1609\",\"v7.4.161\",\"v7.4.1610\",\"v7.4.1611\",\"v7.4.1612\",\"v7.4.1613\",\"v7.4.1614\",\"v7.4.1615\",\"v7.4.1616\",\"v7.4.1617\",\"v7.4.1618\",\"v7.4.1619\",\"v7.4.162\",\"v7.4.1620\",\"v7.4.1621\",\"v7.4.1622\",\"v7.4.1623\",\"v7.4.1624\",\"v7.4.1625\",\"v7.4.1626\",\"v7.4.1627\",\"v7.4.1628\",\"v7.4.1629\",\"v7.4.163\",\"v7.4.1630\",\"v7.4.1631\",\"v7.4.1632\",\"v7.4.1633\",\"v7.4.1634\",\"v7.4.1635\",\"v7.4.1636\",\"v7.4.1637\",\"v7.4.1638\",\"v7.4.1639\",\"v7.4.164\",\"v7.4.1640\",\"v7.4.1641\",\"v7.4.1642\",\"v7.4.1643\",\"v7.4.1644\",\"v7.4.1645\",\"v7.4.1646\",\"v7.4.1647\",\"v7.4.1648\",\"v7.
4.1649\",\"v7.4.165\",\"v7.4.1650\",\"v7.4.1651\",\"v7.4.1652\",\"v7.4.1653\",\"v7.4.1654\",\"v7.4.1655\",\"v7.4.1656\",\"v7.4.1657\",\"v7.4.1658\",\"v7.4.1659\",\"v7.4.166\",\"v7.4.1660\",\"v7.4.1661\",\"v7.4.1662\",\"v7.4.1663\",\"v7.4.1664\",\"v7.4.1665\",\"v7.4.1666\",\"v7.4.1667\",\"v7.4.1668\",\"v7.4.1669\",\"v7.4.167\",\"v7.4.1670\",\"v7.4.1671\",\"v7.4.1672\",\"v7.4.1673\",\"v7.4.1674\",\"v7.4.1675\",\"v7.4.1676\",\"v7.4.1677\",\"v7.4.1678\",\"v7.4.1679\",\"v7.4.168\",\"v7.4.1680\",\"v7.4.1681\",\"v7.4.1682\",\"v7.4.1683\",\"v7.4.1684\",\"v7.4.1685\",\"v7.4.1686\",\"v7.4.1687\",\"v7.4.1688\",\"v7.4.1689\",\"v7.4.169\",\"v7.4.1690\",\"v7.4.1691\",\"v7.4.1692\",\"v7.4.1693\",\"v7.4.1694\",\"v7.4.1695\",\"v7.4.1696\",\"v7.4.1697\",\"v7.4.1698\",\"v7.4.1699\",\"v7.4.170\",\"v7.4.1700\",\"v7.4.1701\",\"v7.4.1702\",\"v7.4.1703\",\"v7.4.1704\",\"v7.4.1705\",\"v7.4.1706\",\"v7.4.1707\",\"v7.4.1708\",\"v7.4.1709\",\"v7.4.171\",\"v7.4.1710\",\"v7.4.1711\",\"v7.4.1712\",\"v7.4.1713\",\"v7.4.1714\",\"v7.4.1715\",\"v7.4.1716\",\"v7.4.1717\",\"v7.4.1718\",\"v7.4.1719\",\"v7.4.172\",\"v7.4.1720\",\"v7.4.1721\",\"v7.4.1722\",\"v7.4.1723\",\"v7.4.1724\",\"v7.4.1725\",\"v7.4.1726\",\"v7.4.1727\",\"v7.4.1728\",\"v7.4.1729\",\"v7.4.173\",\"v7.4.1730\",\"v7.4.1731\",\"v7.4.1732\",\"v7.4.1733\",\"v7.4.1734\",\"v7.4.1735\",\"v7.4.1736\",\"v7.4.1737\",\"v7.4.1738\",\"v7.4.1739\",\"v7.4.174\",\"v7.4.1740\",\"v7.4.1741\",\"v7.4.1742\",\"v7.4.1743\",\"v7.4.1744\",\"v7.4.1745\",\"v7.4.1746\",\"v7.4.1747\",\"v7.4.1748\",\"v7.4.1749\",\"v7.4.175\",\"v7.4.1750\",\"v7.4.1751\",\"v7.4.1752\",\"v7.4.1753\",\"v7.4.1754\",\"v7.4.1755\",\"v7.4.1756\",\"v7.4.1757\",\"v7.4.1758\",\"v7.4.1759\",\"v7.4.176\",\"v7.4.1760\",\"v7.4.1761\",\"v7.4.1762\",\"v7.4.1763\",\"v7.4.1765\",\"v7.4.1766\",\"v7.4.1767\",\"v7.4.1768\",\"v7.4.1769\",\"v7.4.177\",\"v7.4.1770\",\"v7.4.1771\",\"v7.4.1772\",\"v7.4.1773\",\"v7.4.1774\",\"v7.4.1775\",\"v7.4.1776\",\"v7.4.1777\",\"v7.4.1778\",\"v7.4.1779\",\"v7.4.178\",\"v7.4.1780\",\"v7.4.1781\",\"v7.4.1782\",\"v7.4.1783\",\"v7.4.1784\",\"v7.4.1785\",\"v7.4.1786\",\"v7.4.1787\",\"v7.4.1788\",\"v7.4.1789\",\"v7.4.179\",\"v7.4.1790\",\"v7.4.1791\",\"v7.4.1792\",\"v7.4.1793\",\"v7.4.1794\",\"v7.4.1795\",\"v7.4.1796\",\"v7.4.1797\",\"v7.4.1798\",\"v7.4.1799\",\"v7.4.180\",\"v7.4.1800\",\"v7.4.1801\",\"v7.4.1802\",\"v7.4.1803\",\"v7.4.1804\",\"v7.4.1805\",\"v7.4.1806\",\"v7.4.1807\",\"v7.4.1808\",\"v7.4.1809\",\"v7.4.181\",\"v7.4.1810\",\"v7.4.1811\",\"v7.4.1812\",\"v7.4.1813\",\"v7.4.1814\",\"v7.4.1815\",\"v7.4.1816\",\"v7.4.1817\",\"v7.4.1818\",\"v7.4.1819\",\"v7.4.182\",\"v7.4.1820\",\"v7.4.1821\",\"v7.4.1822\",\"v7.4.1823\",\"v7.4.1824\",\"v7.4.1825\",\"v7.4.1826\",\"v7.4.1827\",\"v7.4.1828\",\"v7.4.1829\",\"v7.4.183\",\"v7.4.1830\",\"v7.4.1831\",\"v7.4.1832\",\"v7.4.1833\",\"v7.4.1834\",\"v7.4.1835\",\"v7.4.1836\",\"v7.4.1837\",\"v7.4.1838\",\"v7.4.1839\",\"v7.4.184\",\"v7.4.1840\",\"v7.4.1841\",\"v7.4.1842\",\"v7.4.1843\",\"v7.4.1844\",\"v7.4.1845\",\"v7.4.1846\",\"v7.4.1847\",\"v7.4.1848\",\"v7.4.1849\",\"v7.4.185\",\"v7.4.1850\",\"v7.4.1851\",\"v7.4.1852\",\"v7.4.1853\",\"v7.4.1854\",\"v7.4.1855\",\"v7.4.1856\",\"v7.4.1857\",\"v7.4.1858\",\"v7.4.1859\",\"v7.4.186\",\"v7.4.1860\",\"v7.4.1861\",\"v7.4.1862\",\"v7.4.1863\",\"v7.4.1864\",\"v7.4.1865\",\"v7.4.1866\",\"v7.4.1867\",\"v7.4.1868\",\"v7.4.1869\",\"v7.4.187\",\"v7.4.1870\",\"v7.4.1871\",\"v7.4.1872\",\"v7.4.1873\",\"v7.4.1874\",\"v7.4.1875\",\"v7.4.1876\",\"v7.4.1877\",\"v7.4.1878\",\"v7.4.1879\",\"v7.4.188\",\"v7.4.1880\",\"v7.4.1881\",
\"v7.4.1882\",\"v7.4.1883\",\"v7.4.1884\",\"v7.4.1885\",\"v7.4.1886\",\"v7.4.1887\",\"v7.4.1888\",\"v7.4.1889\",\"v7.4.189\",\"v7.4.1890\",\"v7.4.1891\",\"v7.4.1892\",\"v7.4.1893\",\"v7.4.1894\",\"v7.4.1895\",\"v7.4.1896\",\"v7.4.1897\",\"v7.4.1898\",\"v7.4.1899\",\"v7.4.190\",\"v7.4.1900\",\"v7.4.1901\",\"v7.4.1902\",\"v7.4.1903\",\"v7.4.1904\",\"v7.4.1905\",\"v7.4.1906\",\"v7.4.1907\",\"v7.4.1908\",\"v7.4.1909\",\"v7.4.191\",\"v7.4.1910\",\"v7.4.1911\",\"v7.4.1912\",\"v7.4.1913\",\"v7.4.1914\",\"v7.4.1915\",\"v7.4.1916\",\"v7.4.1917\",\"v7.4.1918\",\"v7.4.1919\",\"v7.4.192\",\"v7.4.1920\",\"v7.4.1921\",\"v7.4.1922\",\"v7.4.1923\",\"v7.4.1924\",\"v7.4.1925\",\"v7.4.1926\",\"v7.4.1927\",\"v7.4.1928\",\"v7.4.1929\",\"v7.4.193\",\"v7.4.1930\",\"v7.4.1931\",\"v7.4.1932\",\"v7.4.1933\",\"v7.4.1934\",\"v7.4.1935\",\"v7.4.1936\",\"v7.4.1937\",\"v7.4.1938\",\"v7.4.1939\",\"v7.4.194\",\"v7.4.1940\",\"v7.4.1941\",\"v7.4.1942\",\"v7.4.1943\",\"v7.4.1944\",\"v7.4.1945\",\"v7.4.1946\",\"v7.4.1947\",\"v7.4.1948\",\"v7.4.1949\",\"v7.4.195\",\"v7.4.1950\",\"v7.4.1951\",\"v7.4.1952\",\"v7.4.1953\",\"v7.4.1954\",\"v7.4.1955\",\"v7.4.1956\",\"v7.4.1957\",\"v7.4.1958\",\"v7.4.1959\",\"v7.4.196\",\"v7.4.1960\",\"v7.4.1961\",\"v7.4.1962\",\"v7.4.1963\",\"v7.4.1964\",\"v7.4.1965\",\"v7.4.1966\",\"v7.4.1967\",\"v7.4.1968\",\"v7.4.1969\",\"v7.4.197\",\"v7.4.1970\",\"v7.4.1971\",\"v7.4.1972\",\"v7.4.1973\",\"v7.4.1974\",\"v7.4.1975\",\"v7.4.1976\",\"v7.4.1977\",\"v7.4.1978\",\"v7.4.1979\",\"v7.4.198\",\"v7.4.1980\",\"v7.4.1981\",\"v7.4.1982\",\"v7.4.1983\",\"v7.4.1984\",\"v7.4.1985\",\"v7.4.1986\",\"v7.4.1987\",\"v7.4.1988\",\"v7.4.1989\",\"v7.4.199\",\"v7.4.1990\",\"v7.4.1991\",\"v7.4.1992\",\"v7.4.1993\",\"v7.4.1994\",\"v7.4.1995\",\"v7.4.1996\",\"v7.4.1997\",\"v7.4.1998\",\"v7.4.1999\",\"v7.4.200\",\"v7.4.2000\",\"v7.4.2001\",\"v7.4.2002\",\"v7.4.2003\",\"v7.4.2004\",\"v7.4.2005\",\"v7.4.2006\",\"v7.4.2007\",\"v7.4.2008\",\"v7.4.2009\",\"v7.4.201\",\"v7.4.2010\",\"v7.4.2011\",\"v7.4.2012\",\"v7.4.2013\",\"v7.4.2014\",\"v7.4.2015\",\"v7.4.2016\",\"v7.4.2017\",\"v7.4.2018\",\"v7.4.2019\",\"v7.4.202\",\"v7.4.2020\",\"v7.4.2021\",\"v7.4.2022\",\"v7.4.2023\",\"v7.4.2024\",\"v7.4.2025\",\"v7.4.2026\",\"v7.4.2027\",\"v7.4.2028\",\"v7.4.2029\",\"v7.4.203\",\"v7.4.2030\",\"v7.4.2031\",\"v7.4.2032\",\"v7.4.2033\",\"v7.4.2034\",\"v7.4.2035\",\"v7.4.2036\",\"v7.4.2037\",\"v7.4.2038\",\"v7.4.2039\",\"v7.4.204\",\"v7.4.2040\",\"v7.4.2041\",\"v7.4.2042\",\"v7.4.2043\",\"v7.4.2044\",\"v7.4.2045\",\"v7.4.2046\",\"v7.4.2047\",\"v7.4.2048\",\"v7.4.2049\",\"v7.4.205\",\"v7.4.2050\",\"v7.4.2051\",\"v7.4.2052\",\"v7.4.2053\",\"v7.4.2054\",\"v7.4.2055\",\"v7.4.2056\",\"v7.4.2057\",\"v7.4.2058\",\"v7.4.2059\",\"v7.4.206\",\"v7.4.2060\",\"v7.4.2061\",\"v7.4.2062\",\"v7.4.2063\",\"v7.4.2064\",\"v7.4.2065\",\"v7.4.2066\",\"v7.4.2067\",\"v7.4.2068\",\"v7.4.2069\",\"v7.4.207\",\"v7.4.2070\",\"v7.4.2071\",\"v7.4.2072\",\"v7.4.2073\",\"v7.4.2074\",\"v7.4.2075\",\"v7.4.2076\",\"v7.4.2077\",\"v7.4.2078\",\"v7.4.2079\",\"v7.4.208\",\"v7.4.2080\",\"v7.4.2081\",\"v7.4.2082\",\"v7.4.2083\",\"v7.4.2084\",\"v7.4.2085\",\"v7.4.2086\",\"v7.4.2087\",\"v7.4.2088\",\"v7.4.2089\",\"v7.4.209\",\"v7.4.2090\",\"v7.4.2091\",\"v7.4.2092\",\"v7.4.2093\",\"v7.4.2094\",\"v7.4.2095\",\"v7.4.2096\",\"v7.4.2097\",\"v7.4.2098\",\"v7.4.2099\",\"v7.4.210\",\"v7.4.2100\",\"v7.4.2101\",\"v7.4.2102\",\"v7.4.2103\",\"v7.4.2104\",\"v7.4.2105\",\"v7.4.2106\",\"v7.4.2107\",\"v7.4.2108\",\"v7.4.2109\",\"v7.4.211\",\"v7.4.2110\",\"v7.4.2111\",\"v7.4.2112\",\"v7.4.2113\",\"v7.4.2
114\",\"v7.4.2115\",\"v7.4.2116\",\"v7.4.2117\",\"v7.4.2118\",\"v7.4.2119\",\"v7.4.212\",\"v7.4.2120\",\"v7.4.2121\",\"v7.4.2122\",\"v7.4.2123\",\"v7.4.2124\",\"v7.4.2125\",\"v7.4.2126\",\"v7.4.2127\",\"v7.4.2128\",\"v7.4.2129\",\"v7.4.213\",\"v7.4.2130\",\"v7.4.2131\",\"v7.4.2132\",\"v7.4.2133\",\"v7.4.2134\",\"v7.4.2135\",\"v7.4.2136\",\"v7.4.2137\",\"v7.4.2138\",\"v7.4.2139\",\"v7.4.214\",\"v7.4.2140\",\"v7.4.2141\",\"v7.4.2142\",\"v7.4.2143\",\"v7.4.2144\",\"v7.4.2145\",\"v7.4.2146\",\"v7.4.2147\",\"v7.4.2148\",\"v7.4.2149\",\"v7.4.215\",\"v7.4.2150\",\"v7.4.2151\",\"v7.4.2152\",\"v7.4.2153\",\"v7.4.2154\",\"v7.4.2155\",\"v7.4.2156\",\"v7.4.2157\",\"v7.4.2158\",\"v7.4.2159\",\"v7.4.216\",\"v7.4.2160\",\"v7.4.2161\",\"v7.4.2162\",\"v7.4.2163\",\"v7.4.2164\",\"v7.4.2165\",\"v7.4.2166\",\"v7.4.2167\",\"v7.4.2168\",\"v7.4.2169\",\"v7.4.217\",\"v7.4.2170\",\"v7.4.2171\",\"v7.4.2172\",\"v7.4.2173\",\"v7.4.2174\",\"v7.4.2175\",\"v7.4.2176\",\"v7.4.2177\",\"v7.4.2178\",\"v7.4.2179\",\"v7.4.218\",\"v7.4.2180\",\"v7.4.2181\",\"v7.4.2182\",\"v7.4.2183\",\"v7.4.2184\",\"v7.4.2185\",\"v7.4.2186\",\"v7.4.2187\",\"v7.4.2188\",\"v7.4.2189\",\"v7.4.219\",\"v7.4.2190\",\"v7.4.2191\",\"v7.4.2192\",\"v7.4.2193\",\"v7.4.2194\",\"v7.4.2195\",\"v7.4.2196\",\"v7.4.2197\",\"v7.4.2198\",\"v7.4.2199\",\"v7.4.220\",\"v7.4.2200\",\"v7.4.2201\",\"v7.4.2202\",\"v7.4.2203\",\"v7.4.2204\",\"v7.4.2205\",\"v7.4.2206\",\"v7.4.2207\",\"v7.4.2208\",\"v7.4.2209\",\"v7.4.221\",\"v7.4.2210\",\"v7.4.2211\",\"v7.4.2212\",\"v7.4.2213\",\"v7.4.2214\",\"v7.4.2215\",\"v7.4.2216\",\"v7.4.2217\",\"v7.4.2218\",\"v7.4.2219\",\"v7.4.222\",\"v7.4.2220\",\"v7.4.2221\",\"v7.4.2222\",\"v7.4.2223\",\"v7.4.2224\",\"v7.4.2225\",\"v7.4.2226\",\"v7.4.2227\",\"v7.4.2228\",\"v7.4.2229\",\"v7.4.223\",\"v7.4.2230\",\"v7.4.2231\",\"v7.4.2232\",\"v7.4.2233\",\"v7.4.2234\",\"v7.4.2235\",\"v7.4.2236\",\"v7.4.2237\",\"v7.4.2238\",\"v7.4.2239\",\"v7.4.224\",\"v7.4.2240\",\"v7.4.2241\",\"v7.4.2242\",\"v7.4.2243\",\"v7.4.2244\",\"v7.4.2245\",\"v7.4.2246\",\"v7.4.2247\",\"v7.4.2248\",\"v7.4.2249\",\"v7.4.225\",\"v7.4.2250\",\"v7.4.2251\",\"v7.4.2252\",\"v7.4.2253\",\"v7.4.2254\",\"v7.4.2255\",\"v7.4.2256\",\"v7.4.2257\",\"v7.4.2258\",\"v7.4.2259\",\"v7.4.226\",\"v7.4.2260\",\"v7.4.2261\",\"v7.4.2262\",\"v7.4.2263\",\"v7.4.2264\",\"v7.4.2265\",\"v7.4.2266\",\"v7.4.2267\",\"v7.4.2268\",\"v7.4.2269\",\"v7.4.227\",\"v7.4.2270\",\"v7.4.2271\",\"v7.4.2272\",\"v7.4.2273\",\"v7.4.2274\",\"v7.4.2275\",\"v7.4.2276\",\"v7.4.2277\",\"v7.4.2278\",\"v7.4.2279\",\"v7.4.228\",\"v7.4.2280\",\"v7.4.2281\",\"v7.4.2282\",\"v7.4.2283\",\"v7.4.2284\",\"v7.4.2285\",\"v7.4.2286\",\"v7.4.2287\",\"v7.4.2288\",\"v7.4.2289\",\"v7.4.229\",\"v7.4.2290\",\"v7.4.2291\",\"v7.4.2292\",\"v7.4.2293\",\"v7.4.2294\",\"v7.4.2295\",\"v7.4.2296\",\"v7.4.2297\",\"v7.4.2298\",\"v7.4.2299\",\"v7.4.230\",\"v7.4.2300\",\"v7.4.2301\",\"v7.4.2302\",\"v7.4.2303\",\"v7.4.2304\",\"v7.4.2305\",\"v7.4.2306\",\"v7.4.2307\",\"v7.4.2308\",\"v7.4.2309\",\"v7.4.231\",\"v7.4.2310\",\"v7.4.2311\",\"v7.4.2312\",\"v7.4.2313\",\"v7.4.2314\",\"v7.4.2315\",\"v7.4.2316\",\"v7.4.2317\",\"v7.4.2318\",\"v7.4.2319\",\"v7.4.232\",\"v7.4.2320\",\"v7.4.2321\",\"v7.4.2322\",\"v7.4.2323\",\"v7.4.2324\",\"v7.4.2325\",\"v7.4.2326\",\"v7.4.2327\",\"v7.4.2328\",\"v7.4.2329\",\"v7.4.233\",\"v7.4.2330\",\"v7.4.2331\",\"v7.4.2332\",\"v7.4.2333\",\"v7.4.2334\",\"v7.4.2335\",\"v7.4.2336\",\"v7.4.2337\",\"v7.4.2338\",\"v7.4.2339\",\"v7.4.234\",\"v7.4.2340\",\"v7.4.2341\",\"v7.4.2342\",\"v7.4.2343\",\"v7.4.2344\",\"v7.4.2345\",\"v7.4.2346\",\"
v7.4.2347\",\"v7.4.2348\",\"v7.4.2349\",\"v7.4.235\",\"v7.4.2350\",\"v7.4.2351\",\"v7.4.2352\",\"v7.4.2353\",\"v7.4.2354\",\"v7.4.2355\",\"v7.4.2356\",\"v7.4.2357\",\"v7.4.2358\",\"v7.4.2359\",\"v7.4.236\",\"v7.4.2360\",\"v7.4.2361\",\"v7.4.2362\",\"v7.4.2363\",\"v7.4.2364\",\"v7.4.2365\",\"v7.4.2366\",\"v7.4.2367\",\"v7.4.237\",\"v7.4.238\",\"v7.4.239\",\"v7.4.240\",\"v7.4.241\",\"v7.4.242\",\"v7.4.243\",\"v7.4.244\",\"v7.4.245\",\"v7.4.246\",\"v7.4.247\",\"v7.4.248\",\"v7.4.249\",\"v7.4.250\",\"v7.4.251\",\"v7.4.252\",\"v7.4.253\",\"v7.4.254\",\"v7.4.255\",\"v7.4.256\",\"v7.4.257\",\"v7.4.258\",\"v7.4.259\",\"v7.4.260\",\"v7.4.261\",\"v7.4.262\",\"v7.4.263\",\"v7.4.264\",\"v7.4.265\",\"v7.4.266\",\"v7.4.267\",\"v7.4.268\",\"v7.4.269\",\"v7.4.270\",\"v7.4.271\",\"v7.4.272\",\"v7.4.273\",\"v7.4.274\",\"v7.4.275\",\"v7.4.276\",\"v7.4.277\",\"v7.4.278\",\"v7.4.279\",\"v7.4.280\",\"v7.4.281\",\"v7.4.282\",\"v7.4.283\",\"v7.4.284\",\"v7.4.285\",\"v7.4.286\",\"v7.4.287\",\"v7.4.288\",\"v7.4.289\",\"v7.4.290\",\"v7.4.291\",\"v7.4.292\",\"v7.4.293\",\"v7.4.294\",\"v7.4.295\",\"v7.4.296\",\"v7.4.297\",\"v7.4.298\",\"v7.4.299\",\"v7.4.300\",\"v7.4.301\",\"v7.4.302\",\"v7.4.303\",\"v7.4.304\",\"v7.4.305\",\"v7.4.306\",\"v7.4.307\",\"v7.4.308\",\"v7.4.309\",\"v7.4.310\",\"v7.4.311\",\"v7.4.312\",\"v7.4.313\",\"v7.4.314\",\"v7.4.315\",\"v7.4.316\",\"v7.4.317\",\"v7.4.318\",\"v7.4.319\",\"v7.4.320\",\"v7.4.321\",\"v7.4.322\",\"v7.4.323\",\"v7.4.324\",\"v7.4.325\",\"v7.4.326\",\"v7.4.327\",\"v7.4.328\",\"v7.4.329\",\"v7.4.330\",\"v7.4.331\",\"v7.4.332\",\"v7.4.333\",\"v7.4.334\",\"v7.4.335\",\"v7.4.336\",\"v7.4.337\",\"v7.4.338\",\"v7.4.339\",\"v7.4.340\",\"v7.4.341\",\"v7.4.342\",\"v7.4.343\",\"v7.4.344\",\"v7.4.345\",\"v7.4.346\",\"v7.4.347\",\"v7.4.348\",\"v7.4.349\",\"v7.4.350\",\"v7.4.351\",\"v7.4.352\",\"v7.4.353\",\"v7.4.354\",\"v7.4.355\",\"v7.4.356\",\"v7.4.357\",\"v7.4.358\",\"v7.4.359\",\"v7.4.360\",\"v7.4.361\",\"v7.4.362\",\"v7.4.363\",\"v7.4.364\",\"v7.4.365\",\"v7.4.366\",\"v7.4.367\",\"v7.4.368\",\"v7.4.369\",\"v7.4.370\",\"v7.4.371\",\"v7.4.372\",\"v7.4.373\",\"v7.4.374\",\"v7.4.375\",\"v7.4.376\",\"v7.4.377\",\"v7.4.378\",\"v7.4.379\",\"v7.4.380\",\"v7.4.381\",\"v7.4.382\",\"v7.4.383\",\"v7.4.384\",\"v7.4.385\",\"v7.4.386\",\"v7.4.387\",\"v7.4.388\",\"v7.4.389\",\"v7.4.390\",\"v7.4.391\",\"v7.4.392\",\"v7.4.393\",\"v7.4.394\",\"v7.4.395\",\"v7.4.396\",\"v7.4.397\",\"v7.4.398\",\"v7.4.399\",\"v7.4.400\",\"v7.4.401\",\"v7.4.402\",\"v7.4.403\",\"v7.4.404\",\"v7.4.405\",\"v7.4.406\",\"v7.4.407\",\"v7.4.408\",\"v7.4.409\",\"v7.4.410\",\"v7.4.411\",\"v7.4.412\",\"v7.4.413\",\"v7.4.414\",\"v7.4.415\",\"v7.4.416\",\"v7.4.417\",\"v7.4.418\",\"v7.4.419\",\"v7.4.420\",\"v7.4.421\",\"v7.4.422\",\"v7.4.423\",\"v7.4.424\",\"v7.4.425\",\"v7.4.426\",\"v7.4.427\",\"v7.4.428\",\"v7.4.429\",\"v7.4.430\",\"v7.4.431\",\"v7.4.432\",\"v7.4.433\",\"v7.4.434\",\"v7.4.435\",\"v7.4.436\",\"v7.4.437\",\"v7.4.438\",\"v7.4.439\",\"v7.4.440\",\"v7.4.441\",\"v7.4.442\",\"v7.4.443\",\"v7.4.444\",\"v7.4.445\",\"v7.4.446\",\"v7.4.447\",\"v7.4.448\",\"v7.4.449\",\"v7.4.450\",\"v7.4.451\",\"v7.4.452\",\"v7.4.453\",\"v7.4.454\",\"v7.4.455\",\"v7.4.456\",\"v7.4.457\",\"v7.4.458\",\"v7.4.459\",\"v7.4.460\",\"v7.4.461\",\"v7.4.462\",\"v7.4.463\",\"v7.4.464\",\"v7.4.465\",\"v7.4.466\",\"v7.4.467\",\"v7.4.468\",\"v7.4.469\",\"v7.4.470\",\"v7.4.471\",\"v7.4.472\",\"v7.4.473\",\"v7.4.474\",\"v7.4.475\",\"v7.4.476\",\"v7.4.477\",\"v7.4.478\",\"v7.4.479\",\"v7.4.480\",\"v7.4.481\",\"v7.4.482\",\"v7.4.483\",\"v7.4.484\",\"v7.4.485\",
\"v7.4.486\",\"v7.4.487\",\"v7.4.488\",\"v7.4.489\",\"v7.4.490\",\"v7.4.491\",\"v7.4.492\",\"v7.4.493\",\"v7.4.494\",\"v7.4.495\",\"v7.4.496\",\"v7.4.497\",\"v7.4.498\",\"v7.4.499\",\"v7.4.500\",\"v7.4.501\",\"v7.4.502\",\"v7.4.503\",\"v7.4.504\",\"v7.4.505\",\"v7.4.506\",\"v7.4.507\",\"v7.4.508\",\"v7.4.509\",\"v7.4.510\",\"v7.4.511\",\"v7.4.512\",\"v7.4.513\",\"v7.4.514\",\"v7.4.515\",\"v7.4.516\",\"v7.4.517\",\"v7.4.518\",\"v7.4.519\",\"v7.4.520\",\"v7.4.521\",\"v7.4.522\",\"v7.4.523\",\"v7.4.524\",\"v7.4.525\",\"v7.4.526\",\"v7.4.527\",\"v7.4.528\",\"v7.4.529\",\"v7.4.530\",\"v7.4.531\",\"v7.4.532\",\"v7.4.533\",\"v7.4.534\",\"v7.4.535\",\"v7.4.536\",\"v7.4.537\",\"v7.4.538\",\"v7.4.539\",\"v7.4.540\",\"v7.4.541\",\"v7.4.542\",\"v7.4.543\",\"v7.4.544\",\"v7.4.545\",\"v7.4.546\",\"v7.4.547\",\"v7.4.548\",\"v7.4.549\",\"v7.4.550\",\"v7.4.551\",\"v7.4.552\",\"v7.4.553\",\"v7.4.554\",\"v7.4.555\",\"v7.4.556\",\"v7.4.557\",\"v7.4.558\",\"v7.4.559\",\"v7.4.560\",\"v7.4.561\",\"v7.4.562\",\"v7.4.563\",\"v7.4.564\",\"v7.4.565\",\"v7.4.566\",\"v7.4.567\",\"v7.4.568\",\"v7.4.569\",\"v7.4.570\",\"v7.4.571\",\"v7.4.572\",\"v7.4.573\",\"v7.4.574\",\"v7.4.575\",\"v7.4.576\",\"v7.4.577\",\"v7.4.578\",\"v7.4.579\",\"v7.4.580\",\"v7.4.581\",\"v7.4.582\",\"v7.4.583\",\"v7.4.584\",\"v7.4.585\",\"v7.4.586\",\"v7.4.587\",\"v7.4.588\",\"v7.4.589\",\"v7.4.590\",\"v7.4.591\",\"v7.4.592\",\"v7.4.593\",\"v7.4.594\",\"v7.4.595\",\"v7.4.596\",\"v7.4.597\",\"v7.4.598\",\"v7.4.599\",\"v7.4.600\",\"v7.4.601\",\"v7.4.602\",\"v7.4.603\",\"v7.4.604\",\"v7.4.605\",\"v7.4.606\",\"v7.4.607\",\"v7.4.608\",\"v7.4.609\",\"v7.4.610\",\"v7.4.611\",\"v7.4.612\",\"v7.4.613\",\"v7.4.614\",\"v7.4.615\",\"v7.4.616\",\"v7.4.617\",\"v7.4.618\",\"v7.4.619\",\"v7.4.620\",\"v7.4.621\",\"v7.4.622\",\"v7.4.623\",\"v7.4.624\",\"v7.4.625\",\"v7.4.626\",\"v7.4.627\",\"v7.4.628\",\"v7.4.629\",\"v7.4.630\",\"v7.4.631\",\"v7.4.632\",\"v7.4.633\",\"v7.4.634\",\"v7.4.635\",\"v7.4.636\",\"v7.4.637\",\"v7.4.638\",\"v7.4.639\",\"v7.4.640\",\"v7.4.641\",\"v7.4.642\",\"v7.4.643\",\"v7.4.644\",\"v7.4.645\",\"v7.4.646\",\"v7.4.647\",\"v7.4.648\",\"v7.4.649\",\"v7.4.650\",\"v7.4.651\",\"v7.4.652\",\"v7.4.653\",\"v7.4.654\",\"v7.4.655\",\"v7.4.656\",\"v7.4.657\",\"v7.4.658\",\"v7.4.659\",\"v7.4.660\",\"v7.4.661\",\"v7.4.662\",\"v7.4.663\",\"v7.4.664\",\"v7.4.665\",\"v7.4.666\",\"v7.4.667\",\"v7.4.668\",\"v7.4.669\",\"v7.4.670\",\"v7.4.671\",\"v7.4.672\",\"v7.4.673\",\"v7.4.674\",\"v7.4.675\",\"v7.4.676\",\"v7.4.677\",\"v7.4.678\",\"v7.4.679\",\"v7.4.680\",\"v7.4.681\",\"v7.4.682\",\"v7.4.683\",\"v7.4.684\",\"v7.4.685\",\"v7.4.686\",\"v7.4.687\",\"v7.4.688\",\"v7.4.689\",\"v7.4.690\",\"v7.4.691\",\"v7.4.692\",\"v7.4.693\",\"v7.4.694\",\"v7.4.695\",\"v7.4.696\",\"v7.4.697\",\"v7.4.698\",\"v7.4.699\",\"v7.4.700\",\"v7.4.701\",\"v7.4.702\",\"v7.4.703\",\"v7.4.704\",\"v7.4.705\",\"v7.4.706\",\"v7.4.707\",\"v7.4.708\",\"v7.4.709\",\"v7.4.710\",\"v7.4.711\",\"v7.4.712\",\"v7.4.713\",\"v7.4.714\",\"v7.4.715\",\"v7.4.716\",\"v7.4.717\",\"v7.4.718\",\"v7.4.719\",\"v7.4.720\",\"v7.4.721\",\"v7.4.722\",\"v7.4.723\",\"v7.4.724\",\"v7.4.725\",\"v7.4.726\",\"v7.4.727\",\"v7.4.728\",\"v7.4.729\",\"v7.4.730\",\"v7.4.731\",\"v7.4.732\",\"v7.4.733\",\"v7.4.734\",\"v7.4.735\",\"v7.4.736\",\"v7.4.737\",\"v7.4.738\",\"v7.4.739\",\"v7.4.740\",\"v7.4.741\",\"v7.4.742\",\"v7.4.743\",\"v7.4.744\",\"v7.4.745\",\"v7.4.746\",\"v7.4.747\",\"v7.4.748\",\"v7.4.749\",\"v7.4.750\",\"v7.4.751\",\"v7.4.752\",\"v7.4.753\",\"v7.4.754\",\"v7.4.755\",\"v7.4.756\",\"v7.4.757\",\"v7.4.758\",\"v7.4
.759\",\"v7.4.760\",\"v7.4.761\",\"v7.4.762\",\"v7.4.763\",\"v7.4.764\",\"v7.4.765\",\"v7.4.766\",\"v7.4.767\",\"v7.4.768\",\"v7.4.769\",\"v7.4.770\",\"v7.4.771\",\"v7.4.772\",\"v7.4.773\",\"v7.4.774\",\"v7.4.775\",\"v7.4.776\",\"v7.4.777\",\"v7.4.778\",\"v7.4.779\",\"v7.4.780\",\"v7.4.781\",\"v7.4.782\",\"v7.4.783\",\"v7.4.784\",\"v7.4.785\",\"v7.4.786\",\"v7.4.787\",\"v7.4.788\",\"v7.4.789\",\"v7.4.790\",\"v7.4.791\",\"v7.4.792\",\"v7.4.793\",\"v7.4.794\",\"v7.4.795\",\"v7.4.796\",\"v7.4.797\",\"v7.4.798\",\"v7.4.799\",\"v7.4.800\",\"v7.4.801\",\"v7.4.802\",\"v7.4.803\",\"v7.4.804\",\"v7.4.805\",\"v7.4.806\",\"v7.4.807\",\"v7.4.808\",\"v7.4.809\",\"v7.4.810\",\"v7.4.811\",\"v7.4.812\",\"v7.4.813\",\"v7.4.814\",\"v7.4.815\",\"v7.4.816\",\"v7.4.817\",\"v7.4.818\",\"v7.4.819\",\"v7.4.820\",\"v7.4.821\",\"v7.4.822\",\"v7.4.823\",\"v7.4.824\",\"v7.4.825\",\"v7.4.826\",\"v7.4.827\",\"v7.4.828\",\"v7.4.829\",\"v7.4.830\",\"v7.4.831\",\"v7.4.832\",\"v7.4.833\",\"v7.4.834\",\"v7.4.835\",\"v7.4.836\",\"v7.4.837\",\"v7.4.838\",\"v7.4.839\",\"v7.4.840\",\"v7.4.841\",\"v7.4.842\",\"v7.4.843\",\"v7.4.844\",\"v7.4.845\",\"v7.4.846\",\"v7.4.847\",\"v7.4.848\",\"v7.4.849\",\"v7.4.850\",\"v7.4.851\",\"v7.4.852\",\"v7.4.853\",\"v7.4.854\",\"v7.4.855\",\"v7.4.856\",\"v7.4.857\",\"v7.4.858\",\"v7.4.859\",\"v7.4.860\",\"v7.4.861\",\"v7.4.862\",\"v7.4.863\",\"v7.4.864\",\"v7.4.865\",\"v7.4.866\",\"v7.4.867\",\"v7.4.868\",\"v7.4.869\",\"v7.4.870\",\"v7.4.871\",\"v7.4.872\",\"v7.4.873\",\"v7.4.874\",\"v7.4.875\",\"v7.4.876\",\"v7.4.877\",\"v7.4.878\",\"v7.4.879\",\"v7.4.880\",\"v7.4.881\",\"v7.4.882\",\"v7.4.883\",\"v7.4.884\",\"v7.4.885\",\"v7.4.886\",\"v7.4.887\",\"v7.4.888\",\"v7.4.889\",\"v7.4.890\",\"v7.4.891\",\"v7.4.892\",\"v7.4.893\",\"v7.4.894\",\"v7.4.895\",\"v7.4.896\",\"v7.4.897\",\"v7.4.898\",\"v7.4.899\",\"v7.4.900\",\"v7.4.901\",\"v7.4.902\",\"v7.4.903\",\"v7.4.904\",\"v7.4.905\",\"v7.4.906\",\"v7.4.907\",\"v7.4.908\",\"v7.4.909\",\"v7.4.910\",\"v7.4.911\",\"v7.4.912\",\"v7.4.913\",\"v7.4.914\",\"v7.4.915\",\"v7.4.916\",\"v7.4.917\",\"v7.4.918\",\"v7.4.919\",\"v7.4.920\",\"v7.4.921\",\"v7.4.922\",\"v7.4.923\",\"v7.4.924\",\"v7.4.925\",\"v7.4.926\",\"v7.4.927\",\"v7.4.928\",\"v7.4.929\",\"v7.4.930\",\"v7.4.931\",\"v7.4.932\",\"v7.4.933\",\"v7.4.934\",\"v7.4.935\",\"v7.4.936\",\"v7.4.937\",\"v7.4.938\",\"v7.4.939\",\"v7.4.940\",\"v7.4.941\",\"v7.4.942\",\"v7.4.943\",\"v7.4.944\",\"v7.4.945\",\"v7.4.946\",\"v7.4.947\",\"v7.4.948\",\"v7.4.949\",\"v7.4.950\",\"v7.4.951\",\"v7.4.952\",\"v7.4.953\",\"v7.4.954\",\"v7.4.955\",\"v7.4.956\",\"v7.4.957\",\"v7.4.958\",\"v7.4.959\",\"v7.4.960\",\"v7.4.961\",\"v7.4.962\",\"v7.4.963\",\"v7.4.964\",\"v7.4.965\",\"v7.4.966\",\"v7.4.967\",\"v7.4.968\",\"v7.4.969\",\"v7.4.970\",\"v7.4.971\",\"v7.4.972\",\"v7.4.973\",\"v7.4.974\",\"v7.4.975\",\"v7.4.976\",\"v7.4.977\",\"v7.4.978\",\"v7.4.979\",\"v7.4.980\",\"v7.4.981\",\"v7.4.982\",\"v7.4.983\",\"v7.4.984\",\"v7.4.985\",\"v7.4.986\",\"v7.4.987\",\"v7.4.988\",\"v7.4.989\",\"v7.4.990\",\"v7.4.991\",\"v7.4.992\",\"v7.4.993\",\"v7.4.994\",\"v7.4.995\",\"v7.4.996\",\"v7.4.997\",\"v7.4.998\",\"v7.4.999\",\"v7.4a\",\"v7.4a.001\",\"v7.4a.002\",\"v7.4a.003\",\"v7.4a.004\",\"v7.4a.005\",\"v7.4a.006\",\"v7.4a.007\",\"v7.4a.008\",\"v7.4a.009\",\"v7.4a.010\",\"v7.4a.011\",\"v7.4a.012\",\"v7.4a.013\",\"v7.4a.014\",\"v7.4a.015\",\"v7.4a.016\",\"v7.4a.017\",\"v7.4a.018\",\"v7.4a.019\",\"v7.4a.020\",\"v7.4a.021\",\"v7.4a.022\",\"v7.4a.023\",\"v7.4a.024\",\"v7.4a.025\",\"v7.4a.026\",\"v7.4a.027\",\"v7.4a.028\",\"v7.4a.029\",\"v7.4a.030\
",\"v7.4a.031\",\"v7.4a.032\",\"v7.4a.033\",\"v7.4a.034\",\"v7.4a.035\",\"v7.4a.036\",\"v7.4a.037\",\"v7.4a.038\",\"v7.4a.039\",\"v7.4a.040\",\"v7.4a.041\",\"v7.4a.042\",\"v7.4a.043\",\"v7.4a.044\",\"v7.4a.045\",\"v7.4a.046\",\"v7.4a.047\",\"v7.4b.000\",\"v7.4b.001\",\"v7.4b.002\",\"v7.4b.003\",\"v7.4b.004\",\"v7.4b.005\",\"v7.4b.006\",\"v7.4b.007\",\"v7.4b.008\",\"v7.4b.009\",\"v7.4b.010\",\"v7.4b.011\",\"v7.4b.012\",\"v7.4b.013\",\"v7.4b.014\",\"v7.4b.015\",\"v7.4b.016\",\"v7.4b.017\",\"v7.4b.018\",\"v7.4b.019\",\"v7.4b.020\",\"v7.4b.021\",\"v7.4b.022\",\"v8.0.0000\",\"v8.0.0001\",\"v8.0.0002\",\"v8.0.0003\",\"v8.0.0004\",\"v8.0.0005\",\"v8.0.0006\",\"v8.0.0007\",\"v8.0.0008\",\"v8.0.0009\",\"v8.0.0010\",\"v8.0.0011\",\"v8.0.0012\",\"v8.0.0013\",\"v8.0.0014\",\"v8.0.0015\",\"v8.0.0016\",\"v8.0.0017\",\"v8.0.0018\",\"v8.0.0019\",\"v8.0.0020\",\"v8.0.0021\",\"v8.0.0022\",\"v8.0.0023\",\"v8.0.0024\",\"v8.0.0025\",\"v8.0.0026\",\"v8.0.0027\",\"v8.0.0028\",\"v8.0.0029\",\"v8.0.0030\",\"v8.0.0031\",\"v8.0.0032\",\"v8.0.0033\",\"v8.0.0034\",\"v8.0.0035\",\"v8.0.0036\",\"v8.0.0037\",\"v8.0.0038\",\"v8.0.0039\",\"v8.0.0040\",\"v8.0.0041\",\"v8.0.0042\",\"v8.0.0043\",\"v8.0.0044\",\"v8.0.0045\",\"v8.0.0046\",\"v8.0.0047\",\"v8.0.0048\",\"v8.0.0049\",\"v8.0.0050\",\"v8.0.0051\",\"v8.0.0052\",\"v8.0.0053\",\"v8.0.0054\",\"v8.0.0055\",\"v8.0.0056\",\"v8.0.0057\",\"v8.0.0058\",\"v8.0.0059\",\"v8.0.0060\",\"v8.0.0061\",\"v8.0.0062\",\"v8.0.0063\",\"v8.0.0064\",\"v8.0.0065\",\"v8.0.0066\",\"v8.0.0067\",\"v8.0.0068\",\"v8.0.0069\",\"v8.0.0070\",\"v8.0.0071\",\"v8.0.0072\",\"v8.0.0073\",\"v8.0.0074\",\"v8.0.0075\",\"v8.0.0076\",\"v8.0.0077\",\"v8.0.0078\",\"v8.0.0079\",\"v8.0.0080\",\"v8.0.0081\",\"v8.0.0082\",\"v8.0.0083\",\"v8.0.0084\",\"v8.0.0085\",\"v8.0.0086\",\"v8.0.0087\",\"v8.0.0088\",\"v8.0.0089\",\"v8.0.0090\",\"v8.0.0091\",\"v8.0.0092\",\"v8.0.0093\",\"v8.0.0094\",\"v8.0.0095\",\"v8.0.0096\",\"v8.0.0097\",\"v8.0.0098\",\"v8.0.0099\",\"v8.0.0100\",\"v8.0.0101\",\"v8.0.0102\",\"v8.0.0103\",\"v8.0.0104\",\"v8.0.0105\",\"v8.0.0106\",\"v8.0.0107\",\"v8.0.0108\",\"v8.0.0109\",\"v8.0.0110\",\"v8.0.0111\",\"v8.0.0112\",\"v8.0.0113\",\"v8.0.0114\",\"v8.0.0115\",\"v8.0.0116\",\"v8.0.0117\",\"v8.0.0118\",\"v8.0.0119\",\"v8.0.0120\",\"v8.0.0121\",\"v8.0.0122\",\"v8.0.0123\",\"v8.0.0124\",\"v8.0.0125\",\"v8.0.0126\",\"v8.0.0127\",\"v8.0.0128\",\"v8.0.0129\",\"v8.0.0130\",\"v8.0.0131\",\"v8.0.0132\",\"v8.0.0133\",\"v8.0.0134\",\"v8.0.0135\",\"v8.0.0136\",\"v8.0.0137\",\"v8.0.0138\",\"v8.0.0139\",\"v8.0.0140\",\"v8.0.0141\",\"v8.0.0142\",\"v8.0.0143\",\"v8.0.0144\",\"v8.0.0145\",\"v8.0.0146\",\"v8.0.0147\",\"v8.0.0148\",\"v8.0.0149\",\"v8.0.0150\",\"v8.0.0151\",\"v8.0.0152\",\"v8.0.0153\",\"v8.0.0154\",\"v8.0.0155\",\"v8.0.0156\",\"v8.0.0157\",\"v8.0.0158\",\"v8.0.0159\",\"v8.0.0160\",\"v8.0.0161\",\"v8.0.0162\",\"v8.0.0163\",\"v8.0.0164\",\"v8.0.0165\",\"v8.0.0166\",\"v8.0.0167\",\"v8.0.0168\",\"v8.0.0169\",\"v8.0.0170\",\"v8.0.0171\",\"v8.0.0172\",\"v8.0.0173\",\"v8.0.0174\",\"v8.0.0175\",\"v8.0.0176\",\"v8.0.0177\",\"v8.0.0178\",\"v8.0.0179\",\"v8.0.0180\",\"v8.0.0181\",\"v8.0.0182\",\"v8.0.0183\",\"v8.0.0184\",\"v8.0.0185\",\"v8.0.0186\",\"v8.0.0187\",\"v8.0.0188\",\"v8.0.0189\",\"v8.0.0190\",\"v8.0.0191\",\"v8.0.0192\",\"v8.0.0193\",\"v8.0.0194\",\"v8.0.0195\",\"v8.0.0196\",\"v8.0.0197\",\"v8.0.0198\",\"v8.0.0199\",\"v8.0.0200\",\"v8.0.0201\",\"v8.0.0202\",\"v8.0.0203\",\"v8.0.0204\",\"v8.0.0205\",\"v8.0.0206\",\"v8.0.0207\",\"v8.0.0208\",\"v8.0.0209\",\"v8.0.0210\",\"v8.0.0211\",\"v8.0.0212\",\"v8.0.0213
\",\"v8.0.0214\",\"v8.0.0215\",\"v8.0.0216\",\"v8.0.0217\",\"v8.0.0218\",\"v8.0.0219\",\"v8.0.0220\",\"v8.0.0221\",\"v8.0.0222\",\"v8.0.0223\",\"v8.0.0224\",\"v8.0.0225\",\"v8.0.0226\",\"v8.0.0227\",\"v8.0.0228\",\"v8.0.0229\",\"v8.0.0230\",\"v8.0.0231\",\"v8.0.0232\",\"v8.0.0233\",\"v8.0.0234\",\"v8.0.0235\",\"v8.0.0236\",\"v8.0.0237\",\"v8.0.0238\",\"v8.0.0239\",\"v8.0.0240\",\"v8.0.0241\",\"v8.0.0242\",\"v8.0.0243\",\"v8.0.0244\",\"v8.0.0245\",\"v8.0.0246\",\"v8.0.0247\",\"v8.0.0248\",\"v8.0.0249\",\"v8.0.0250\",\"v8.0.0251\",\"v8.0.0252\",\"v8.0.0253\",\"v8.0.0254\",\"v8.0.0255\",\"v8.0.0256\",\"v8.0.0257\",\"v8.0.0258\",\"v8.0.0259\",\"v8.0.0260\",\"v8.0.0261\",\"v8.0.0262\",\"v8.0.0263\",\"v8.0.0264\",\"v8.0.0265\",\"v8.0.0266\",\"v8.0.0267\",\"v8.0.0268\",\"v8.0.0269\",\"v8.0.0270\",\"v8.0.0271\",\"v8.0.0272\",\"v8.0.0273\",\"v8.0.0274\",\"v8.0.0275\",\"v8.0.0276\",\"v8.0.0277\",\"v8.0.0278\",\"v8.0.0279\",\"v8.0.0280\",\"v8.0.0281\",\"v8.0.0282\",\"v8.0.0283\",\"v8.0.0284\",\"v8.0.0285\",\"v8.0.0286\",\"v8.0.0287\",\"v8.0.0288\",\"v8.0.0289\",\"v8.0.0290\",\"v8.0.0291\",\"v8.0.0292\",\"v8.0.0293\",\"v8.0.0294\",\"v8.0.0295\",\"v8.0.0296\",\"v8.0.0297\",\"v8.0.0298\",\"v8.0.0299\",\"v8.0.0300\",\"v8.0.0301\",\"v8.0.0302\",\"v8.0.0303\",\"v8.0.0304\",\"v8.0.0305\",\"v8.0.0306\",\"v8.0.0307\",\"v8.0.0308\",\"v8.0.0309\",\"v8.0.0310\",\"v8.0.0311\",\"v8.0.0312\",\"v8.0.0313\",\"v8.0.0314\",\"v8.0.0315\",\"v8.0.0316\",\"v8.0.0317\",\"v8.0.0318\",\"v8.0.0319\",\"v8.0.0320\",\"v8.0.0321\",\"v8.0.0322\",\"v8.0.0323\",\"v8.0.0324\",\"v8.0.0325\",\"v8.0.0326\",\"v8.0.0327\",\"v8.0.0328\",\"v8.0.0329\",\"v8.0.0330\",\"v8.0.0331\",\"v8.0.0332\",\"v8.0.0333\",\"v8.0.0334\",\"v8.0.0335\",\"v8.0.0336\",\"v8.0.0337\",\"v8.0.0338\",\"v8.0.0339\",\"v8.0.0340\",\"v8.0.0341\",\"v8.0.0342\",\"v8.0.0343\",\"v8.0.0344\",\"v8.0.0345\",\"v8.0.0346\",\"v8.0.0347\",\"v8.0.0348\",\"v8.0.0349\",\"v8.0.0350\",\"v8.0.0351\",\"v8.0.0352\",\"v8.0.0353\",\"v8.0.0354\",\"v8.0.0355\",\"v8.0.0356\",\"v8.0.0357\",\"v8.0.0358\",\"v8.0.0359\",\"v8.0.0360\",\"v8.0.0361\",\"v8.0.0362\",\"v8.0.0363\",\"v8.0.0364\",\"v8.0.0365\",\"v8.0.0366\",\"v8.0.0367\",\"v8.0.0368\",\"v8.0.0369\",\"v8.0.0370\",\"v8.0.0371\",\"v8.0.0372\",\"v8.0.0373\",\"v8.0.0374\",\"v8.0.0375\",\"v8.0.0376\"],\"database_specific\":{\"source\":\"https://storage.googleapis.com/cve-osv-conversion/osv-output/CVE-2017-6349.json\"}}],\"schema_version\":\"1.6.0\",\"severity\":[{\"type\":\"CVSS_V3\",\"score\":\"CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H\"}]}", + "modified": "2025-08-07T20:01:58.452Z" + } + } +] \ No newline at end of file diff --git a/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_rpm.json b/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_rpm.json new file mode 100644 index 000000000..34d692fae --- /dev/null +++ b/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_rpm.json @@ -0,0 +1,11 @@ +[ + { + "model": "import_observations.osv_cache", + "pk": 1, + "fields": { + "osv_id": "RHSA-2023:6738", + "data": "{\"id\":\"RHSA-2023:6738\",\"summary\":\"Red Hat Security Advisory: java-21-openjdk security and bug fix 
update\",\"modified\":\"2025-04-04T01:07:14.446058Z\",\"published\":\"2024-09-16T13:34:13Z\",\"upstream\":[\"CVE-2023-22025\",\"CVE-2023-22081\"],\"references\":[{\"type\":\"ADVISORY\",\"url\":\"https://access.redhat.com/errata/RHSA-2023:6738\"},{\"type\":\"ARTICLE\",\"url\":\"https://access.redhat.com/security/updates/classification/#moderate\"},{\"type\":\"REPORT\",\"url\":\"https://bugzilla.redhat.com/show_bug.cgi?id=2243627\"},{\"type\":\"REPORT\",\"url\":\"https://bugzilla.redhat.com/show_bug.cgi?id=2243805\"},{\"type\":\"ADVISORY\",\"url\":\"https://security.access.redhat.com/data/csaf/v2/advisories/2023/rhsa-2023_6738.json\"},{\"type\":\"REPORT\",\"url\":\"https://access.redhat.com/security/cve/CVE-2023-22025\"},{\"type\":\"ADVISORY\",\"url\":\"https://www.cve.org/CVERecord?id=CVE-2023-22025\"},{\"type\":\"ADVISORY\",\"url\":\"https://nvd.nist.gov/vuln/detail/CVE-2023-22025\"},{\"type\":\"REPORT\",\"url\":\"https://access.redhat.com/security/cve/CVE-2023-22081\"},{\"type\":\"ADVISORY\",\"url\":\"https://www.cve.org/CVERecord?id=CVE-2023-22081\"},{\"type\":\"ADVISORY\",\"url\":\"https://nvd.nist.gov/vuln/detail/CVE-2023-22081\"}],\"affected\":[{\"package\":{\"name\":\"java-21-openjdk\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-debugsource\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-debugsource\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-demo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-demo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-demo-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-demo-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-demo-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-demo-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel\",\"ecosystem\":\"Red 
Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel-fastdebug-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel-fastdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel-slowdebug-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel-slowdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-fastdebug-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-fastdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless-debuginfo\",\"ecosystem\":\"Red 
Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless-fastdebug-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless-fastdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless-slowdebug-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless-slowdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-javadoc\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-javadoc\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-javadoc-zip\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-javadoc-zip\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-jmods\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-jmods\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-jmods-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-jmods-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-jmods-slowdebug\",\"ecosystem\":\"Red 
Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-jmods-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-slowdebug-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-slowdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-src\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-src\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-src-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-src-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-src-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-src-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-static-libs\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-static-libs\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-static-libs-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-static-libs-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-static-libs-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::appstream\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-static-libs-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk\",\"ecosystem\":\"Red 
Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-debugsource\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-debugsource\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-demo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-demo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-demo-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-demo-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-demo-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-demo-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel-fastdebug-debuginfo\",\"ecosystem\":\"Red 
Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel-fastdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-devel-slowdebug-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-devel-slowdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-fastdebug-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-fastdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless-fastdebug-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless-fastdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless-slowdebug\",\"ecosystem\":\"Red 
Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-headless-slowdebug-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-headless-slowdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-javadoc\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-javadoc\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-javadoc-zip\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-javadoc-zip\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-jmods\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-jmods\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-jmods-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-jmods-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-jmods-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-jmods-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-slowdebug-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-slowdebug-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-src\",\"ecosystem\":\"Red 
Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-src\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-src-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-src-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-src-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-src-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-static-libs\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-static-libs\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-static-libs-fastdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-static-libs-fastdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}},{\"package\":{\"name\":\"java-21-openjdk-static-libs-slowdebug\",\"ecosystem\":\"Red Hat:enterprise_linux:9::crb\",\"purl\":\"pkg:rpm/redhat/java-21-openjdk-static-libs-slowdebug\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:21.0.1.0.12-2.el9\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2023:6738.json\"}}],\"schema_version\":\"1.6.0\",\"severity\":[{\"type\":\"CVSS_V3\",\"score\":\"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L\"}]}", + "modified": "2025-08-07T20:01:58.452Z" + } + } +] \ No newline at end of file diff --git a/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_rpm_ecosystem.json b/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_rpm_ecosystem.json new file mode 100644 index 000000000..b98bde7f1 --- /dev/null +++ b/backend/unittests/import_observations/parsers/osv/files/fixtures_osv_cache_rpm_ecosystem.json @@ -0,0 +1,11 @@ +[ + { + "model": "import_observations.osv_cache", + "pk": 1, + "fields": { + "osv_id": "RHSA-2015:1115", + "data": "{\"id\":\"RHSA-2015:1115\",\"summary\":\"Red Hat Security Advisory: openssl security 
update\",\"modified\":\"2025-09-11T10:41:27Z\",\"published\":\"2024-09-15T22:38:06Z\",\"upstream\":[\"CVE-2014-8176\",\"CVE-2015-1789\",\"CVE-2015-1790\",\"CVE-2015-1791\",\"CVE-2015-1792\",\"CVE-2015-3216\"],\"references\":[{\"type\":\"ADVISORY\",\"url\":\"https://access.redhat.com/errata/RHSA-2015:1115\"},{\"type\":\"ARTICLE\",\"url\":\"https://access.redhat.com/security/updates/classification/#moderate\"},{\"type\":\"ARTICLE\",\"url\":\"https://www.openssl.org/news/secadv_20150611.txt\"},{\"type\":\"REPORT\",\"url\":\"https://bugzilla.redhat.com/show_bug.cgi?id=1227574\"},{\"type\":\"REPORT\",\"url\":\"https://bugzilla.redhat.com/show_bug.cgi?id=1228603\"},{\"type\":\"REPORT\",\"url\":\"https://bugzilla.redhat.com/show_bug.cgi?id=1228604\"},{\"type\":\"REPORT\",\"url\":\"https://bugzilla.redhat.com/show_bug.cgi?id=1228607\"},{\"type\":\"REPORT\",\"url\":\"https://bugzilla.redhat.com/show_bug.cgi?id=1228608\"},{\"type\":\"REPORT\",\"url\":\"https://bugzilla.redhat.com/show_bug.cgi?id=1228611\"},{\"type\":\"ADVISORY\",\"url\":\"https://security.access.redhat.com/data/csaf/v2/advisories/2015/rhsa-2015_1115.json\"},{\"type\":\"REPORT\",\"url\":\"https://access.redhat.com/security/cve/CVE-2014-8176\"},{\"type\":\"ADVISORY\",\"url\":\"https://www.cve.org/CVERecord?id=CVE-2014-8176\"},{\"type\":\"ADVISORY\",\"url\":\"https://nvd.nist.gov/vuln/detail/CVE-2014-8176\"},{\"type\":\"REPORT\",\"url\":\"https://access.redhat.com/security/cve/CVE-2015-1789\"},{\"type\":\"ADVISORY\",\"url\":\"https://www.cve.org/CVERecord?id=CVE-2015-1789\"},{\"type\":\"ADVISORY\",\"url\":\"https://nvd.nist.gov/vuln/detail/CVE-2015-1789\"},{\"type\":\"REPORT\",\"url\":\"https://access.redhat.com/security/cve/CVE-2015-1790\"},{\"type\":\"ADVISORY\",\"url\":\"https://www.cve.org/CVERecord?id=CVE-2015-1790\"},{\"type\":\"ADVISORY\",\"url\":\"https://nvd.nist.gov/vuln/detail/CVE-2015-1790\"},{\"type\":\"REPORT\",\"url\":\"https://access.redhat.com/security/cve/CVE-2015-1791\"},{\"type\":\"ADVISORY\",\"url\":\"https://www.cve.org/CVERecord?id=CVE-2015-1791\"},{\"type\":\"ADVISORY\",\"url\":\"https://nvd.nist.gov/vuln/detail/CVE-2015-1791\"},{\"type\":\"REPORT\",\"url\":\"https://access.redhat.com/security/cve/CVE-2015-1792\"},{\"type\":\"ADVISORY\",\"url\":\"https://www.cve.org/CVERecord?id=CVE-2015-1792\"},{\"type\":\"ADVISORY\",\"url\":\"https://nvd.nist.gov/vuln/detail/CVE-2015-1792\"},{\"type\":\"REPORT\",\"url\":\"https://access.redhat.com/security/cve/CVE-2015-3216\"},{\"type\":\"ADVISORY\",\"url\":\"https://www.cve.org/CVERecord?id=CVE-2015-3216\"},{\"type\":\"ADVISORY\",\"url\":\"https://nvd.nist.gov/vuln/detail/CVE-2015-3216\"}],\"affected\":[{\"package\":{\"name\":\"openssl\",\"ecosystem\":\"Red Hat:enterprise_linux:6::client\",\"purl\":\"pkg:rpm/redhat/openssl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:6::client\",\"purl\":\"pkg:rpm/redhat/openssl-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-devel\",\"ecosystem\":\"Red 
Hat:enterprise_linux:6::client\",\"purl\":\"pkg:rpm/redhat/openssl-devel\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-perl\",\"ecosystem\":\"Red Hat:enterprise_linux:6::client\",\"purl\":\"pkg:rpm/redhat/openssl-perl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-static\",\"ecosystem\":\"Red Hat:enterprise_linux:6::client\",\"purl\":\"pkg:rpm/redhat/openssl-static\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl\",\"ecosystem\":\"Red Hat:enterprise_linux:6::computenode\",\"purl\":\"pkg:rpm/redhat/openssl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:6::computenode\",\"purl\":\"pkg:rpm/redhat/openssl-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-devel\",\"ecosystem\":\"Red Hat:enterprise_linux:6::computenode\",\"purl\":\"pkg:rpm/redhat/openssl-devel\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-perl\",\"ecosystem\":\"Red Hat:enterprise_linux:6::computenode\",\"purl\":\"pkg:rpm/redhat/openssl-perl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-static\",\"ecosystem\":\"Red Hat:enterprise_linux:6::computenode\",\"purl\":\"pkg:rpm/redhat/openssl-static\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl\",\"ecosystem\":\"Red Hat:enterprise_linux:6::server\",\"purl\":\"pkg:rpm/redhat/openssl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:6::server\",\"purl\":\"pkg:rpm/redhat/openssl-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-devel\",\"ecosystem\":\"Red 
Hat:enterprise_linux:6::server\",\"purl\":\"pkg:rpm/redhat/openssl-devel\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-perl\",\"ecosystem\":\"Red Hat:enterprise_linux:6::server\",\"purl\":\"pkg:rpm/redhat/openssl-perl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-static\",\"ecosystem\":\"Red Hat:enterprise_linux:6::server\",\"purl\":\"pkg:rpm/redhat/openssl-static\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl\",\"ecosystem\":\"Red Hat:enterprise_linux:6::workstation\",\"purl\":\"pkg:rpm/redhat/openssl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:6::workstation\",\"purl\":\"pkg:rpm/redhat/openssl-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-devel\",\"ecosystem\":\"Red Hat:enterprise_linux:6::workstation\",\"purl\":\"pkg:rpm/redhat/openssl-devel\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-perl\",\"ecosystem\":\"Red Hat:enterprise_linux:6::workstation\",\"purl\":\"pkg:rpm/redhat/openssl-perl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-static\",\"ecosystem\":\"Red Hat:enterprise_linux:6::workstation\",\"purl\":\"pkg:rpm/redhat/openssl-static\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"0:1.0.1e-30.el6_6.11\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl\",\"ecosystem\":\"Red Hat:enterprise_linux:7::client\",\"purl\":\"pkg:rpm/redhat/openssl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:7::client\",\"purl\":\"pkg:rpm/redhat/openssl-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-devel\",\"ecosystem\":\"Red 
Hat:enterprise_linux:7::client\",\"purl\":\"pkg:rpm/redhat/openssl-devel\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-libs\",\"ecosystem\":\"Red Hat:enterprise_linux:7::client\",\"purl\":\"pkg:rpm/redhat/openssl-libs\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-perl\",\"ecosystem\":\"Red Hat:enterprise_linux:7::client\",\"purl\":\"pkg:rpm/redhat/openssl-perl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-static\",\"ecosystem\":\"Red Hat:enterprise_linux:7::client\",\"purl\":\"pkg:rpm/redhat/openssl-static\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl\",\"ecosystem\":\"Red Hat:enterprise_linux:7::computenode\",\"purl\":\"pkg:rpm/redhat/openssl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:7::computenode\",\"purl\":\"pkg:rpm/redhat/openssl-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-devel\",\"ecosystem\":\"Red Hat:enterprise_linux:7::computenode\",\"purl\":\"pkg:rpm/redhat/openssl-devel\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-libs\",\"ecosystem\":\"Red Hat:enterprise_linux:7::computenode\",\"purl\":\"pkg:rpm/redhat/openssl-libs\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-perl\",\"ecosystem\":\"Red Hat:enterprise_linux:7::computenode\",\"purl\":\"pkg:rpm/redhat/openssl-perl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-static\",\"ecosystem\":\"Red Hat:enterprise_linux:7::computenode\",\"purl\":\"pkg:rpm/redhat/openssl-static\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl\",\"ecosystem\":\"Red 
Hat:enterprise_linux:7::server\",\"purl\":\"pkg:rpm/redhat/openssl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.ael7b_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:7::server\",\"purl\":\"pkg:rpm/redhat/openssl-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.ael7b_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-devel\",\"ecosystem\":\"Red Hat:enterprise_linux:7::server\",\"purl\":\"pkg:rpm/redhat/openssl-devel\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.ael7b_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-libs\",\"ecosystem\":\"Red Hat:enterprise_linux:7::server\",\"purl\":\"pkg:rpm/redhat/openssl-libs\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.ael7b_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-perl\",\"ecosystem\":\"Red Hat:enterprise_linux:7::server\",\"purl\":\"pkg:rpm/redhat/openssl-perl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.ael7b_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-static\",\"ecosystem\":\"Red Hat:enterprise_linux:7::server\",\"purl\":\"pkg:rpm/redhat/openssl-static\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.ael7b_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl\",\"ecosystem\":\"Red Hat:enterprise_linux:7::workstation\",\"purl\":\"pkg:rpm/redhat/openssl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-debuginfo\",\"ecosystem\":\"Red Hat:enterprise_linux:7::workstation\",\"purl\":\"pkg:rpm/redhat/openssl-debuginfo\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-devel\",\"ecosystem\":\"Red Hat:enterprise_linux:7::workstation\",\"purl\":\"pkg:rpm/redhat/openssl-devel\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-libs\",\"ecosystem\":\"Red Hat:enterprise_linux:7::workstation\",\"purl\":\"pkg:rpm/redhat/openssl-libs\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-perl\",\"ecosystem\":\"Red 
Hat:enterprise_linux:7::workstation\",\"purl\":\"pkg:rpm/redhat/openssl-perl\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}},{\"package\":{\"name\":\"openssl-static\",\"ecosystem\":\"Red Hat:enterprise_linux:7::workstation\",\"purl\":\"pkg:rpm/redhat/openssl-static\"},\"ranges\":[{\"type\":\"ECOSYSTEM\",\"events\":[{\"introduced\":\"0\"},{\"fixed\":\"1:1.0.1e-42.el7_1.8\"}]}],\"database_specific\":{\"source\":\"https://security.access.redhat.com/data/osv/RHSA-2015:1115.json\"}}],\"schema_version\":\"1.7.3\",\"severity\":[{\"type\":\"CVSS_V3\",\"score\":\"CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H\"}]}", + "modified": "2025-09-19T07:26:00.000Z" + } + } +] diff --git a/backend/unittests/import_observations/parsers/osv/test_fill_osv_cache.py b/backend/unittests/import_observations/parsers/osv/test_fill_osv_cache.py new file mode 100644 index 000000000..35784f091 --- /dev/null +++ b/backend/unittests/import_observations/parsers/osv/test_fill_osv_cache.py @@ -0,0 +1,99 @@ +from datetime import datetime, timezone +from unittest import TestCase +from unittest.mock import MagicMock, patch + +from application.import_observations.models import OSV_Cache + +# Adjust these imports based on your actual file structure +from application.import_observations.parsers.osv.parser import ( + OSV_Vulnerability, + OSVParser, +) + + +class TestOSVParserCache(TestCase): + def setUp(self): + self.parser = OSVParser() + self.now = datetime(2023, 1, 1, tzinfo=timezone.utc) + + @patch("application.import_observations.models.OSV_Cache.objects") + @patch("requests.get") + def test_fill_osv_cache_invalidation_and_deletion(self, mock_get, mock_objects): + """ + Scenario: Cache has stale data. + Verifies the .filter(...).delete() chain works and triggers a refresh. + """ + # 1. Setup Input: We have an update for CVE-OLD + new_date = datetime(2024, 1, 1, tzinfo=timezone.utc) + old_date = datetime(2020, 1, 1, tzinfo=timezone.utc) + vuln = OSV_Vulnerability(id="CVE-OLD", modified=new_date) + + # 2. Mock behavior for the stale data check + stale_item = MagicMock(spec=OSV_Cache) + stale_item.osv_id = "CVE-OLD" + stale_item.modified = old_date + + # This mocks the first filter call: OSV_Cache.objects.filter(osv_id__in=...) + # We make it return a list of items for the logic that builds valid/invalid IDs + mock_objects.filter.return_value = [stale_item] + + # 3. Mock the Chained Deletion + # For the line: OSV_Cache.objects.filter(osv_id__in=invalid_ids).delete() + # We need a dedicated mock to represent the QuerySet returned by the second filter call + mock_queryset = MagicMock() + mock_objects.filter.side_effect = [ + [stale_item], # First call: returns list for ID processing + mock_queryset, # Second call: returns the QuerySet for .delete() + ] + + # 4. 
Mock API for refresh + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.text = '{"id": "CVE-OLD"}' + mock_get.return_value = mock_response + + # Execute + self.parser._fill_osv_cache([vuln]) + + # Assertions + # Verify the deletion was actually called on the filtered QuerySet + mock_queryset.delete.assert_called_once() + + # Verify the API was called to get the fresh data + self.assertTrue(mock_get.called) + self.assertEqual(mock_objects.bulk_create.call_count, 1) + + @patch("application.import_observations.models.OSV_Cache.objects") + @patch("requests.get") + def test_fill_osv_cache_mixed_state(self, mock_get, mock_objects): + """ + Scenario: One valid cache hit, one missing (must fetch). + """ + v1 = OSV_Vulnerability(id="CVE-VALID", modified=self.now) + v2 = OSV_Vulnerability(id="CVE-MISSING", modified=self.now) + + # Mock DB: Only CVE-VALID exists + valid_item = MagicMock(spec=OSV_Cache) + valid_item.osv_id = "CVE-VALID" + valid_item.modified = self.now + valid_item.data = '{"id": "CVE-VALID"}' + + # Setup side_effect to handle multiple filter calls + # 1st: The lookup of existing items + # 2nd: The deletion filter (which will be empty in this case) + mock_queryset_delete = MagicMock() + mock_objects.filter.side_effect = [[valid_item], mock_queryset_delete] # Initial lookup # Deletion call + + # Mock API for the missing one + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.text = '{"id": "CVE-MISSING"}' + mock_get.return_value = mock_response + + # Execute + result = self.parser._fill_osv_cache([v1, v2]) + + # Assertions + self.assertIn("CVE-VALID", result) + self.assertIn("CVE-MISSING", result) + self.assertEqual(mock_get.call_count, 1) # Only called for CVE-MISSING diff --git a/backend/unittests/import_observations/parsers/osv/test_parser.py b/backend/unittests/import_observations/parsers/osv/test_parser.py new file mode 100644 index 000000000..4c9ad04c7 --- /dev/null +++ b/backend/unittests/import_observations/parsers/osv/test_parser.py @@ -0,0 +1,470 @@ +from datetime import datetime, timezone +from unittest.mock import patch + +from django.core.management import call_command +from packageurl import PackageURL + +from application.import_observations.parsers.osv.parser import ( + OSV_Component, + OSV_Vulnerability, + OSVParser, +) +from application.licenses.models import License_Component +from unittests.base_test_case import BaseTestCase + + +class TestOSVParser(BaseTestCase): + def test_no_observations(self): + parser = OSVParser() + observations, scanner = parser.get_observations([], self.product_1, self.branch_1) + + self.assertEqual("OSV (Open Source Vulnerabilities)", scanner) + self.assertEqual(observations, []) + + def test_java_and_python_open(self): + call_command( + "loaddata", + [ + "unittests/import_observations/parsers/osv/files/fixtures_osv_cache_java_python.json", + ], + ) + + license_component_java = License_Component( + product=self.product_1, + branch=self.branch_1, + component_name="json", + component_version="20190722", + component_name_version="json:20190722", + component_purl="pkg:maven/org.json/json@20190722?type=jar", + component_purl_type="maven", + component_cpe="cpe:/a:org.json:json:20190722", + component_cyclonedx_bom_link="urn:cdx:a/1#b", + component_dependencies="json_dependencies", + ) + + license_component_python = License_Component( + product=self.product_1, + branch=self.branch_1, + component_name="Django", + component_version="5.1.2", + component_name_version="Django:5.1.2", + 
component_purl="pkg:pypi/django@5.1.2", + component_purl_type="pypi", + component_dependencies="django_dependencies", + ) + + osv_components = [ + OSV_Component( + license_component=license_component_java, + vulnerabilities={ + OSV_Vulnerability( + id="GHSA-3vqj-43w4-2q58", + modified=datetime(2024, 8, 7, 20, 1, 58, 452618, timezone.utc), + ), + OSV_Vulnerability( + id="GHSA-4jq9-2xhw-jpx7", + modified=datetime(2024, 10, 30, 19, 23, 43, 662562, timezone.utc), + ), + }, + ), + OSV_Component( + license_component=license_component_python, + vulnerabilities={ + OSV_Vulnerability( + id="GHSA-m9g8-fxxm-xg86", + modified=datetime(2024, 12, 20, 20, 37, 27, 0, timezone.utc), + ), + }, + ), + ] + + parser = OSVParser() + observations, scanner = parser.get_observations(osv_components, self.product_1, self.branch_1) + + self.assertEqual("OSV (Open Source Vulnerabilities)", scanner) + self.assertEqual(len(observations), 3) + + observation = observations[0] + self.assertEqual("CVE-2022-45688", observation.title) + description = """json stack overflow vulnerability + +A stack overflow in the XML.toJSONObject component of hutool-json v5.8.10 and org.json:json before version 20230227 allows attackers to cause a Denial of Service (DoS) via crafted JSON or XML data. + +**Confidence: High** (Component found in affected versions)""" + self.assertEqual(description, observation.description) + self.assertEqual("", observation.recommendation) + self.assertEqual("CVE-2022-45688", observation.vulnerability_id) + self.assertEqual("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", observation.cvss3_vector) + self.assertEqual("", observation.cvss4_vector) + self.assertEqual("GHSA-3vqj-43w4-2q58", observation.vulnerability_id_aliases) + self.assertEqual("json", observation.origin_component_name) + self.assertEqual("20190722", observation.origin_component_version) + self.assertEqual( + "pkg:maven/org.json/json@20190722?type=jar", + observation.origin_component_purl, + ) + self.assertEqual("cpe:/a:org.json:json:20190722", observation.origin_component_cpe) + self.assertEqual("urn:cdx:a/1#b", observation.origin_component_cyclonedx_bom_link) + self.assertEqual("json_dependencies", observation.origin_component_dependencies) + + unsaved_references = observation.unsaved_references + self.assertEqual(6, len(unsaved_references)) + self.assertEqual("https://nvd.nist.gov/vuln/detail/CVE-2022-45688", unsaved_references[0]) + + self.assertEqual("OSV Vulnerability", observation.unsaved_evidences[0][0]) + self.assertIn("CWE-787", observation.unsaved_evidences[0][1]) + + observation = observations[1] + self.assertEqual("CVE-2023-5072", observation.title) + + observation = observations[2] + self.assertEqual("CVE-2024-53908", observation.title) + self.assertEqual("Update to version 5.1.4", observation.recommendation) + self.assertEqual("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", observation.cvss3_vector) + self.assertEqual( + "CVSS:4.0/AV:N/AC:L/AT:P/PR:N/UI:N/VC:H/VI:H/VA:H/SC:N/SI:N/SA:N/E:U", + observation.cvss4_vector, + ) + + def test_python_fixed(self): + call_command( + "loaddata", + [ + "unittests/import_observations/parsers/osv/files/fixtures_osv_cache_java_python.json", + ], + ) + + license_component_python = License_Component( + product=self.product_1, + branch=self.branch_1, + component_name="Django", + component_version="5.1.6", + component_name_version="Django:5.1.6", + component_purl="pkg:pypi/django@5.1.6", + component_purl_type="pypi", + component_dependencies="django_dependencies", + ) + + osv_components = [ + 
OSV_Component( + license_component=license_component_python, + vulnerabilities={ + OSV_Vulnerability( + id="GHSA-m9g8-fxxm-xg86", + modified=datetime(2024, 12, 20, 20, 37, 27, 0, timezone.utc), + ), + }, + ), + ] + + parser = OSVParser() + observations, scanner = parser.get_observations(osv_components, self.product_1, self.branch_1) + + self.assertEqual("OSV (Open Source Vulnerabilities)", scanner) + self.assertEqual(len(observations), 0) + + @patch("application.import_observations.parsers.osv.parser.OSVParser._get_linux_package_osv_ecosystem") + def test_linux_no_distribution(self, mock_get_linux_package_osv_ecosystem): + mock_get_linux_package_osv_ecosystem.side_effect = self._side_effect_func + + call_command( + "loaddata", + [ + "unittests/import_observations/parsers/osv/files/fixtures_osv_cache_linux.json", + ], + ) + + self.product_1.osv_linux_distribution = "" + self.product_1.osv_linux_release = "" + osv_components = [self._get_osv_component_git(), self._get_osv_component_vim()] + + parser = OSVParser() + observations, scanner = parser.get_observations(osv_components, self.product_1, None) + + self.assertEqual("OSV (Open Source Vulnerabilities)", scanner) + self.assertEqual(len(observations), 0) + + @patch("application.import_observations.parsers.osv.parser.OSVParser._get_linux_package_osv_ecosystem") + def test_linux_product_distribution(self, mock_get_linux_package_osv_ecosystem): + mock_get_linux_package_osv_ecosystem.side_effect = self._side_effect_func + + call_command( + "loaddata", + [ + "unittests/import_observations/parsers/osv/files/fixtures_osv_cache_linux.json", + ], + ) + + self.product_1.osv_linux_distribution = "Debian" + self.product_1.osv_linux_release = "12" + osv_components = [self._get_osv_component_git(), self._get_osv_component_vim()] + + parser = OSVParser() + observations, scanner = parser.get_observations(osv_components, self.product_1, None) + + self.assertEqual("OSV (Open Source Vulnerabilities)", scanner) + + mock_get_linux_package_osv_ecosystem.assert_called_with( + PackageURL.from_string("pkg:deb/debian/vim@9.0.1378-2?arch=amd64&distro=debian-12.5&epoch=2"), + "Debian:12", + ) + + self.assertEqual(len(observations), 1) + + observation = observations[0] + self.assertEqual("CVE-2017-6349", observation.title) + description = """An integer overflow at a u_read_undo memory allocation site would occur for vim before patch 8.0.0377, if it does not properly validate values for tree length when reading a corrupted undo file, which may lead to resultant buffer overflows. 
+ +**Confidence: Low** (Events could not be evaluated) + +**Events:** + +* ECOSYSTEM: Introduced: 0 - Fixed: 2:8.0.0197-3""" + self.assertEqual(description, observation.description) + + def test_linux_branch_distribution(self): + call_command( + "loaddata", + [ + "unittests/import_observations/parsers/osv/files/fixtures_osv_cache_linux.json", + ], + ) + + self.product_1.osv_linux_distribution = "" + self.product_1.osv_linux_release = "" + self.branch_1.osv_linux_distribution = "Debian" + self.branch_1.osv_linux_release = "12" + osv_components = [self._get_osv_component_git(), self._get_osv_component_vim()] + + parser = OSVParser() + observations, scanner = parser.get_observations(osv_components, self.product_1, self.branch_1) + + self.assertEqual("OSV (Open Source Vulnerabilities)", scanner) + self.assertEqual(len(observations), 1) + + observation = observations[0] + self.assertEqual("CVE-2017-6349", observation.title) + description = """An integer overflow at a u_read_undo memory allocation site would occur for vim before patch 8.0.0377, if it does not properly validate values for tree length when reading a corrupted undo file, which may lead to resultant buffer overflows. + +**Confidence: Low** (Events could not be evaluated) + +**Events:** + +* ECOSYSTEM: Introduced: 0 - Fixed: 2:8.0.0197-3""" + self.assertEqual(description, observation.description) + self.assertEqual("ALPINE-CVE-2017-6349", observation.vulnerability_id_aliases) + + def test_linux_rpm(self): + call_command( + "loaddata", + [ + "unittests/import_observations/parsers/osv/files/fixtures_osv_cache_rpm.json", + ], + ) + + self.product_1.osv_linux_distribution = "" + self.product_1.osv_linux_release = "" + self.branch_1.osv_linux_distribution = "Red Hat" + self.branch_1.osv_linux_release = "enterprise_linux:9::appstream" + osv_components = [self._get_osv_component_rpm()] + + parser = OSVParser() + observations, scanner = parser.get_observations(osv_components, self.product_1, self.branch_1) + + self.assertEqual("OSV (Open Source Vulnerabilities)", scanner) + self.assertEqual(len(observations), 1) + + observation = observations[0] + self.assertEqual("RHSA-2023:6738", observation.title) + description = """Red Hat Security Advisory: java-21-openjdk security and bug fix update + +**Confidence: High** (Component found in affected ranges)""" + self.assertEqual(description, observation.description) + self.assertEqual("CVE-2023-22025, CVE-2023-22081", observation.vulnerability_id_aliases) + + def test_linux_rpm_ecosystem_not_found(self): + call_command( + "loaddata", + [ + "unittests/import_observations/parsers/osv/files/fixtures_osv_cache_rpm_ecosystem.json", + ], + ) + + self.product_1.osv_linux_distribution = "" + self.product_1.osv_linux_release = "" + self.branch_1.osv_linux_distribution = "Red Hat" + self.branch_1.osv_linux_release = "enterprise_linux:9::appstream" + osv_components = [self._get_osv_component_rpm_openssl()] + + parser = OSVParser() + observations, scanner = parser.get_observations(osv_components, self.product_1, self.branch_1) + + self.assertEqual("OSV (Open Source Vulnerabilities)", scanner) + self.assertEqual(len(observations), 0) + + def test_get_linux_package_osv_ecosystem_already_set(self): + parser = OSVParser() + package_osv_ecosystem = parser._get_linux_package_osv_ecosystem( + PackageURL.from_string( + "pkg:apk/alpine/musl@1.2.5-r1?arch=x86_64&distro=alpine-3.20.6&distro_name=alpine-3.20" + ), + "Debian:12", + ) + self.assertEqual("Debian:12", package_osv_ecosystem) + + def 
test_get_linux_package_osv_ecosystem_alpine_1(self): + parser = OSVParser() + package_osv_ecosystem = parser._get_linux_package_osv_ecosystem( + PackageURL.from_string( + "pkg:apk/alpine/musl@1.2.5-r1?arch=x86_64&distro=alpine-3.20.6&distro_name=alpine-3.20" + ), + None, + ) + self.assertEqual("Alpine:v3.20", package_osv_ecosystem) + + def test_get_linux_package_osv_ecosystem_alpine_2(self): + parser = OSVParser() + package_osv_ecosystem = parser._get_linux_package_osv_ecosystem( + PackageURL.from_string("pkg:apk/alpine/busybox-binsh@1.48.0-r12?arch=x86_64&distro=3.21.3"), + None, + ) + self.assertEqual("Alpine:v3.21", package_osv_ecosystem) + + def test_get_linux_package_osv_ecosystem_debian_1(self): + parser = OSVParser() + package_osv_ecosystem = parser._get_linux_package_osv_ecosystem( + PackageURL.from_string("pkg:deb/debian/libtasn1-6@4.16.0-2%2Bdeb11u2?arch=amd64&distro=debian-11"), + None, + ) + self.assertEqual("Debian:11", package_osv_ecosystem) + + def test_get_linux_package_osv_ecosystem_debian_2(self): + parser = OSVParser() + package_osv_ecosystem = parser._get_linux_package_osv_ecosystem( + PackageURL.from_string("pkg:deb/debian/coreutils@8.32-4%2Bb1?arch=amd64&distro=debian-11.11"), + None, + ) + self.assertEqual("Debian:11", package_osv_ecosystem) + + def test_get_linux_package_osv_ecosystem_chainguard(self): + parser = OSVParser() + package_osv_ecosystem = parser._get_linux_package_osv_ecosystem( + PackageURL.from_string("pkg:apk/chainguard/nri-kafka@3.10.2-r0?arch=x86_64&distro=20230201"), + None, + ) + self.assertEqual("Chainguard", package_osv_ecosystem) + + def test_get_linux_package_osv_ecosystem_wolfi(self): + parser = OSVParser() + package_osv_ecosystem = parser._get_linux_package_osv_ecosystem( + PackageURL.from_string("pkg:apk/wolfi/nri-kafka@3.10.2-r0?arch=x86_64&distro=20230201"), + None, + ) + self.assertEqual("Wolfi", package_osv_ecosystem) + + def test_get_linux_package_osv_ecosystem_ubuntu_21_04(self): + parser = OSVParser() + package_osv_ecosystem = parser._get_linux_package_osv_ecosystem( + PackageURL.from_string("pkg:deb/ubuntu/zlib1g@1.2.11.dfsg-2ubuntu9?arch=amd64&distro=ubuntu-21.04&epoch=1"), + None, + ) + self.assertEqual("Ubuntu:21.04", package_osv_ecosystem) + + def test_get_linux_package_osv_ecosystem_ubuntu_22_10(self): + parser = OSVParser() + package_osv_ecosystem = parser._get_linux_package_osv_ecosystem( + PackageURL.from_string("pkg:deb/ubuntu/zlib1g@1.2.11.dfsg-2ubuntu9?arch=amd64&distro=ubuntu-22.10&epoch=1"), + None, + ) + self.assertEqual("Ubuntu:22.10", package_osv_ecosystem) + + def test_get_linux_package_osv_ecosystem_ubuntu_lts(self): + parser = OSVParser() + package_osv_ecosystem = parser._get_linux_package_osv_ecosystem( + PackageURL.from_string("pkg:deb/ubuntu/zlib1g@1.2.11.dfsg-2ubuntu9?arch=amd64&distro=ubuntu-22.04&epoch=1"), + None, + ) + self.assertEqual("Ubuntu:22.04:LTS", package_osv_ecosystem) + + def _side_effect_func(self, parsed_purl, package_osv_ecosystem): + return package_osv_ecosystem + + def _get_osv_component_git(self): + return OSV_Component( + license_component=License_Component( + product=self.product_1, + branch=self.branch_1, + component_name="git", + component_version="1:2.39.5-0+deb12u1", + component_name_version="git:1:2.39.5-0+deb12u1", + component_purl="pkg:deb/debian/git@1:2.39.5-0%2Bdeb12u1?arch=amd64&distro=debian-12", + component_purl_type="deb", + component_dependencies="git_dependencies", + ), + vulnerabilities={ + OSV_Vulnerability( + id="CVE-2024-32002", + modified=datetime(2024, 8, 7, 20, 1, 
58, 452618, timezone.utc), + ), + }, + ) + + def _get_osv_component_vim(self): + return OSV_Component( + license_component=License_Component( + product=self.product_1, + branch=self.branch_1, + component_name="vim", + component_version="2:9.0.1378-2", + component_name_version="vim:2:9.0.1378-2", + component_purl="pkg:deb/debian/vim@9.0.1378-2?arch=amd64&distro=debian-12.5&epoch=2", + component_purl_type="deb", + component_dependencies="vim_dependencies", + ), + vulnerabilities={ + OSV_Vulnerability( + id="CVE-2017-6349", + modified=datetime(2024, 8, 7, 20, 1, 58, 452618, timezone.utc), + ), + }, + ) + + def _get_osv_component_rpm(self): + return OSV_Component( + license_component=License_Component( + product=self.product_1, + branch=self.branch_1, + component_name="java-21-openjdk-devel", + component_version="21.0.7.0.6-1.el9", + component_name_version="java-21-openjdk-devel:21.0.7.0.6-1.el9", + component_purl="pkg:rpm/redhat/java-21-openjdk-devel@21.0.7.0.6-1.el9", + component_purl_type="rpm", + component_dependencies="", + ), + vulnerabilities={ + OSV_Vulnerability( + id="RHSA-2023:6738", + modified=datetime(2024, 8, 7, 20, 1, 58, 452618, timezone.utc), + ), + }, + ) + + def _get_osv_component_rpm_openssl(self): + return OSV_Component( + license_component=License_Component( + product=self.product_1, + branch=self.branch_1, + component_name="openssl-libs", + component_version="1:3.2.2-6.el9_5.1", + component_name_version="openssl-libs:1:3.2.2-6.el9_5.1", + component_purl="pkg:rpm/redhat/openssl-libs@3.2.2-6.el9_5.1", + component_purl_type="rpm", + component_dependencies="", + ), + vulnerabilities={ + OSV_Vulnerability( + id="RHSA-2015:1115", + modified=datetime(2025, 6, 16, 6, 2, 31, 452618, timezone.utc), + ), + }, + ) diff --git a/backend/unittests/import_observations/parsers/osv/test_rpm_version.py b/backend/unittests/import_observations/parsers/osv/test_rpm_version.py new file mode 100644 index 000000000..9395edc7c --- /dev/null +++ b/backend/unittests/import_observations/parsers/osv/test_rpm_version.py @@ -0,0 +1,17 @@ +from application.import_observations.parsers.osv.rpm import RpmVersion +from unittests.base_test_case import BaseTestCase + + +class TestRpmVersion(BaseTestCase): + def test_parse_version(self): + rpm_ver = RpmVersion.from_string("1:21.0.6.0.7-1.el9") + self.assertEqual(rpm_ver.epoch, 1) + self.assertEqual(rpm_ver.version, "21.0.6.0.7") + self.assertEqual(rpm_ver.release, "1.el9") + + def test_comparisons(self): + rpm_ver1 = RpmVersion.from_string("1:21.0.6.0.7-1.el9") + rpm_ver2 = RpmVersion.from_string("1:21.0.1.0.12-2.el9") + self.assertTrue(rpm_ver1 > rpm_ver2) + self.assertTrue(rpm_ver2 < rpm_ver1) + self.assertFalse(rpm_ver1 == rpm_ver2) diff --git a/backend/unittests/import_observations/parsers/prowler/test_parser.py b/backend/unittests/import_observations/parsers/prowler/test_parser.py index 666003d5b..d2df324b9 100644 --- a/backend/unittests/import_observations/parsers/prowler/test_parser.py +++ b/backend/unittests/import_observations/parsers/prowler/test_parser.py @@ -1,6 +1,7 @@ from os import path from unittest import TestCase +from application.core.models import Product from application.core.types import Severity from application.import_observations.parsers.prowler.parser import ProwlerParser from application.import_observations.services.parser_detector import detect_parser @@ -11,9 +12,11 @@ def test_aws(self): with open(path.dirname(__file__) + "/files/prowler_aws.json") as testfile: parser, parser_instance, data = detect_parser(testfile) 
self.assertEqual("Prowler 3", parser.name) - self.assertTrue(isinstance(parser_instance, ProwlerParser)) + self.assertIsInstance(parser_instance, ProwlerParser) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("Prowler 3", scanner) self.assertEqual(1, len(observations)) observation = observations[0] @@ -39,9 +42,7 @@ def test_aws(self): observation.recommendation, ) self.assertEqual("AWS", observation.origin_cloud_provider) - self.assertEqual( - "ACCOUNT_ID", observation.origin_cloud_account_subscription_project - ) + self.assertEqual("ACCOUNT_ID", observation.origin_cloud_account_subscription_project) self.assertEqual("rds-instance-id", observation.origin_cloud_resource) self.assertEqual("AwsRdsDbInstance", observation.origin_cloud_resource_type) self.assertEqual( @@ -55,9 +56,11 @@ def test_azure(self): with open(path.dirname(__file__) + "/files/prowler_azure.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("Prowler 3", parser.name) - self.assertTrue(isinstance(parser_instance, ProwlerParser)) + self.assertIsInstance(parser_instance, ProwlerParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) - observations = parser_instance.get_observations(data) + self.assertEqual("Prowler 3", scanner) self.assertEqual(2, len(observations)) observation = observations[0] @@ -83,12 +86,8 @@ def test_azure(self): "Example_Subscription - XAKS", observation.origin_cloud_account_subscription_project, ) - self.assertEqual( - "Defender plan App Services", observation.origin_cloud_resource - ) - self.assertEqual( - "AzureDefenderPlan", observation.origin_cloud_resource_type - ) + self.assertEqual("Defender plan App Services", observation.origin_cloud_resource) + self.assertEqual("AzureDefenderPlan", observation.origin_cloud_resource_type) self.assertEqual("Result", observation.unsaved_evidences[0][0]) self.assertIn( "defender_ensure_defender_for_app_services_is_on", diff --git a/backend/unittests/import_observations/parsers/sarif/test_parser.py b/backend/unittests/import_observations/parsers/sarif/test_parser.py index ac852a4b0..89bbcd419 100644 --- a/backend/unittests/import_observations/parsers/sarif/test_parser.py +++ b/backend/unittests/import_observations/parsers/sarif/test_parser.py @@ -1,6 +1,7 @@ from os import path from unittest import TestCase +from application.core.models import Product from application.core.types import Severity from application.import_observations.parsers.sarif.parser import SARIFParser from application.import_observations.services.parser_detector import detect_parser @@ -11,9 +12,11 @@ def test_checkov(self): with open(path.dirname(__file__) + "/files/checkov.sarif") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("SARIF", parser.name) - self.assertTrue(isinstance(parser_instance, SARIFParser)) + self.assertIsInstance(parser_instance, SARIFParser) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("Checkov / 2.1.277", scanner) self.assertEqual(4, len(observations)) observation = observations[0] @@ -26,9 +29,7 @@ def test_checkov(self): """ self.assertEqual(description, observation.description) - self.assertEqual( - "frontend/docker/Dockerfile", observation.origin_source_file - ) + 
self.assertEqual("frontend/docker/Dockerfile", observation.origin_source_file) self.assertEqual(1, observation.origin_source_line_start) self.assertEqual(41, observation.origin_source_line_end) self.assertEqual(Severity.SEVERITY_HIGH, observation.parser_severity) @@ -39,17 +40,17 @@ def test_checkov(self): self.assertEqual("Rule", observation.unsaved_evidences[0][0]) self.assertIn('"id": "CKV_DOCKER_2"', observation.unsaved_evidences[0][1]) self.assertEqual("Result", observation.unsaved_evidences[1][0]) - self.assertIn( - '"ruleId": "CKV_DOCKER_2"', observation.unsaved_evidences[1][1] - ) + self.assertIn('"ruleId": "CKV_DOCKER_2"', observation.unsaved_evidences[1][1]) def test_eslint(self): with open(path.dirname(__file__) + "/files/eslint.sarif") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("SARIF", parser.name) - self.assertTrue(isinstance(parser_instance, SARIFParser)) + self.assertIsInstance(parser_instance, SARIFParser) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("ESLint / 8.25.0", scanner) self.assertEqual(5, len(observations)) observation = observations[0] @@ -87,9 +88,11 @@ def test_bandit(self): with open(path.dirname(__file__) + "/files/bandit.sarif") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("SARIF", parser.name) - self.assertTrue(isinstance(parser_instance, SARIFParser)) + self.assertIsInstance(parser_instance, SARIFParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) - observations = parser_instance.get_observations(data) + self.assertEqual("Bandit", scanner) self.assertEqual(2, len(observations)) observation = observations[0] @@ -105,9 +108,7 @@ def test_bandit(self): """ self.assertEqual(description, observation.description) - self.assertEqual( - "backend/config/settings/dist.py", observation.origin_source_file - ) + self.assertEqual("backend/config/settings/dist.py", observation.origin_source_file) self.assertEqual(14, observation.origin_source_line_start) self.assertIsNone(observation.origin_source_line_end) self.assertEqual(Severity.SEVERITY_MEDIUM, observation.parser_severity) @@ -124,9 +125,11 @@ def test_kics(self): with open(path.dirname(__file__) + "/files/kics.sarif") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("SARIF", parser.name) - self.assertTrue(isinstance(parser_instance, SARIFParser)) + self.assertIsInstance(parser_instance, SARIFParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) - observations = parser_instance.get_observations(data) + self.assertEqual("KICS / development", scanner) self.assertEqual(2, len(observations)) observation = observations[0] @@ -138,9 +141,7 @@ def test_kics(self): """ self.assertEqual(description, observation.description) - self.assertEqual( - "docker-compose-prod-postgres.yml", observation.origin_source_file - ) + self.assertEqual("docker-compose-prod-postgres.yml", observation.origin_source_file) self.assertEqual(34, observation.origin_source_line_start) self.assertIsNone(observation.origin_source_line_end) self.assertEqual(Severity.SEVERITY_HIGH, observation.parser_severity) @@ -163,16 +164,16 @@ def test_trivy_config(self): with open(path.dirname(__file__) + "/files/trivy_config.sarif") as testfile: parser, parser_instance, data = detect_parser(testfile) 
self.assertEqual("SARIF", parser.name) - self.assertTrue(isinstance(parser_instance, SARIFParser)) + self.assertIsInstance(parser_instance, SARIFParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) - observations = parser_instance.get_observations(data) + self.assertEqual("Trivy / 0.47.0", scanner) self.assertEqual(1, len(observations)) observation = observations[0] self.assertEqual("Trivy / 0.47.0", observation.scanner) - self.assertEqual( - "Ensure that the expiration date is set on all keys", observation.title - ) + self.assertEqual("Ensure that the expiration date is set on all keys", observation.title) description = """**Rule full description:** Expiration Date is an optional Key Vault Key behavior and is not set by default. Set when the resource will be become inactive. @@ -185,9 +186,7 @@ def test_trivy_config(self): """ self.assertEqual(description, observation.description) - self.assertEqual( - "modules/azure-cosmosdb/main.tf", observation.origin_source_file - ) + self.assertEqual("modules/azure-cosmosdb/main.tf", observation.origin_source_file) self.assertEqual(164, observation.origin_source_line_start) self.assertEqual(176, observation.origin_source_line_end) self.assertEqual(Severity.SEVERITY_MEDIUM, observation.parser_severity) @@ -210,9 +209,11 @@ def test_dependency_check(self): with open(path.dirname(__file__) + "/files/dependency-check.sarif") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("SARIF", parser.name) - self.assertTrue(isinstance(parser_instance, SARIFParser)) + self.assertIsInstance(parser_instance, SARIFParser) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("dependency-check / 8.0.1", scanner) self.assertEqual(3, len(observations)) observation = observations[0] @@ -245,9 +246,11 @@ def test_semgrep(self): with open(path.dirname(__file__) + "/files/semgrep.sarif") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("SARIF", parser.name) - self.assertTrue(isinstance(parser_instance, SARIFParser)) + self.assertIsInstance(parser_instance, SARIFParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) - observations = parser_instance.get_observations(data) + self.assertEqual("semgrep / 1.16.0", scanner) self.assertEqual(4, len(observations)) observation = observations[0] diff --git a/backend/unittests/import_observations/parsers/secobserve/test_parser.py b/backend/unittests/import_observations/parsers/secobserve/test_parser.py index 5ad8e50c3..d4a06d8a6 100644 --- a/backend/unittests/import_observations/parsers/secobserve/test_parser.py +++ b/backend/unittests/import_observations/parsers/secobserve/test_parser.py @@ -1,6 +1,7 @@ from os import path from unittest import TestCase +from application.core.models import Product from application.import_observations.parsers.secobserve.parser import SecObserveParser from application.import_observations.services.parser_detector import detect_parser @@ -10,20 +11,22 @@ def test_no_observation(self): with open(path.dirname(__file__) + "/files/no_observation.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("SecObserve", parser.name) - self.assertTrue(isinstance(parser_instance, SecObserveParser)) + self.assertIsInstance(parser_instance, SecObserveParser) - observations = 
parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("SecObserve", scanner) self.assertEqual(0, len(observations)) def test_multiple_observations(self): - with open( - path.dirname(__file__) + "/files/multiple_observations.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/multiple_observations.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("SecObserve", parser.name) - self.assertTrue(isinstance(parser_instance, SecObserveParser)) + self.assertIsInstance(parser_instance, SecObserveParser) + + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) - observations = parser_instance.get_observations(data) + self.assertEqual("scanner_1", scanner) self.assertEqual(2, len(observations)) observation = observations[0] @@ -31,30 +34,18 @@ def test_multiple_observations(self): self.assertEqual("description_1", observation.description) self.assertEqual("recommendation_1", observation.recommendation) self.assertEqual("Critical", observation.parser_severity) - self.assertEqual( - "scanner_observation_id_1", observation.scanner_observation_id - ) + self.assertEqual("scanner_observation_id_1", observation.scanner_observation_id) self.assertEqual("vulnerability_id_1", observation.vulnerability_id) - self.assertEqual( - "origin_component_name_1", observation.origin_component_name - ) - self.assertEqual( - "origin_component_version_1", observation.origin_component_version - ) + self.assertEqual("origin_component_name_1", observation.origin_component_name) + self.assertEqual("origin_component_version_1", observation.origin_component_version) self.assertEqual( "origin_component_name_version_1", observation.origin_component_name_version, ) - self.assertEqual( - "origin_component_purl_1", observation.origin_component_purl - ) + self.assertEqual("origin_component_purl_1", observation.origin_component_purl) self.assertEqual("origin_component_cpe_1", observation.origin_component_cpe) - self.assertEqual( - "origin_docker_image_name_1", observation.origin_docker_image_name - ) - self.assertEqual( - "origin_docker_image_tag_1", observation.origin_docker_image_tag - ) + self.assertEqual("origin_docker_image_name_1", observation.origin_docker_image_name) + self.assertEqual("origin_docker_image_tag_1", observation.origin_docker_image_tag) self.assertEqual( "origin_docker_image_name_tag_1", observation.origin_docker_image_name_tag, diff --git a/backend/unittests/import_observations/parsers/spdx/files/multiple_observations.json b/backend/unittests/import_observations/parsers/spdx/files/multiple_observations.json index b214495f4..3e70764b5 100644 --- a/backend/unittests/import_observations/parsers/spdx/files/multiple_observations.json +++ b/backend/unittests/import_observations/parsers/spdx/files/multiple_observations.json @@ -2,8 +2,8 @@ "spdxVersion": "SPDX-2.3", "dataLicense": "CC0-1.0", "SPDXID": "SPDXRef-DOCUMENT", - "name": "maibornwolff/secobserve-backend:1.20.0", - "documentNamespace": "http://aquasecurity.github.io/trivy/container_image/maibornwolff/secobserve-backend:1.20.0-b3fdfb29-cea7-4576-875b-9cd70d2552ba", + "name": "ghcr.io/secobserve/secobserve-backend:1.20.0", + "documentNamespace": "http://aquasecurity.github.io/trivy/container_image/ghcr.io/secobserve/secobserve-backend:1.20.0-b3fdfb29-cea7-4576-875b-9cd70d2552ba", "creationInfo": { "creators": [ "Organization: aquasecurity", @@ -13,7 +13,7 @@ }, 
"packages": [ { - "name": "maibornwolff/secobserve-backend:1.20.0", + "name": "ghcr.io/secobserve/secobserve-backend:1.20.0", "SPDXID": "SPDXRef-ContainerImage-82bc9d1b632a36cd", "downloadLocation": "NONE", "filesAnalyzed": false, @@ -21,7 +21,7 @@ { "referenceCategory": "PACKAGE-MANAGER", "referenceType": "purl", - "referenceLocator": "pkg:oci/secobserve-backend@sha256%3Ace5416f0ca293e258ac5c7d8500eb41198064904379c6b033416772e07c3cf36?arch=amd64\u0026repository_url=index.docker.io%2Fmaibornwolff%2Fsecobserve-backend" + "referenceLocator": "pkg:oci/secobserve-backend@sha256%3Ace5416f0ca293e258ac5c7d8500eb41198064904379c6b033416772e07c3cf36?arch=amd64\u0026repository_url=index.docker.io%secoberve%2Fsecobserve-backend" } ], "primaryPackagePurpose": "CONTAINER", @@ -110,12 +110,6 @@ "annotationType": "OTHER", "comment": "ImageID: sha256:3fd45f090e0fcd15489139af1579fd54cceed2df2d1675041ea7ce59f4c0eecd" }, - { - "annotator": "Tool: trivy-0.58.0", - "annotationDate": "2024-12-21T15:02:40Z", - "annotationType": "OTHER", - "comment": "Labels:org.opencontainers.image.authors: stefan.fleckenstein@maibornwolff.de" - }, { "annotator": "Tool: trivy-0.58.0", "annotationDate": "2024-12-21T15:02:40Z", @@ -132,7 +126,7 @@ "annotator": "Tool: trivy-0.58.0", "annotationDate": "2024-12-21T15:02:40Z", "annotationType": "OTHER", - "comment": "Labels:org.opencontainers.image.documentation: https://maibornwolff.github.io/SecObserve" + "comment": "Labels:org.opencontainers.image.documentation: https://secobserve.github.io/SecObserve" }, { "annotator": "Tool: trivy-0.58.0", @@ -150,7 +144,7 @@ "annotator": "Tool: trivy-0.58.0", "annotationDate": "2024-12-21T15:02:40Z", "annotationType": "OTHER", - "comment": "Labels:org.opencontainers.image.source: https://github.com/MaibornWolff/SecObserve" + "comment": "Labels:org.opencontainers.image.source: https://github.com/SecObserve/SecObserve" }, { "annotator": "Tool: trivy-0.58.0", @@ -162,13 +156,13 @@ "annotator": "Tool: trivy-0.58.0", "annotationDate": "2024-12-21T15:02:40Z", "annotationType": "OTHER", - "comment": "Labels:org.opencontainers.image.url: https://github.com/MaibornWolff/SecObserve" + "comment": "Labels:org.opencontainers.image.url: https://github.com/SecObserve/SecObserve" }, { "annotator": "Tool: trivy-0.58.0", "annotationDate": "2024-12-21T15:02:40Z", "annotationType": "OTHER", - "comment": "Labels:org.opencontainers.image.vendor: MaibornWolff GmbH" + "comment": "Labels:org.opencontainers.image.vendor: SecObserve" }, { "annotator": "Tool: trivy-0.58.0", @@ -180,13 +174,13 @@ "annotator": "Tool: trivy-0.58.0", "annotationDate": "2024-12-21T15:02:40Z", "annotationType": "OTHER", - "comment": "RepoDigest: maibornwolff/secobserve-backend@sha256:ce5416f0ca293e258ac5c7d8500eb41198064904379c6b033416772e07c3cf36" + "comment": "RepoDigest: ghcr.io/secobserve/secobserve-backend@sha256:ce5416f0ca293e258ac5c7d8500eb41198064904379c6b033416772e07c3cf36" }, { "annotator": "Tool: trivy-0.58.0", "annotationDate": "2024-12-21T15:02:40Z", "annotationType": "OTHER", - "comment": "RepoTag: maibornwolff/secobserve-backend:1.20.0" + "comment": "RepoTag: ghcr.io/secobserve/secobserve-backend:1.20.0" }, { "annotator": "Tool: trivy-0.58.0", @@ -2602,7 +2596,7 @@ { "name": "libcom_err", "SPDXID": "SPDXRef-Package-202055e74aac337d", - "versionInfo": "1.47.0-r5", + "versionInfo": "1.48.0-r5", "supplier": "NOASSERTION", "downloadLocation": "NONE", "filesAnalyzed": false, @@ -2612,14 +2606,14 @@ "checksumValue": "b12095c140bb68ff17a3667e9bf3ba8ba9a52d00" } ], - "sourceInfo": "built 
package from: e2fsprogs 1.47.0-r5", + "sourceInfo": "built package from: e2fsprogs 1.48.0-r5", "licenseConcluded": "GPL-2.0-or-later AND LGPL-2.0-or-later AND BSD-3-Clause AND MIT", "licenseDeclared": "GPL-2.0-or-later AND LGPL-2.0-or-later AND BSD-3-Clause AND MIT", "externalRefs": [ { "referenceCategory": "PACKAGE-MANAGER", "referenceType": "purl", - "referenceLocator": "pkg:apk/alpine/libcom_err@1.47.0-r5?arch=x86_64\u0026distro=3.20.3" + "referenceLocator": "pkg:apk/alpine/libcom_err@1.48.0-r5?arch=x86_64\u0026distro=3.20.3" } ], "primaryPackagePurpose": "LIBRARY", @@ -2640,7 +2634,7 @@ "annotator": "Tool: trivy-0.58.0", "annotationDate": "2024-12-21T15:02:40Z", "annotationType": "OTHER", - "comment": "PkgID: libcom_err@1.47.0-r5" + "comment": "PkgID: libcom_err@1.48.0-r5" }, { "annotator": "Tool: trivy-0.58.0", diff --git a/backend/unittests/import_observations/parsers/spdx/files/no_observation.json b/backend/unittests/import_observations/parsers/spdx/files/no_observation.json index 45fb5b737..02ed1c9f5 100644 --- a/backend/unittests/import_observations/parsers/spdx/files/no_observation.json +++ b/backend/unittests/import_observations/parsers/spdx/files/no_observation.json @@ -2,8 +2,8 @@ "spdxVersion": "SPDX-2.3", "dataLicense": "CC0-1.0", "SPDXID": "SPDXRef-DOCUMENT", - "name": "maibornwolff/secobserve-backend:1.20.0", - "documentNamespace": "http://aquasecurity.github.io/trivy/container_image/maibornwolff/secobserve-backend:1.20.0-b3fdfb29-cea7-4576-875b-9cd70d2552ba", + "name": "ghcr.io/secobserve/secobserve-backend:1.20.0", + "documentNamespace": "http://aquasecurity.github.io/trivy/container_image/ghcr.io/secobserve/secobserve-backend:1.20.0-b3fdfb29-cea7-4576-875b-9cd70d2552ba", "creationInfo": { "creators": [ "Organization: aquasecurity", diff --git a/backend/unittests/import_observations/parsers/spdx/test_parser.py b/backend/unittests/import_observations/parsers/spdx/test_parser.py index d2f887b9b..283c30a7d 100644 --- a/backend/unittests/import_observations/parsers/spdx/test_parser.py +++ b/backend/unittests/import_observations/parsers/spdx/test_parser.py @@ -1,6 +1,7 @@ from os import path from unittest import TestCase +from application.core.models import Product from application.import_observations.parsers.spdx.parser import SPDXParser from application.import_observations.services.parser_detector import detect_parser @@ -12,28 +13,31 @@ def test_no_observation(self): self.assertEqual("SPDX", parser.name) self.assertTrue(SPDXParser, isinstance(parser_instance, SPDXParser)) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) self.assertEqual(0, len(observations)) - license_components = parser_instance.get_license_components(data) + license_components, scanner = parser_instance.get_license_components(data) + + self.assertEqual("trivy-0.58.0", scanner) self.assertEqual(0, len(license_components)) def test_multiple_observations(self): - with open( - path.dirname(__file__) + "/files/multiple_observations.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/multiple_observations.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("SPDX", parser.name) self.assertTrue(SPDXParser, isinstance(parser_instance, SPDXParser)) - observations = parser_instance.get_observations(data) - license_components = parser_instance.get_license_components(data) + observations, scanner = parser_instance.get_observations(data, 
Product(name="product"), None) + license_components, scanner = parser_instance.get_license_components(data) + + self.assertEqual("trivy-0.58.0", scanner) self.assertEqual(0, len(observations)) self.assertEqual(124, len(license_components)) license_component = license_components[1] - self.assertEqual("", license_component.unsaved_license) + self.assertEqual([], license_component.unsaved_declared_licenses) + self.assertEqual([], license_component.unsaved_concluded_licenses) self.assertEqual(".python-rundeps", license_component.component_name) self.assertEqual("20241001.223602", license_component.component_version) self.assertEqual( @@ -41,7 +45,7 @@ def test_multiple_observations(self): license_component.component_purl, ) dependencies = """alpine:3.20.3 --> .python-rundeps:20241001.223602 -maibornwolff/secobserve-backend:1.20.0 --> alpine:3.20.3""" +ghcr.io/secobserve/secobserve-backend:1.20.0 --> alpine:3.20.3""" self.assertEqual(dependencies, license_component.component_dependencies) self.assertEqual(1, len(license_component.unsaved_evidences)) self.assertEqual("Package", license_component.unsaved_evidences[0][0]) @@ -51,14 +55,15 @@ def test_multiple_observations(self): ) license_component = license_components[2] - self.assertEqual("BSD-3-Clause", license_component.unsaved_license) + self.assertEqual([], license_component.unsaved_declared_licenses) + self.assertEqual(["BSD-3-Clause"], license_component.unsaved_concluded_licenses) self.assertEqual("Django", license_component.component_name) self.assertEqual("", license_component.component_version) self.assertEqual( "pkg:pypi/django@5.1.2", license_component.component_purl, ) - dependencies = """maibornwolff/secobserve-backend:1.20.0 --> Django""" + dependencies = """ghcr.io/secobserve/secobserve-backend:1.20.0 --> Django""" self.assertEqual(dependencies, license_component.component_dependencies) self.assertEqual(1, len(license_component.unsaved_evidences)) self.assertEqual("Package", license_component.unsaved_evidences[0][0]) @@ -68,19 +73,25 @@ def test_multiple_observations(self): ) license_component = license_components[3] - self.assertEqual("MIT", license_component.unsaved_license) + self.assertEqual(["MIT"], license_component.unsaved_declared_licenses) + self.assertEqual([], license_component.unsaved_concluded_licenses) self.assertEqual("PyJWT", license_component.component_name) self.assertEqual("2.9.0", license_component.component_version) license_component = license_components[4] - self.assertEqual("", license_component.unsaved_license) + self.assertEqual([], license_component.unsaved_declared_licenses) + self.assertEqual([], license_component.unsaved_concluded_licenses) self.assertEqual("PyMySQL", license_component.component_name) self.assertEqual("1.1.1", license_component.component_version) license_component = license_components[89] self.assertEqual( - "Artistic-1.0-Perl AND GPL-1.0-or-later", - license_component.unsaved_license, + ["Artistic-1.0-Perl AND GPL-1.0-or-later"], + license_component.unsaved_declared_licenses, + ) + self.assertEqual( + ["Artistic-1.0-Perl AND GPL-1.0-or-later"], + license_component.unsaved_concluded_licenses, ) self.assertEqual("perl", license_component.component_name) self.assertEqual("5.38.2-r0", license_component.component_version) @@ -92,7 +103,7 @@ def test_multiple_observations(self): alpine:3.20.3 --> mysql-client:10.11.8-r0 alpine:3.20.3 --> perl:5.38.2-r0 alpine:3.20.3 --> postgresql-libs:20241014.093848 -maibornwolff/secobserve-backend:1.20.0 --> alpine:3.20.3 
+ghcr.io/secobserve/secobserve-backend:1.20.0 --> alpine:3.20.3 mariadb-client:10.11.8-r0 --> perl:5.38.2-r0 mysql-client:10.11.8-r0 --> mariadb-client:10.11.8-r0 postgresql-libs:20241014.093848 --> mysql-client:10.11.8-r0""" diff --git a/backend/unittests/import_observations/parsers/trivy_operator_prometheus/test_parser.py b/backend/unittests/import_observations/parsers/trivy_operator_prometheus/test_parser.py index 99752d011..2c8d6e387 100644 --- a/backend/unittests/import_observations/parsers/trivy_operator_prometheus/test_parser.py +++ b/backend/unittests/import_observations/parsers/trivy_operator_prometheus/test_parser.py @@ -5,6 +5,7 @@ import requests +from application.core.models import Product from application.import_observations.models import Api_Configuration from application.import_observations.parsers.trivy_operator_prometheus.parser import ( TrivyOperatorPrometheus, @@ -12,9 +13,7 @@ class TestTrivyOperatorPrometheusParser(TestCase): - @patch( - "application.import_observations.parsers.trivy_operator_prometheus.parser.requests" - ) + @patch("application.import_observations.parsers.trivy_operator_prometheus.parser.requests") def test_invalid_connection(self, mock_requests): parser = TrivyOperatorPrometheus() @@ -29,14 +28,10 @@ def test_invalid_connection(self, mock_requests): self.assertIn("Cannot access Prometheus", messages[0]) self.assertFalse(data) - @patch( - "application.import_observations.parsers.trivy_operator_prometheus.parser.requests" - ) + @patch("application.import_observations.parsers.trivy_operator_prometheus.parser.requests") def test_valid_connection(self, mock_requests): parser = TrivyOperatorPrometheus() - with open( - path.dirname(__file__) + "/files/trivy_vulnerability_id.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/trivy_vulnerability_id.json") as testfile: json_data = json.load(testfile) mock_response = MagicMock() @@ -61,9 +56,7 @@ def test_invalid_format_json(self): self.assertFalse(data) def test_no_prometheus_endpoint_json(self): - with open( - path.dirname(__file__) + "/files/no_prometheus_endpoint.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/no_prometheus_endpoint.json") as testfile: parser = TrivyOperatorPrometheus() check, messages, data = parser.check_format(testfile) @@ -73,9 +66,7 @@ def test_no_prometheus_endpoint_json(self): self.assertFalse(data) def test_invalid_metric_endpoint_json(self): - with open( - path.dirname(__file__) + "/files/invalid_metric_endpoint.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/invalid_metric_endpoint.json") as testfile: parser = TrivyOperatorPrometheus() check, messages, data = parser.check_format(testfile) @@ -85,15 +76,13 @@ def test_invalid_metric_endpoint_json(self): self.assertFalse(data) def test_compliance(self): - with open( - path.dirname(__file__) + "/files/trivy_compliance_info.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/trivy_compliance_info.json") as testfile: parser = TrivyOperatorPrometheus() - parser.api_configuration = Api_Configuration( - base_url="https://prometheus.example.com" - ) - observations = parser.get_observations(json.load(testfile)) + parser.api_configuration = Api_Configuration(base_url="https://prometheus.example.com") + observations, scanner = parser.get_observations(json.load(testfile), Product(name="product"), None) + + self.assertEqual("Trivy Operator Prometheus", scanner) self.assertEqual(1, len(observations)) self.assertEqual( @@ -111,9 +100,7 @@ def test_compliance(self): 
description, observations[0].description, ) - self.assertEqual( - "trivy-system", observations[0].origin_kubernetes_namespace - ) + self.assertEqual("trivy-system", observations[0].origin_kubernetes_namespace) self.assertEqual("", observations[0].origin_kubernetes_resource_type) self.assertEqual( "", @@ -121,15 +108,13 @@ def test_compliance(self): ) def test_configaudits(self): - with open( - path.dirname(__file__) + "/files/trivy_configaudits_info.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/trivy_configaudits_info.json") as testfile: parser = TrivyOperatorPrometheus() - parser.api_configuration = Api_Configuration( - base_url="https://prometheus.example.com" - ) - observations = parser.get_observations(json.load(testfile)) + parser.api_configuration = Api_Configuration(base_url="https://prometheus.example.com") + observations, scanner = parser.get_observations(json.load(testfile), Product(name="product"), None) + + self.assertEqual("Trivy Operator Prometheus", scanner) self.assertEqual(1, len(observations)) self.assertEqual( @@ -148,24 +133,20 @@ def test_configaudits(self): observations[0].description, ) self.assertEqual("kube-system", observations[0].origin_kubernetes_namespace) - self.assertEqual( - "DaemonSet", observations[0].origin_kubernetes_resource_type - ) + self.assertEqual("DaemonSet", observations[0].origin_kubernetes_resource_type) self.assertEqual( "kube-proxy", observations[0].origin_kubernetes_resource_name, ) def test_exposedsecrets(self): - with open( - path.dirname(__file__) + "/files/trivy_exposedsecrets_info.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/trivy_exposedsecrets_info.json") as testfile: parser = TrivyOperatorPrometheus() - parser.api_configuration = Api_Configuration( - base_url="https://prometheus.example.com" - ) - observations = parser.get_observations(json.load(testfile)) + parser.api_configuration = Api_Configuration(base_url="https://prometheus.example.com") + observations, scanner = parser.get_observations(json.load(testfile), Product(name="product"), None) + + self.assertEqual("Trivy Operator Prometheus", scanner) self.assertEqual(1, len(observations)) self.assertEqual("Asymmetric Private Key", observations[0].title) @@ -175,34 +156,27 @@ def test_exposedsecrets(self): observations[0].origin_docker_image_name, ) self.assertEqual("latest-no-vault", observations[0].origin_docker_image_tag) - self.assertEqual( - "/var/tmp/helpers/RSAprivatekey.pem", observations[0].origin_source_file - ) + self.assertEqual("/var/tmp/helpers/RSAprivatekey.pem", observations[0].origin_source_file) self.assertEqual("Trivy Operator", observations[0].scanner) self.assertEqual( "", observations[0].description, ) self.assertEqual("test", observations[0].origin_kubernetes_namespace) - self.assertEqual( - "ReplicaSet", observations[0].origin_kubernetes_resource_type - ) + self.assertEqual("ReplicaSet", observations[0].origin_kubernetes_resource_type) self.assertEqual( "wrongsecrets-67cd6df7d", observations[0].origin_kubernetes_resource_name, ) def test_rbacassessments(self): - with open( - path.dirname(__file__) + "/files/trivy_rbacassessments_info.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/trivy_rbacassessments_info.json") as testfile: parser = TrivyOperatorPrometheus() - parser.api_configuration = Api_Configuration( - base_url="https://prometheus.example.com" - ) - observations = parser.get_observations(json.load(testfile)) + parser.api_configuration = 
Api_Configuration(base_url="https://prometheus.example.com") + observations, scanner = parser.get_observations(json.load(testfile), Product(name="product"), None) + self.assertEqual("Trivy Operator Prometheus", scanner) self.assertEqual(2, len(observations)) self.assertEqual( @@ -242,9 +216,7 @@ def test_rbacassessments(self): description, observations[1].description, ) - self.assertEqual( - "kubernetes-dashboard", observations[1].origin_kubernetes_namespace - ) + self.assertEqual("kubernetes-dashboard", observations[1].origin_kubernetes_namespace) self.assertEqual("Role", observations[1].origin_kubernetes_resource_type) self.assertEqual( "kubernetes-dashboard", @@ -252,24 +224,20 @@ def test_rbacassessments(self): ) def test_vulnerabilities(self): - with open( - path.dirname(__file__) + "/files/trivy_vulnerability_id.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/trivy_vulnerability_id.json") as testfile: parser = TrivyOperatorPrometheus() - parser.api_configuration = Api_Configuration( - base_url="https://prometheus.example.com" - ) - observations = parser.get_observations(json.load(testfile)) + parser.api_configuration = Api_Configuration(base_url="https://prometheus.example.com") + observations, scanner = parser.get_observations(json.load(testfile), Product(name="product"), None) + + self.assertEqual("Trivy Operator Prometheus", scanner) self.assertEqual(2, len(observations)) self.assertEqual("CVE-2023-1111", observations[0].title) self.assertEqual("Medium", observations[0].parser_severity) self.assertEqual("6.1", observations[0].numerical_severity) self.assertEqual("CVE-2023-1111", observations[0].vulnerability_id) - self.assertEqual( - "registry.io/namespace/image", observations[0].origin_docker_image_name - ) + self.assertEqual("registry.io/namespace/image", observations[0].origin_docker_image_name) self.assertEqual("v0.26.0", observations[0].origin_docker_image_tag) self.assertEqual("6.1", observations[0].cvss3_score) self.assertEqual("recoure.org/x/net", observations[0].origin_component_name) @@ -283,9 +251,5 @@ def test_vulnerabilities(self): observations[0].description, ) self.assertEqual("default", observations[0].origin_kubernetes_namespace) - self.assertEqual( - "StatefulSet", observations[0].origin_kubernetes_resource_type - ) - self.assertEqual( - "recource_name", observations[0].origin_kubernetes_resource_name - ) + self.assertEqual("StatefulSet", observations[0].origin_kubernetes_resource_type) + self.assertEqual("recource_name", observations[0].origin_kubernetes_resource_name) diff --git a/backend/unittests/import_observations/parsers/zap/__init__.py b/backend/unittests/import_observations/parsers/zap/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/import_observations/parsers/owasp_zap/files/owasp_zap.json b/backend/unittests/import_observations/parsers/zap/files/owasp_zap.json similarity index 100% rename from backend/unittests/import_observations/parsers/owasp_zap/files/owasp_zap.json rename to backend/unittests/import_observations/parsers/zap/files/owasp_zap.json diff --git a/backend/unittests/import_observations/parsers/owasp_zap/test_parser.py b/backend/unittests/import_observations/parsers/zap/test_parser.py similarity index 85% rename from backend/unittests/import_observations/parsers/owasp_zap/test_parser.py rename to backend/unittests/import_observations/parsers/zap/test_parser.py index d39d90e87..a22402e4c 100644 --- a/backend/unittests/import_observations/parsers/owasp_zap/test_parser.py +++ 
b/backend/unittests/import_observations/parsers/zap/test_parser.py @@ -1,19 +1,22 @@ from os import path from unittest import TestCase +from application.core.models import Product from application.core.types import Severity from application.import_observations.parsers.zap.parser import ZAPParser from application.import_observations.services.parser_detector import detect_parser class TestZAPParserParser(TestCase): - def test_owasp_zap(self): + def test_zap(self): with open(path.dirname(__file__) + "/files/owasp_zap.json") as testfile: parser, parser_instance, data = detect_parser(testfile) self.assertEqual("ZAP", parser.name) - self.assertTrue(isinstance(parser_instance, ZAPParser)) + self.assertIsInstance(parser_instance, ZAPParser) - observations = parser_instance.get_observations(data) + observations, scanner = parser_instance.get_observations(data, Product(name="product"), None) + + self.assertEqual("OWASP ZAP / 2.12.0", scanner) self.assertEqual(5, len(observations)) observation = observations[0] @@ -46,9 +49,7 @@ def test_owasp_zap(self): ) self.assertEqual(Severity.SEVERITY_NONE, observation.parser_severity) self.assertEqual("200", observation.cwe) - self.assertEqual( - "https://example-backend.example.com", observation.origin_endpoint_url - ) + self.assertEqual("https://example-backend.example.com", observation.origin_endpoint_url) self.assertEqual("Alert", observation.unsaved_evidences[0][0]) self.assertIn( '"alert": "Information Disclosure - Suspicious Comments"', diff --git a/backend/unittests/import_observations/scanners/__init__.py b/backend/unittests/import_observations/scanners/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/import_observations/scanners/test_osv_scanner.py b/backend/unittests/import_observations/scanners/test_osv_scanner.py new file mode 100644 index 000000000..1595f0f28 --- /dev/null +++ b/backend/unittests/import_observations/scanners/test_osv_scanner.py @@ -0,0 +1,410 @@ +from datetime import datetime, timezone +from json import loads +from unittest.mock import call, patch + +from django.core.management import call_command + +from application.commons.models import Settings +from application.core.models import Branch, Observation, Product, Service +from application.import_observations.models import Parser +from application.import_observations.parsers.osv.parser import ( + OSV_Component, + OSV_Vulnerability, +) +from application.import_observations.scanners.osv_scanner import ( + scan_branch, + scan_license_components, + scan_product, +) +from application.import_observations.services.import_observations import ( + ImportParameters, +) +from application.licenses.models import License_Component +from unittests.base_test_case import BaseTestCase + + +class MockResponse: + def __init__(self, filename): + self.filename = filename + self.next_page_token_first_call = True + + def raise_for_status(self): + pass + + def json(self): + if self.filename == "osv_querybatch_next_page_token_first.json": + if self.next_page_token_first_call: + self.next_page_token_first_call = False + else: + self.filename = "osv_querybatch_next_page_token second.json" + + with open(f"unittests/import_observations/services/files/{self.filename}") as file: + return loads(file.read()) + + +class TestImportObservations(BaseTestCase): + def setUp(self): + call_command( + "loaddata", + [ + "unittests/fixtures/initial_license_data.json", + "unittests/fixtures/unittests_fixtures.json", + "unittests/fixtures/unittests_license_fixtures.json", + ], + ) + 
Parser.objects.create(name="OSV (Open Source Vulnerabilities)", type="SCA", source="Other") + + self.product = Product.objects.get(id=1) + + self.license_component = License_Component.objects.get(product=self.product) + self.license_component.component_purl = "pkg:pypi/django@5.1.8" + self.license_component.component_purl_type = "pypi" + self.license_component.save() + + self.branch_main = Branch.objects.get(product=self.product, name="db_branch_internal_main") + self.branch_dev = Branch.objects.get(product=self.product, name="db_branch_internal_dev") + + self.service_frontend = Service.objects.get(product=self.product, name="db_service_internal_frontend") + self.service_backend = Service.objects.get(product=self.product, name="db_service_internal_backend") + + @patch("application.import_observations.scanners.osv_scanner.scan_license_components") + def test_scan_product_no_branch_no_service( + self, + mock_scan_license_components, + ): + self.license_component.branch = None + self.license_component.origin_service = None + self.license_component.save() + + mock_scan_license_components.return_value = (0, 0, 0) + scan_product(self.product) + + expected_calls = [ + call([self.license_component], self.product, None, None), + call([], self.product, self.branch_dev, None), + call([], self.product, self.branch_main, None), + call([], self.product, None, self.service_backend), + call([], self.product, self.branch_dev, self.service_backend), + call([], self.product, self.branch_main, self.service_backend), + call([], self.product, None, self.service_frontend), + call([], self.product, self.branch_dev, self.service_frontend), + call([], self.product, self.branch_main, self.service_frontend), + ] + mock_scan_license_components.assert_has_calls(expected_calls) + + @patch("application.import_observations.scanners.osv_scanner.scan_license_components") + def test_scan_product_branch_no_service( + self, + mock_scan_license_components, + ): + self.license_component.branch = self.branch_dev + self.license_component.origin_service = None + self.license_component.save() + + mock_scan_license_components.return_value = (0, 0, 0) + scan_product(self.product) + + expected_calls = [ + call([], self.product, None, None), + call([self.license_component], self.product, self.branch_dev, None), + call([], self.product, self.branch_main, None), + call([], self.product, None, self.service_backend), + call([], self.product, self.branch_dev, self.service_backend), + call([], self.product, self.branch_main, self.service_backend), + call([], self.product, None, self.service_frontend), + call([], self.product, self.branch_dev, self.service_frontend), + call([], self.product, self.branch_main, self.service_frontend), + ] + mock_scan_license_components.assert_has_calls(expected_calls) + + @patch("application.import_observations.scanners.osv_scanner.scan_license_components") + def test_scan_product_no_branch_but_service( + self, + mock_scan_license_components, + ): + self.license_component.branch = None + self.license_component.origin_service = self.service_frontend + self.license_component.save() + + mock_scan_license_components.return_value = (0, 0, 0) + scan_product(self.product) + + expected_calls = [ + call([], self.product, None, None), + call([], self.product, self.branch_dev, None), + call([], self.product, self.branch_main, None), + call([], self.product, None, self.service_backend), + call([], self.product, self.branch_dev, self.service_backend), + call([], self.product, self.branch_main, self.service_backend), + 
call([self.license_component], self.product, None, self.service_frontend), + call([], self.product, self.branch_dev, self.service_frontend), + call([], self.product, self.branch_main, self.service_frontend), + ] + mock_scan_license_components.assert_has_calls(expected_calls) + + @patch("application.import_observations.scanners.osv_scanner.scan_license_components") + def test_scan_product_branch_and_service( + self, + mock_scan_license_components, + ): + self.license_component.branch = self.branch_main + self.license_component.origin_service = self.service_frontend + self.license_component.save() + + mock_scan_license_components.return_value = (0, 0, 0) + scan_product(self.product) + + expected_calls = [ + call([], self.product, None, None), + call([], self.product, self.branch_dev, None), + call([], self.product, self.branch_main, None), + call([], self.product, None, self.service_backend), + call([], self.product, self.branch_dev, self.service_backend), + call([], self.product, self.branch_main, self.service_backend), + call([], self.product, None, self.service_frontend), + call([], self.product, self.branch_dev, self.service_frontend), + call([self.license_component], self.product, self.branch_main, self.service_frontend), + ] + mock_scan_license_components.assert_has_calls(expected_calls) + + @patch("application.import_observations.scanners.osv_scanner.scan_license_components") + def test_scan_branch( + self, + mock_scan_license_components, + ): + self.license_component.branch = self.branch_main + self.license_component.origin_service = self.service_frontend + self.license_component.save() + + mock_scan_license_components.return_value = (0, 0, 0) + scan_branch(self.branch_main) + + expected_calls = [ + call([], self.product, self.branch_main, None), + call([], self.product, self.branch_main, self.service_backend), + call([self.license_component], self.product, self.branch_main, self.service_frontend), + ] + mock_scan_license_components.assert_has_calls(expected_calls) + + @patch("requests.post") + @patch("application.import_observations.scanners.osv_scanner.OSVParser.get_observations") + @patch("application.import_observations.scanners.osv_scanner._process_data") + def test_scan_license_components_no_license_components( + self, mock_process_data, mock_get_observations, mock_requests_post + ): + product = Product.objects.get(id=1) + + numbers = scan_license_components([], product, None, None) + + self.assertEqual((0, 0, 0), numbers) + mock_requests_post.assert_not_called() + mock_get_observations.assert_not_called() + mock_process_data.assert_not_called() + + @patch("requests.post") + @patch("application.import_observations.scanners.osv_scanner.OSVParser.get_observations") + @patch("application.import_observations.scanners.osv_scanner._process_data") + @patch("application.import_observations.scanners.osv_scanner.Vulnerability_Check.objects.update_or_create") + def test_scan_license_components_error_length( + self, + mock_vulnerability_check, + mock_process_data, + mock_get_observations, + mock_requests_post, + ): + license_components: list[License_Component] = list(License_Component.objects.all()) + license_components[0].component_purl = "pkg:pypi/django@4.2.11" + license_components[1].component_purl = "pkg:golang/golang.org/x/net@v0.25.1-0.20240603202750-6249541f2a6c" + product = Product.objects.get(id=1) + branch = Branch.objects.get(id=1) + + response = MockResponse("osv_querybatch_error_length.json") + mock_requests_post.return_value = response + + with self.assertRaises(Exception) 
as e: + scan_license_components(license_components, product, branch, None) + + self.assertEqual( + "Number of results is different than number of components", + str(e.exception), + ) + + mock_requests_post.assert_called_with( + url="https://api.osv.dev/v1/querybatch", + data='{"queries": [{"package": {"purl": "pkg:pypi/django@4.2.11"}, "page_token": null}, {"package": {"purl": "pkg:golang/golang.org/x/net@v0.25.1-0.20240603202750-6249541f2a6c"}, "page_token": null}]}', + timeout=300, + ) + + mock_get_observations.assert_not_called() + mock_process_data.assert_not_called() + mock_vulnerability_check.assert_not_called() + + @patch("requests.post") + @patch("application.import_observations.scanners.osv_scanner.OSVParser.get_observations") + @patch("application.import_observations.scanners.osv_scanner._process_data") + @patch("application.import_observations.scanners.osv_scanner.Vulnerability_Check.objects.update_or_create") + def test_scan_license_components_error_next_page_token( + self, + mock_vulnerability_check, + mock_process_data, + mock_get_observations, + mock_requests_post, + ): + license_components: list[License_Component] = list(License_Component.objects.all()) + license_components[0].component_purl = "pkg:pypi/django@4.2.11" + license_components[1].component_purl = "pkg:golang/golang.org/x/net@v0.25.1-0.20240603202750-6249541f2a6c" + product = Product.objects.get(id=1) + branch = Branch.objects.get(id=1) + + response = MockResponse("osv_querybatch_next_page_token_first.json") + mock_requests_post.return_value = response + mock_get_observations.return_value = [], "OSV (Open Source Vulnerabilities)" + + scan_license_components(license_components, product, branch, None) + + mock_requests_post.assert_has_calls( + [ + call( + url="https://api.osv.dev/v1/querybatch", + data='{"queries": [{"package": {"purl": "pkg:pypi/django@4.2.11"}, "page_token": null}, {"package": {"purl": "pkg:golang/golang.org/x/net@v0.25.1-0.20240603202750-6249541f2a6c"}, "page_token": null}]}', + timeout=300, + ), + call( + url="https://api.osv.dev/v1/querybatch", + data='{"queries": [{"package": {"purl": "pkg:pypi/django@4.2.11"}, "page_token": "token for query 1"}]}', + timeout=300, + ), + ] + ) + + mock_get_observations.assert_has_calls( + [ + call( + [ + OSV_Component( + license_component=license_components[0], + vulnerabilities={ + OSV_Vulnerability( + id="GHSA-795c-9xpc-xw6g", + modified=datetime(2024, 8, 7, 20, 1, 58, 452618, tzinfo=timezone.utc), + ), + OSV_Vulnerability( + id="GHSA-5hgc-2vfp-mqvc", + modified=datetime(2024, 10, 30, 19, 23, 43, 662562, tzinfo=timezone.utc), + ), + }, + ), + OSV_Component( + license_component=license_components[1], + vulnerabilities={ + OSV_Vulnerability( + id="GO-2024-3333", modified=datetime(2024, 12, 20, 20, 37, 27, tzinfo=timezone.utc) + ) + }, + ), + OSV_Component( + license_component=license_components[0], + vulnerabilities={ + OSV_Vulnerability( + id="CVE-2025-00001", + modified=datetime(2024, 10, 30, 19, 23, 43, 662562, tzinfo=timezone.utc), + ) + }, + ), + ], + product, + branch, + ) + ] + ) + mock_process_data.assert_called_once() + mock_vulnerability_check.assert_called_once() + + @patch("requests.post") + @patch("application.import_observations.scanners.osv_scanner.OSVParser.get_observations") + @patch("application.import_observations.scanners.osv_scanner._process_data") + @patch("application.import_observations.scanners.osv_scanner.Vulnerability_Check.objects.update_or_create") + def test_scan_license_components_success( + self, + mock_vulnerability_check, + 
mock_process_data, + mock_get_observations, + mock_requests_post, + ): + license_components: list[License_Component] = list(License_Component.objects.all()) + license_components[0].component_purl = "pkg:pypi/django@4.2.11" + license_components[1].component_purl = "pkg:golang/golang.org/x/net@v0.25.1-0.20240603202750-6249541f2a6c" + product = Product.objects.get(id=1) + branch = Branch.objects.get(id=1) + service = Service.objects.get(id=1) + observation = Observation.objects.get(id=1) + + response = MockResponse("osv_querybatch_success.json") + mock_requests_post.return_value = response + mock_process_data.return_value = (1, 2, 3) + mock_get_observations.return_value = [observation], "OSV (Open Source Vulnerabilities)" + + numbers = scan_license_components(license_components, product, branch, service) + + self.assertEqual((1, 2, 3), numbers) + + mock_requests_post.assert_called_with( + url="https://api.osv.dev/v1/querybatch", + data='{"queries": [{"package": {"purl": "pkg:pypi/django@4.2.11"}, "page_token": null}, {"package": {"purl": "pkg:golang/golang.org/x/net@v0.25.1-0.20240603202750-6249541f2a6c"}, "page_token": null}]}', + timeout=300, + ) + + osv_components = [ + OSV_Component( + license_component=license_components[0], + vulnerabilities={ + OSV_Vulnerability( + id="GHSA-795c-9xpc-xw6g", + modified=datetime(2024, 8, 7, 20, 1, 58, 452618, timezone.utc), + ), + OSV_Vulnerability( + id="GHSA-5hgc-2vfp-mqvc", + modified=datetime(2024, 10, 30, 19, 23, 43, 662562, timezone.utc), + ), + }, + ), + OSV_Component( + license_component=license_components[1], + vulnerabilities={ + OSV_Vulnerability( + id="GO-2024-3333", + modified=datetime(2024, 12, 20, 20, 37, 27, 0, timezone.utc), + ), + }, + ), + ] + + mock_get_observations.assert_called_with(osv_components, product, branch) + mock_process_data.assert_called_with( + ImportParameters( + product=product, + branch=branch, + service=service, + parser=Parser.objects.get(name="OSV (Open Source Vulnerabilities)"), + filename="", + api_configuration_name="", + docker_image_name_tag="", + endpoint_url="", + kubernetes_cluster="", + imported_observations=[observation], + ), + Settings.load(), + ) + mock_vulnerability_check.assert_called_with( + product=product, + branch=branch, + service=service, + filename="", + api_configuration_name="", + defaults={ + "last_import_observations_new": 1, + "last_import_observations_updated": 2, + "last_import_observations_resolved": 3, + "scanner": "OSV (Open Source Vulnerabilities)", + }, + ) diff --git a/backend/unittests/import_observations/services/files/osv_querybatch_error_length.json b/backend/unittests/import_observations/services/files/osv_querybatch_error_length.json new file mode 100644 index 000000000..e75815aad --- /dev/null +++ b/backend/unittests/import_observations/services/files/osv_querybatch_error_length.json @@ -0,0 +1,16 @@ +{ + "results": [ + { + "vulns": [ + { + "id": "GHSA-5hgc-2vfp-mqvc", + "modified": "2024-10-30T19:23:43.662562Z" + }, + { + "id": "GHSA-795c-9xpc-xw6g", + "modified": "2024-08-07T20:01:58.452618Z" + } + ] + } + ] +} diff --git a/backend/unittests/import_observations/services/files/osv_querybatch_next_page_token second.json b/backend/unittests/import_observations/services/files/osv_querybatch_next_page_token second.json new file mode 100644 index 000000000..196feb5be --- /dev/null +++ b/backend/unittests/import_observations/services/files/osv_querybatch_next_page_token second.json @@ -0,0 +1,12 @@ +{ + "results": [ + { + "vulns": [ + { + "id": "CVE-2025-00001", + "modified": 
"2024-10-30T19:23:43.662562Z" + } + ] + } + ] +} diff --git a/backend/unittests/import_observations/services/files/osv_querybatch_next_page_token_first.json b/backend/unittests/import_observations/services/files/osv_querybatch_next_page_token_first.json new file mode 100644 index 000000000..60cab10da --- /dev/null +++ b/backend/unittests/import_observations/services/files/osv_querybatch_next_page_token_first.json @@ -0,0 +1,24 @@ +{ + "results": [ + { + "vulns": [ + { + "id": "GHSA-5hgc-2vfp-mqvc", + "modified": "2024-10-30T19:23:43.662562Z" + }, + { + "id": "GHSA-795c-9xpc-xw6g", + "modified": "2024-08-07T20:01:58.452618Z" + } + ], + "next_page_token": "token for query 1" }, + { + "vulns": [ + { + "id": "GO-2024-3333", + "modified": "2024-12-20T20:37:27Z" + } + ] + } + ] +} diff --git a/backend/unittests/import_observations/services/files/osv_querybatch_success.json b/backend/unittests/import_observations/services/files/osv_querybatch_success.json new file mode 100644 index 000000000..c6ab3ffe1 --- /dev/null +++ b/backend/unittests/import_observations/services/files/osv_querybatch_success.json @@ -0,0 +1,24 @@ +{ + "results": [ + { + "vulns": [ + { + "id": "GHSA-5hgc-2vfp-mqvc", + "modified": "2024-10-30T19:23:43.662562Z" + }, + { + "id": "GHSA-795c-9xpc-xw6g", + "modified": "2024-08-07T20:01:58.452618Z" + } + ] + }, + { + "vulns": [ + { + "id": "GO-2024-3333", + "modified": "2024-12-20T20:37:27Z" + } + ] + } + ] +} diff --git a/backend/unittests/import_observations/services/test_import_observations.py b/backend/unittests/import_observations/services/test_import_observations.py index 3a01099a8..dafd31c56 100644 --- a/backend/unittests/import_observations/services/test_import_observations.py +++ b/backend/unittests/import_observations/services/test_import_observations.py @@ -2,6 +2,7 @@ from django.core.files.base import File from django.core.management import call_command +from rest_framework.exceptions import ValidationError from application.access_control.models import User from application.commons.models import Settings @@ -12,11 +13,21 @@ Observation_Log, Product, Reference, + Service, ) from application.core.types import Severity, Status -from application.import_observations.models import Parser, Vulnerability_Check +from application.import_observations.models import ( + Api_Configuration, + Parser, + Vulnerability_Check, +) +from application.import_observations.parsers.dependency_track.parser import ( + DependencyTrack, +) from application.import_observations.services.import_observations import ( + ApiImportParameters, FileUploadParameters, + api_import_observations, file_upload_observations, ) from application.licenses.models import ( @@ -30,7 +41,7 @@ from unittests.base_test_case import BaseTestCase -class TestImportObservations(BaseTestCase): +class TestFileUploadObservations(BaseTestCase): def setUp(self): Observation.objects.all().delete() Observation_Log.objects.all().delete() @@ -40,31 +51,19 @@ def setUp(self): super().setUp() @patch("application.commons.services.global_request.get_current_request") - @patch( - "application.import_observations.services.import_observations.check_security_gate" - ) - @patch( - "application.import_observations.services.import_observations.set_repository_default_branch" - ) - @patch( - "application.import_observations.services.import_observations.push_observations_to_issue_tracker" - ) - @patch( - "application.import_observations.services.import_observations.epss_apply_observation" - ) - @patch( - 
"application.import_observations.services.import_observations.find_potential_duplicates" - ) - @patch( - "application.vex.services.vex_engine.VEX_Engine.apply_vex_statements_for_observation" - ) + @patch("application.import_observations.services.import_observations.check_security_gate") + @patch("application.import_observations.services.import_observations.push_observations_to_issue_tracker") + @patch("application.import_observations.services.import_observations.apply_epss") + @patch("application.import_observations.services.import_observations.apply_exploit_information") + @patch("application.import_observations.services.import_observations.find_potential_duplicates") + @patch("application.vex.services.vex_engine.VEX_Engine.apply_vex_statements_for_observation") def test_file_upload_observations_with_branch( self, mock_apply_vex_statements_for_observation, mock_find_potential_duplicates, - mock_epss_apply_observation, + mock_apply_exploit_information, + mock_apply_epss, mock_push_observations_to_issue_tracker, - mock_set_repository_default_branch, mock_check_security_gate, mock_get_current_request, ): @@ -80,70 +79,85 @@ def test_file_upload_observations_with_branch( product = Product.objects.get(id=1) mock_check_security_gate.assert_has_calls([call(product), call(product)]) - mock_set_repository_default_branch.assert_has_calls( - [call(product), call(product)] - ) self.assertEqual(mock_push_observations_to_issue_tracker.call_count, 2) - self.assertEqual(mock_epss_apply_observation.call_count, 4) + self.assertEqual(mock_apply_epss.call_count, 4) + self.assertEqual(mock_apply_exploit_information.call_count, 4) self.assertEqual(mock_find_potential_duplicates.call_count, 2) self.assertEqual(mock_apply_vex_statements_for_observation.call_count, 4) @patch("application.commons.services.global_request.get_current_request") - @patch( - "application.import_observations.services.import_observations.check_security_gate" - ) - @patch( - "application.import_observations.services.import_observations.set_repository_default_branch" - ) - @patch( - "application.import_observations.services.import_observations.push_observations_to_issue_tracker" - ) - @patch( - "application.import_observations.services.import_observations.epss_apply_observation" - ) - @patch( - "application.import_observations.services.import_observations.find_potential_duplicates" - ) - @patch( - "application.vex.services.vex_engine.VEX_Engine.apply_vex_statements_for_observation" - ) + @patch("application.import_observations.services.import_observations.check_security_gate") + @patch("application.import_observations.services.import_observations.push_observations_to_issue_tracker") + @patch("application.import_observations.services.import_observations.apply_epss") + @patch("application.import_observations.services.import_observations.apply_exploit_information") + @patch("application.import_observations.services.import_observations.find_potential_duplicates") + @patch("application.vex.services.vex_engine.VEX_Engine.apply_vex_statements_for_observation") def test_file_upload_observations_without_branch( self, mock_apply_vex_statements_for_observation, mock_find_potential_duplicates, - mock_epss_apply_observation, + mock_apply_exploit_information, + mock_apply_epss, mock_push_observations_to_issue_tracker, - mock_set_repository_default_branch, mock_check_security_gate, mock_get_current_request, ): mock_get_current_request.return_value = RequestMock(User.objects.get(id=1)) + product = Product.objects.get(id=1) + 
product.repository_default_branch = None + product.save() + self._file_upload_observations(None, None, None, None, None) - product = Product.objects.get(id=1) mock_check_security_gate.assert_has_calls([call(product), call(product)]) - mock_set_repository_default_branch.assert_has_calls( - [call(product), call(product)] - ) self.assertEqual(mock_push_observations_to_issue_tracker.call_count, 2) - self.assertEqual(mock_epss_apply_observation.call_count, 4) + self.assertEqual(mock_apply_epss.call_count, 4) + self.assertEqual(mock_apply_exploit_information.call_count, 4) self.assertEqual(mock_find_potential_duplicates.call_count, 2) self.assertEqual(mock_apply_vex_statements_for_observation.call_count, 4) - def _file_upload_observations( - self, branch, service, docker_image_name_tag, endpoint_url, kubernetes_cluster + @patch("application.commons.services.global_request.get_current_request") + @patch("application.import_observations.services.import_observations.check_security_gate") + @patch("application.import_observations.services.import_observations.push_observations_to_issue_tracker") + @patch("application.import_observations.services.import_observations.apply_epss") + @patch("application.import_observations.services.import_observations.apply_exploit_information") + @patch("application.import_observations.services.import_observations.find_potential_duplicates") + @patch("application.vex.services.vex_engine.VEX_Engine.apply_vex_statements_for_observation") + def test_file_upload_observations_different_branch( + self, + mock_apply_vex_statements_for_observation, + mock_find_potential_duplicates, + mock_apply_exploit_information, + mock_apply_epss, + mock_push_observations_to_issue_tracker, + mock_check_security_gate, + mock_get_current_request, ): + mock_get_current_request.return_value = RequestMock(User.objects.get(id=1)) + product = Product.objects.get(id=1) + + self._file_upload_observations(None, None, None, None, None) + + mock_check_security_gate.assert_not_called() + self.assertEqual(mock_push_observations_to_issue_tracker.call_count, 2) + self.assertEqual(mock_apply_epss.call_count, 4) + self.assertEqual(mock_apply_exploit_information.call_count, 4) + self.assertEqual(mock_find_potential_duplicates.call_count, 2) + self.assertEqual(mock_apply_vex_statements_for_observation.call_count, 4) + + def _file_upload_observations(self, branch, service_name, docker_image_name_tag, endpoint_url, kubernetes_cluster): # --- First import --- file_upload_parameters = FileUploadParameters( product=Product.objects.get(id=1), branch=branch, file=File(open("unittests/fixtures/data_1/bandit.sarif", "r")), - service=service, + service_name=service_name, docker_image_name_tag=docker_image_name_tag, endpoint_url=endpoint_url, kubernetes_cluster=kubernetes_cluster, suppress_licenses=False, + sbom=False, ) ( @@ -169,14 +183,16 @@ def _file_upload_observations( self.assertEqual(observations[0].product, product) self.assertEqual(observations[0].branch, branch) - if service: - self.assertEqual(observations[0].origin_service_name, service) + service = None + if service_name: + service = Service.objects.get(product=product, name=service_name) + self.assertEqual(observations[0].origin_service, service) + self.assertEqual(observations[0].origin_service_name, service_name) else: + self.assertIsNone(observations[0].origin_service) self.assertEqual(observations[0].origin_service_name, "") if docker_image_name_tag: - self.assertEqual( - observations[0].origin_docker_image_name_tag, docker_image_name_tag - ) + 
self.assertEqual(observations[0].origin_docker_image_name_tag, docker_image_name_tag) else: self.assertEqual(observations[0].origin_docker_image_name_tag, "") if endpoint_url: @@ -184,9 +200,7 @@ def _file_upload_observations( else: self.assertEqual(observations[0].origin_endpoint_url, "") if kubernetes_cluster: - self.assertEqual( - observations[0].origin_kubernetes_cluster, kubernetes_cluster - ) + self.assertEqual(observations[0].origin_kubernetes_cluster, kubernetes_cluster) else: self.assertEqual(observations[0].origin_kubernetes_cluster, "") @@ -194,9 +208,7 @@ def _file_upload_observations( self.assertEqual(observations[1].current_status, Status.STATUS_OPEN) self.assertEqual(observations[2].current_status, Status.STATUS_NOT_AFFECTED) - observation_logs = Observation_Log.objects.filter( - observation__product=1 - ).order_by("id") + observation_logs = Observation_Log.objects.filter(observation__product=1).order_by("id") self.assertEqual(len(observation_logs), 4) self.assertEqual(observation_logs[0].observation, observations[0]) @@ -215,9 +227,7 @@ def _file_upload_observations( "Updated by product rule db_product_rule_import", ) - references = Reference.objects.filter(observation__product=product).order_by( - "id" - ) + references = Reference.objects.filter(observation__product=product).order_by("id") self.assertEqual(len(references), 3) self.assertEqual(references[0].observation, observations[0]) @@ -234,6 +244,7 @@ def _file_upload_observations( self.assertEqual(vulnerability_checks[0].product, product) self.assertEqual(vulnerability_checks[0].branch, branch) + self.assertEqual(vulnerability_checks[0].service, service) self.assertEqual(vulnerability_checks[0].filename, "bandit.sarif") self.assertEqual(vulnerability_checks[0].api_configuration_name, "") self.assertEqual(vulnerability_checks[0].scanner, "Bandit") @@ -247,11 +258,12 @@ def _file_upload_observations( product=Product.objects.get(id=1), branch=branch, file=File(open("unittests/fixtures/data_2/bandit.sarif", "r")), - service=service, + service_name=service_name, docker_image_name_tag=docker_image_name_tag, endpoint_url=endpoint_url, kubernetes_cluster=kubernetes_cluster, suppress_licenses=False, + sbom=False, ) ( @@ -277,9 +289,7 @@ def _file_upload_observations( self.assertEqual(observations[1].current_status, Status.STATUS_OPEN) self.assertEqual(observations[2].current_status, Status.STATUS_RESOLVED) - observation_logs = Observation_Log.objects.filter( - observation__product=1 - ).order_by("id") + observation_logs = Observation_Log.objects.filter(observation__product=1).order_by("id") self.assertEqual(len(observation_logs), 7) self.assertEqual(observation_logs[4].observation, observations[1]) @@ -290,20 +300,14 @@ def _file_upload_observations( self.assertEqual(observation_logs[5].observation, observations[0]) self.assertEqual(observation_logs[5].severity, "") self.assertEqual(observation_logs[5].status, Status.STATUS_RESOLVED) - self.assertEqual( - observation_logs[5].comment, "Observation not found in latest scan" - ) + self.assertEqual(observation_logs[5].comment, "Observation not found in latest scan") self.assertEqual(observation_logs[6].observation, observations[2]) self.assertEqual(observation_logs[6].severity, "") self.assertEqual(observation_logs[6].status, Status.STATUS_RESOLVED) - self.assertEqual( - observation_logs[6].comment, "Observation not found in latest scan" - ) + self.assertEqual(observation_logs[6].comment, "Observation not found in latest scan") - references = 
Reference.objects.filter(observation__product=product).order_by( - "id" - ) + references = Reference.objects.filter(observation__product=product).order_by("id") self.assertEqual(len(references), 3) evidences = Evidence.objects.filter(observation__product=product).order_by("id") @@ -314,6 +318,7 @@ def _file_upload_observations( self.assertEqual(vulnerability_checks[0].product, product) self.assertEqual(vulnerability_checks[0].branch, branch) + self.assertEqual(vulnerability_checks[0].service, service) self.assertEqual(vulnerability_checks[0].filename, "bandit.sarif") self.assertEqual(vulnerability_checks[0].api_configuration_name, "") self.assertEqual(vulnerability_checks[0].scanner, "Bandit") @@ -322,39 +327,23 @@ def _file_upload_observations( self.assertEqual(vulnerability_checks[0].last_import_observations_resolved, 1) @patch("application.commons.services.global_request.get_current_request") - @patch( - "application.import_observations.services.import_observations.check_security_gate" - ) - @patch( - "application.import_observations.services.import_observations.set_repository_default_branch" - ) - @patch( - "application.import_observations.services.import_observations.push_observations_to_issue_tracker" - ) - @patch( - "application.import_observations.services.import_observations.epss_apply_observation" - ) - @patch( - "application.import_observations.services.import_observations.find_potential_duplicates" - ) - @patch( - "application.vex.services.vex_engine.VEX_Engine.apply_vex_statements_for_observation" - ) - @patch( - "application.import_observations.parsers.cyclone_dx.parser.CycloneDXParser.get_license_components" - ) - @patch( - "application.import_observations.services.import_observations.process_license_components" - ) + @patch("application.import_observations.services.import_observations.check_security_gate") + @patch("application.import_observations.services.import_observations.push_observations_to_issue_tracker") + @patch("application.import_observations.services.import_observations.apply_epss") + @patch("application.import_observations.services.import_observations.apply_exploit_information") + @patch("application.import_observations.services.import_observations.find_potential_duplicates") + @patch("application.vex.services.vex_engine.VEX_Engine.apply_vex_statements_for_observation") + @patch("application.import_observations.parsers.cyclone_dx.parser.CycloneDXParser.get_license_components") + @patch("application.import_observations.services.import_observations.process_license_components") def test_file_upload_licenses_feature_false( self, mock_process_license_components, mock_get_license_components, mock_apply_vex_statements_for_observation, mock_find_potential_duplicates, - mock_epss_apply_observation, + mock_apply_exploit_information, + mock_apply_epss, mock_push_observations_to_issue_tracker, - mock_set_repository_default_branch, mock_check_security_gate, mock_get_current_request, ): @@ -371,6 +360,7 @@ def test_file_upload_licenses_feature_false( "test_endpoint_url", "test_kubernetes_cluster", suppress_licenses=False, + sbom=True, ) self.assertEqual(mock_get_license_components.call_count, 0) @@ -381,39 +371,23 @@ def test_file_upload_licenses_feature_false( settings.save() @patch("application.commons.services.global_request.get_current_request") - @patch( - "application.import_observations.services.import_observations.check_security_gate" - ) - @patch( - "application.import_observations.services.import_observations.set_repository_default_branch" - ) - @patch( - 
"application.import_observations.services.import_observations.push_observations_to_issue_tracker" - ) - @patch( - "application.import_observations.services.import_observations.epss_apply_observation" - ) - @patch( - "application.import_observations.services.import_observations.find_potential_duplicates" - ) - @patch( - "application.vex.services.vex_engine.VEX_Engine.apply_vex_statements_for_observation" - ) - @patch( - "application.import_observations.parsers.cyclone_dx.parser.CycloneDXParser.get_license_components" - ) - @patch( - "application.import_observations.services.import_observations.process_license_components" - ) + @patch("application.import_observations.services.import_observations.check_security_gate") + @patch("application.import_observations.services.import_observations.push_observations_to_issue_tracker") + @patch("application.import_observations.services.import_observations.apply_epss") + @patch("application.import_observations.services.import_observations.apply_exploit_information") + @patch("application.import_observations.services.import_observations.find_potential_duplicates") + @patch("application.vex.services.vex_engine.VEX_Engine.apply_vex_statements_for_observation") + @patch("application.import_observations.parsers.cyclone_dx.parser.CycloneDXParser.get_license_components") + @patch("application.import_observations.services.import_observations.process_license_components") def test_file_upload_suppress_licenses_true( self, mock_process_license_components, mock_get_license_components, mock_apply_vex_statements_for_observation, mock_find_potential_duplicates, - mock_epss_apply_observation, + mock_apply_exploit_information, + mock_apply_epss, mock_push_observations_to_issue_tracker, - mock_set_repository_default_branch, mock_check_security_gate, mock_get_current_request, ): @@ -426,37 +400,26 @@ def test_file_upload_suppress_licenses_true( "test_endpoint_url", "test_kubernetes_cluster", suppress_licenses=True, + sbom=False, ) self.assertEqual(mock_get_license_components.call_count, 0) self.assertEqual(mock_process_license_components.call_count, 0) @patch("application.commons.services.global_request.get_current_request") - @patch( - "application.import_observations.services.import_observations.check_security_gate" - ) - @patch( - "application.import_observations.services.import_observations.set_repository_default_branch" - ) - @patch( - "application.import_observations.services.import_observations.push_observations_to_issue_tracker" - ) - @patch( - "application.import_observations.services.import_observations.epss_apply_observation" - ) - @patch( - "application.import_observations.services.import_observations.find_potential_duplicates" - ) - @patch( - "application.vex.services.vex_engine.VEX_Engine.apply_vex_statements_for_observation" - ) + @patch("application.import_observations.services.import_observations.check_security_gate") + @patch("application.import_observations.services.import_observations.push_observations_to_issue_tracker") + @patch("application.import_observations.services.import_observations.apply_epss") + @patch("application.import_observations.services.import_observations.apply_exploit_information") + @patch("application.import_observations.services.import_observations.find_potential_duplicates") + @patch("application.vex.services.vex_engine.VEX_Engine.apply_vex_statements_for_observation") def test_file_upload_licenses_feature_true( self, mock_apply_vex_statements_for_observation, mock_find_potential_duplicates, - mock_epss_apply_observation, + 
mock_apply_exploit_information, + mock_apply_epss, mock_push_observations_to_issue_tracker, - mock_set_repository_default_branch, mock_check_security_gate, mock_get_current_request, ): @@ -473,6 +436,7 @@ def test_file_upload_licenses_feature_true( "test_endpoint_url", "test_kubernetes_cluster", suppress_licenses=False, + sbom=True, ) def _file_upload_licenses( @@ -483,7 +447,14 @@ def _file_upload_licenses( endpoint_url, kubernetes_cluster, suppress_licenses, + sbom, ): + try: + license_policy_standard = License_Policy.objects.get(name="Standard") + except License_Policy.DoesNotExist: + call_command("loaddata", "unittests/fixtures/initial_license_data.json") + license_policy_standard = License_Policy.objects.get(name="Standard") + License_Component.objects.all().delete() # --- First import without license policy --- @@ -497,11 +468,12 @@ def _file_upload_licenses( "r", ) ), - service=service, + service_name=service, docker_image_name_tag=docker_image_name_tag, endpoint_url=endpoint_url, kubernetes_cluster=kubernetes_cluster, suppress_licenses=suppress_licenses, + sbom=sbom, ) ( @@ -522,16 +494,12 @@ def _file_upload_licenses( self.assertEqual(updated_license_objects, 0) self.assertEqual(deleted_license_objects, 0) - license_components = License_Component.objects.filter(product=1).order_by( - "id" - ) + license_components = License_Component.objects.filter(product=1).order_by("id") self.assertEqual(len(license_components), 67) self.assertEqual(license_components[1].branch, branch) self.assertEqual(license_components[1].upload_filename, "licenses_1.json") - self.assertEqual( - license_components[1].component_name, "argon2-cffi-bindings" - ) + self.assertEqual(license_components[1].component_name, "argon2-cffi-bindings") self.assertEqual(license_components[1].component_version, "21.2.0") self.assertEqual( license_components[1].component_name_version, @@ -543,13 +511,13 @@ def _file_upload_licenses( ) self.assertEqual(license_components[1].component_purl_type, "pypi") self.assertEqual(license_components[1].component_cpe, "") - dependencies = """SecObserve:1.26.0 --> argon2-cffi:23.1.0 + dependencies = """SecObserve:1.48.0 --> argon2-cffi:23.1.0 argon2-cffi:23.1.0 --> argon2-cffi-bindings:21.2.0""" self.assertEqual(license_components[1].component_dependencies, dependencies) - self.assertEqual( - license_components[1].license, License.objects.get(spdx_id="MIT") - ) - self.assertEqual(license_components[1].non_spdx_license, "") + self.assertEqual(license_components[1].effective_spdx_license, License.objects.get(spdx_id="MIT")) + self.assertEqual(license_components[1].effective_license_name, "MIT") + self.assertEqual(license_components[1].imported_declared_spdx_license, License.objects.get(spdx_id="MIT")) + self.assertEqual(license_components[1].imported_declared_license_name, "MIT") self.assertEqual( license_components[1].evaluation_result, License_Policy_Evaluation_Result.RESULT_UNKNOWN, @@ -561,29 +529,53 @@ def _file_upload_licenses( ), ) + self.assertEqual(license_components[3].component_name_version, "asgiref:3.8.1") + self.assertEqual(license_components[3].effective_multiple_licenses, "0BSD, BSD-3-Clause") + self.assertEqual(license_components[3].effective_license_name, "0BSD, BSD-3-Clause") + self.assertEqual(license_components[3].imported_declared_multiple_licenses, "0BSD, BSD-3-Clause") + self.assertEqual(license_components[3].imported_declared_license_name, "0BSD, BSD-3-Clause") self.assertEqual( - license_components[3].component_name_version, "asgiref:3.8.1" + 
license_components[3].evaluation_result, + License_Policy_Evaluation_Result.RESULT_UNKNOWN, ) - self.assertEqual(license_components[3].license, None) self.assertEqual( - license_components[3].non_spdx_license, "0BSD, BSD-3-Clause" + license_components[3].numerical_evaluation_result, + License_Policy_Evaluation_Result.NUMERICAL_RESULTS.get( + License_Policy_Evaluation_Result.RESULT_UNKNOWN, + ), ) + + self.assertEqual(license_components[4].component_name_version, "attrs:24.2.0") + self.assertEqual(license_components[4].effective_non_spdx_license, "attrs non-standard license") + self.assertEqual(license_components[4].effective_license_name, "attrs non-standard license") + self.assertEqual(license_components[4].imported_declared_non_spdx_license, "attrs non-standard license") + self.assertEqual(license_components[4].imported_declared_license_name, "attrs non-standard license") self.assertEqual( - license_components[3].evaluation_result, + license_components[4].evaluation_result, License_Policy_Evaluation_Result.RESULT_UNKNOWN, ) self.assertEqual( - license_components[3].numerical_evaluation_result, + license_components[4].numerical_evaluation_result, License_Policy_Evaluation_Result.NUMERICAL_RESULTS.get( License_Policy_Evaluation_Result.RESULT_UNKNOWN, ), ) + self.assertEqual(license_components[24].component_name_version, "email-validator:2.1.1") + self.assertEqual( + license_components[24].effective_license_expression, + "GPL-2.0-or-later WITH Bison-exception-2.2", + ) + self.assertEqual( + license_components[24].effective_license_name, + "GPL-2.0-or-later WITH Bison-exception-2.2", + ) self.assertEqual( - license_components[24].component_name_version, "email-validator:2.1.1" + license_components[24].imported_declared_license_expression, + "GPL-2.0-or-later WITH Bison-exception-2.2", ) self.assertEqual( - license_components[24].license_expression, + license_components[24].imported_declared_license_name, "GPL-2.0-or-later WITH Bison-exception-2.2", ) self.assertEqual( @@ -600,11 +592,11 @@ def _file_upload_licenses( # --- Second import with license policy --- product = Product.objects.get(id=1) - product.license_policy = License_Policy.objects.get(name="Standard") + product.license_policy = license_policy_standard product.save() license_policy_item = License_Policy_Item( - license_policy=License_Policy.objects.get(name="Standard"), + license_policy=license_policy_standard, license_group=None, license=None, license_expression="GPL-2.0-or-later WITH Bison-exception-2.2", @@ -622,11 +614,12 @@ def _file_upload_licenses( "r", ) ), - service=service, + service_name=service, docker_image_name_tag=docker_image_name_tag, endpoint_url=endpoint_url, kubernetes_cluster=kubernetes_cluster, suppress_licenses=suppress_licenses, + sbom=sbom, ) ( @@ -647,19 +640,17 @@ def _file_upload_licenses( self.assertEqual(updated_license_objects, 67) self.assertEqual(deleted_license_objects, 0) - license_components = License_Component.objects.filter(product=1).order_by( - "id" - ) + license_components = License_Component.objects.filter(product=1).order_by("id") self.assertEqual(len(license_components), 67) self.assertEqual( license_components[1].component_name_version, "argon2-cffi-bindings:21.2.0", ) - self.assertEqual( - license_components[1].license, License.objects.get(spdx_id="MIT") - ) - self.assertEqual(license_components[1].non_spdx_license, "") + self.assertEqual(license_components[1].effective_spdx_license, License.objects.get(spdx_id="MIT")) + self.assertEqual(license_components[1].effective_license_name, 
"MIT") + self.assertEqual(license_components[1].imported_declared_spdx_license, License.objects.get(spdx_id="MIT")) + self.assertEqual(license_components[1].imported_declared_license_name, "MIT") self.assertEqual( license_components[1].evaluation_result, License_Policy_Evaluation_Result.RESULT_ALLOWED, @@ -671,32 +662,51 @@ def _file_upload_licenses( ), ) + self.assertEqual(license_components[3].component_name_version, "asgiref:3.8.1") + self.assertEqual(license_components[3].effective_multiple_licenses, "0BSD, BSD-3-Clause") + self.assertEqual(license_components[3].effective_license_name, "0BSD, BSD-3-Clause") + self.assertEqual(license_components[3].imported_declared_multiple_licenses, "0BSD, BSD-3-Clause") + self.assertEqual(license_components[3].imported_declared_license_name, "0BSD, BSD-3-Clause") self.assertEqual( - license_components[3].component_name_version, "asgiref:3.8.1" + license_components[3].evaluation_result, + License_Policy_Evaluation_Result.RESULT_ALLOWED, ) - self.assertEqual(license_components[3].license, None) self.assertEqual( - license_components[3].non_spdx_license, "0BSD, BSD-3-Clause" + license_components[3].numerical_evaluation_result, + License_Policy_Evaluation_Result.NUMERICAL_RESULTS.get( + License_Policy_Evaluation_Result.RESULT_ALLOWED, + ), ) + + self.assertEqual(license_components[4].component_name_version, "attrs:24.2.0") + self.assertEqual(license_components[4].effective_non_spdx_license, "attrs non-standard license") + self.assertEqual(license_components[4].effective_license_name, "attrs non-standard license") + self.assertEqual(license_components[4].imported_declared_non_spdx_license, "attrs non-standard license") + self.assertEqual(license_components[4].imported_declared_license_name, "attrs non-standard license") self.assertEqual( - license_components[3].evaluation_result, + license_components[4].evaluation_result, License_Policy_Evaluation_Result.RESULT_UNKNOWN, ) self.assertEqual( - license_components[3].numerical_evaluation_result, + license_components[4].numerical_evaluation_result, License_Policy_Evaluation_Result.NUMERICAL_RESULTS.get( License_Policy_Evaluation_Result.RESULT_UNKNOWN, ), ) + self.assertEqual(license_components[9].component_name_version, "cryptography:43.0.1") self.assertEqual( - license_components[9].component_name_version, "cryptography:43.0.1" + license_components[9].effective_license_expression, + "LGPL-3.0-or-later OR BSD-3-Clause", ) - self.assertEqual(license_components[9].license, None) self.assertEqual( - license_components[9].license_expression, + license_components[9].effective_license_name, "LGPL-3.0-or-later OR BSD-3-Clause", ) + self.assertEqual( + license_components[9].imported_declared_license_expression, "LGPL-3.0-or-later OR BSD-3-Clause" + ) + self.assertEqual(license_components[9].imported_declared_license_name, "LGPL-3.0-or-later OR BSD-3-Clause") self.assertEqual( license_components[9].evaluation_result, License_Policy_Evaluation_Result.RESULT_ALLOWED, @@ -709,11 +719,18 @@ def _file_upload_licenses( ) self.assertEqual(license_components[10].component_name_version, "cvss:3.2") - self.assertEqual(license_components[10].license, None) self.assertEqual( - license_components[10].license_expression, + license_components[10].effective_license_expression, "GPL-3.0-or-later AND BSD-3-Clause", ) + self.assertEqual( + license_components[10].effective_license_name, + "GPL-3.0-or-later AND BSD-3-Clause", + ) + self.assertEqual( + license_components[10].imported_declared_license_expression, "GPL-3.0-or-later AND 
BSD-3-Clause" + ) + self.assertEqual(license_components[10].imported_declared_license_name, "GPL-3.0-or-later AND BSD-3-Clause") self.assertEqual( license_components[10].evaluation_result, License_Policy_Evaluation_Result.RESULT_FORBIDDEN, @@ -725,12 +742,21 @@ def _file_upload_licenses( ), ) + self.assertEqual(license_components[11].component_name_version, "defusedcsv:2.0.0") + self.assertEqual( + license_components[11].effective_license_expression, + "(Apache-2.0 OR BSD-3-Clause) AND MIT", + ) self.assertEqual( - license_components[11].component_name_version, "defusedcsv:2.0.0" + license_components[11].effective_license_name, + "(Apache-2.0 OR BSD-3-Clause) AND MIT", ) - self.assertEqual(license_components[11].license, None) self.assertEqual( - license_components[11].license_expression, + license_components[11].imported_declared_license_expression, + "(Apache-2.0 OR BSD-3-Clause) AND MIT", + ) + self.assertEqual( + license_components[11].imported_declared_license_name, "(Apache-2.0 OR BSD-3-Clause) AND MIT", ) self.assertEqual( @@ -744,11 +770,21 @@ def _file_upload_licenses( ), ) + self.assertEqual(license_components[24].component_name_version, "email-validator:2.1.1") + self.assertEqual( + license_components[24].effective_license_expression, + "GPL-2.0-or-later WITH Bison-exception-2.2", + ) + self.assertEqual( + license_components[24].effective_license_name, + "GPL-2.0-or-later WITH Bison-exception-2.2", + ) self.assertEqual( - license_components[24].component_name_version, "email-validator:2.1.1" + license_components[24].imported_declared_license_expression, + "GPL-2.0-or-later WITH Bison-exception-2.2", ) self.assertEqual( - license_components[24].license_expression, + license_components[24].imported_declared_license_name, "GPL-2.0-or-later WITH Bison-exception-2.2", ) self.assertEqual( @@ -764,20 +800,30 @@ def _file_upload_licenses( # --- Third import with some changes --- - license_policy = License_Policy.objects.get(name="Standard") + license_policy = license_policy_standard license_policy.ignore_component_types = "npm" license_policy.save() license_policy_item = License_Policy_Item( - license_policy=License_Policy.objects.get(name="Standard"), + license_policy=license_policy_standard, + license_group=None, + license=License.objects.get(spdx_id="0BSD"), + non_spdx_license="", + evaluation_result=License_Policy_Evaluation_Result.RESULT_FORBIDDEN, + ) + license_policy_item.save() + + license_policy_item = License_Policy_Item( + license_policy=license_policy_standard, license_group=None, license=None, - non_spdx_license="0BSD, BSD-3-Clause", - evaluation_result=License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED, + non_spdx_license="attrs non-standard license", + evaluation_result=License_Policy_Evaluation_Result.RESULT_FORBIDDEN, ) license_policy_item.save() + license_policy_item = License_Policy_Item( - license_policy=License_Policy.objects.get(name="Standard"), + license_policy=license_policy_standard, license_group=None, license=License.objects.get(spdx_id="MIT"), non_spdx_license="", @@ -785,9 +831,8 @@ def _file_upload_licenses( ) license_policy_item.save() - license_policy_item.save() license_policy_item = License_Policy_Item( - license_policy=License_Policy.objects.get(name="Standard"), + license_policy=license_policy_standard, license_group=None, license=License.objects.get(spdx_id="Apache-2.0"), non_spdx_license="", @@ -804,11 +849,12 @@ def _file_upload_licenses( "r", ) ), - service=service, + service_name=service, docker_image_name_tag=docker_image_name_tag, 
endpoint_url=endpoint_url, kubernetes_cluster=kubernetes_cluster, suppress_licenses=suppress_licenses, + sbom=sbom, ) ( @@ -829,19 +875,17 @@ def _file_upload_licenses( self.assertEqual(updated_license_objects, 64) self.assertEqual(deleted_license_objects, 3) - license_components = License_Component.objects.filter(product=1).order_by( - "id" - ) + license_components = License_Component.objects.filter(product=1).order_by("id") self.assertEqual(len(license_components), 67) self.assertEqual( license_components[64].component_name_version, "argon2-cffi-bindings:21.2.1", ) - self.assertEqual( - license_components[64].license, License.objects.get(spdx_id="MIT") - ) - self.assertEqual(license_components[64].non_spdx_license, "") + self.assertEqual(license_components[64].effective_spdx_license, License.objects.get(spdx_id="MIT")) + self.assertEqual(license_components[64].effective_license_name, "MIT") + self.assertEqual(license_components[64].imported_concluded_spdx_license, License.objects.get(spdx_id="MIT")) + self.assertEqual(license_components[64].imported_concluded_license_name, "MIT") self.assertEqual( license_components[64].evaluation_result, License_Policy_Evaluation_Result.RESULT_FORBIDDEN, @@ -853,30 +897,53 @@ def _file_upload_licenses( ), ) + self.assertEqual(license_components[2].component_name_version, "asgiref:3.8.1") + self.assertEqual(license_components[2].effective_multiple_licenses, "0BSD, BSD-3-Clause") + self.assertEqual(license_components[2].effective_license_name, "0BSD, BSD-3-Clause") + self.assertEqual(license_components[2].imported_concluded_multiple_licenses, "0BSD, BSD-3-Clause") + self.assertEqual(license_components[2].imported_concluded_license_name, "0BSD, BSD-3-Clause") self.assertEqual( - license_components[2].component_name_version, "asgiref:3.8.1" + license_components[2].evaluation_result, + License_Policy_Evaluation_Result.RESULT_FORBIDDEN, ) - self.assertEqual(license_components[2].license, None) self.assertEqual( - license_components[2].non_spdx_license, "0BSD, BSD-3-Clause" + license_components[2].numerical_evaluation_result, + License_Policy_Evaluation_Result.NUMERICAL_RESULTS.get( + License_Policy_Evaluation_Result.RESULT_FORBIDDEN, + ), ) + + self.assertEqual(license_components[3].component_name_version, "attrs:24.2.0") + self.assertEqual(license_components[3].effective_non_spdx_license, "attrs non-standard license") + self.assertEqual(license_components[3].effective_license_name, "attrs non-standard license") + self.assertEqual(license_components[3].imported_concluded_non_spdx_license, "attrs non-standard license") + self.assertEqual(license_components[3].imported_concluded_license_name, "attrs non-standard license") self.assertEqual( - license_components[2].evaluation_result, - License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED, + license_components[3].evaluation_result, + License_Policy_Evaluation_Result.RESULT_FORBIDDEN, ) self.assertEqual( - license_components[2].numerical_evaluation_result, + license_components[3].numerical_evaluation_result, License_Policy_Evaluation_Result.NUMERICAL_RESULTS.get( - License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED, + License_Policy_Evaluation_Result.RESULT_FORBIDDEN, ), ) + self.assertEqual(license_components[7].component_name_version, "cryptography:43.0.1") self.assertEqual( - license_components[7].component_name_version, "cryptography:43.0.1" + license_components[7].effective_license_expression, + "LGPL-3.0-or-later OR GPL-3.0-or-later", ) - self.assertEqual(license_components[7].license, None) 
self.assertEqual( - license_components[7].license_expression, + license_components[7].effective_license_name, + "LGPL-3.0-or-later OR GPL-3.0-or-later", + ) + self.assertEqual( + license_components[7].imported_concluded_license_expression, + "LGPL-3.0-or-later OR GPL-3.0-or-later", + ) + self.assertEqual( + license_components[7].imported_concluded_license_name, "LGPL-3.0-or-later OR GPL-3.0-or-later", ) self.assertEqual( @@ -891,9 +958,12 @@ def _file_upload_licenses( ) self.assertEqual(license_components[8].component_name_version, "cvss:3.2") - self.assertEqual(license_components[8].license, None) self.assertEqual( - license_components[8].license_expression, + license_components[8].effective_license_expression, + "LGPL-3.0-or-later AND BSD-3-Clause", + ) + self.assertEqual( + license_components[8].imported_concluded_license_expression, "LGPL-3.0-or-later AND BSD-3-Clause", ) self.assertEqual( @@ -907,12 +977,13 @@ def _file_upload_licenses( ), ) + self.assertEqual(license_components[9].component_name_version, "defusedcsv:2.0.0") self.assertEqual( - license_components[9].component_name_version, "defusedcsv:2.0.0" + license_components[9].effective_license_expression, + "Apache-2.0 AND (BSD-3-Clause OR MIT)", ) - self.assertEqual(license_components[9].license, None) self.assertEqual( - license_components[9].license_expression, + license_components[9].imported_concluded_license_expression, "Apache-2.0 AND (BSD-3-Clause OR MIT)", ) self.assertEqual( @@ -926,11 +997,13 @@ def _file_upload_licenses( ), ) + self.assertEqual(license_components[22].component_name_version, "email-validator:2.1.1") self.assertEqual( - license_components[22].component_name_version, "email-validator:2.1.1" + license_components[22].effective_license_expression, + "GPL-3.0-or-later WITH Bison-exception-2.2", ) self.assertEqual( - license_components[22].license_expression, + license_components[22].imported_concluded_license_expression, "GPL-3.0-or-later WITH Bison-exception-2.2", ) self.assertEqual( @@ -946,7 +1019,7 @@ def _file_upload_licenses( # --- Fourth import with ignoring the PiPy packages --- - license_policy = License_Policy.objects.get(name="Standard") + license_policy = license_policy_standard license_policy.ignore_component_types = "npm, pypi" license_policy.save() @@ -959,11 +1032,12 @@ def _file_upload_licenses( "r", ) ), - service=service, + service_name=service, docker_image_name_tag=docker_image_name_tag, endpoint_url=endpoint_url, kubernetes_cluster=kubernetes_cluster, suppress_licenses=suppress_licenses, + sbom=sbom, ) ( @@ -984,19 +1058,15 @@ def _file_upload_licenses( self.assertEqual(updated_license_objects, 67) self.assertEqual(deleted_license_objects, 0) - license_components = License_Component.objects.filter(product=1).order_by( - "id" - ) + license_components = License_Component.objects.filter(product=1).order_by("id") self.assertEqual(len(license_components), 67) self.assertEqual( license_components[64].component_name_version, "argon2-cffi-bindings:21.2.1", ) - self.assertEqual( - license_components[64].license, License.objects.get(spdx_id="MIT") - ) - self.assertEqual(license_components[64].non_spdx_license, "") + self.assertEqual(license_components[64].effective_spdx_license, License.objects.get(spdx_id="MIT")) + self.assertEqual(license_components[64].imported_concluded_spdx_license, License.objects.get(spdx_id="MIT")) self.assertEqual( license_components[64].evaluation_result, License_Policy_Evaluation_Result.RESULT_IGNORED, @@ -1008,13 +1078,9 @@ def _file_upload_licenses( ), ) - 
self.assertEqual( - license_components[2].component_name_version, "asgiref:3.8.1" - ) - self.assertEqual(license_components[2].license, None) - self.assertEqual( - license_components[2].non_spdx_license, "0BSD, BSD-3-Clause" - ) + self.assertEqual(license_components[2].component_name_version, "asgiref:3.8.1") + self.assertEqual(license_components[2].effective_multiple_licenses, "0BSD, BSD-3-Clause") + self.assertEqual(license_components[2].imported_concluded_multiple_licenses, "0BSD, BSD-3-Clause") self.assertEqual( license_components[2].evaluation_result, License_Policy_Evaluation_Result.RESULT_IGNORED, @@ -1037,6 +1103,84 @@ def _file_upload_licenses( self.assertEqual(len(license_components), 0) +class APIImportObservation(BaseTestCase): + def setUp(self): + Observation.objects.all().delete() + Observation_Log.objects.all().delete() + Rule.objects.all().delete() + Vulnerability_Check.objects.all().delete() + call_command("loaddata", "unittests/fixtures/import_observations_fixtures.json") + super().setUp() + + @patch("application.import_observations.parsers.dependency_track.parser.DependencyTrack.check_connection") + def test_api_import_no_connection(self, mock_check_connection): + mock_check_connection.return_value = False, ["error_1", "error_2"], {} + + api_configuration = Api_Configuration(product=self.product_1, parser=Parser.objects.get(id=3)) + parameters = ApiImportParameters( + api_configuration=api_configuration, + branch=None, + service_name="", + docker_image_name_tag="", + endpoint_url="", + kubernetes_cluster="", + ) + + with self.assertRaises(ValidationError) as e: + api_import_observations(parameters) + + self.assertEqual( + "[ErrorDetail(string=\"Connection couldn't be established: error_1 / error_2\", code='invalid')]", + str(e.exception), + ) + mock_check_connection.assert_called_with(api_configuration) + + @patch("application.import_observations.parsers.dependency_track.parser.DependencyTrack.check_connection") + @patch("application.import_observations.parsers.dependency_track.parser.DependencyTrack.get_observations") + @patch("application.import_observations.services.import_observations._process_data") + @patch("application.import_observations.models.Vulnerability_Check.objects.update_or_create") + def test_api_import_success( + self, mock_update_or_create, mock_process_data, mock_get_observations, mock_check_connection + ): + mock_check_connection.return_value = True, [], {"data": "data"} + mock_get_observations.return_value = [self.observation_1], "test_scanner" + mock_process_data.return_value = 1, 2, 3 + + api_configuration = Api_Configuration( + name="test_configuration", product=self.product_1, parser=Parser.objects.get(id=3) + ) + parameters = ApiImportParameters( + api_configuration=api_configuration, + branch=self.branch_1, + service_name=None, + docker_image_name_tag="", + endpoint_url="", + kubernetes_cluster="", + ) + + numbers = api_import_observations(parameters) + + self.assertEqual(1, numbers[0]) + self.assertEqual(2, numbers[1]) + self.assertEqual(3, numbers[2]) + + mock_check_connection.assert_called_with(api_configuration) + mock_get_observations.assert_called_with({"data": "data"}, self.product_1, self.branch_1) + mock_update_or_create.assert_called_with( + product=self.product_1, + branch=self.branch_1, + service=None, + filename="", + api_configuration_name="test_configuration", + defaults={ + "last_import_observations_new": 1, + "last_import_observations_updated": 2, + "last_import_observations_resolved": 3, + "scanner": "test_scanner", + }, + ) 
+ + class RequestMock: def __init__(self, user): self.user = user diff --git a/backend/unittests/import_observations/test_types.py b/backend/unittests/import_observations/test_types.py new file mode 100644 index 000000000..e16af4759 --- /dev/null +++ b/backend/unittests/import_observations/test_types.py @@ -0,0 +1,240 @@ +from unittest import TestCase + +from application.import_observations.types import ExtendedSemVer + + +class TestExtendedSemVer(TestCase): + def test_parse(self): + ext_semver = ExtendedSemVer.parse("1.2.3-alpha") + self.assertEqual(ext_semver.prefix, None) + self.assertEqual(ext_semver.semver, "1.2.3-alpha") + + ext_semver = ExtendedSemVer.parse("v1.2.3-alpha") + self.assertEqual(ext_semver.prefix, None) + self.assertEqual(ext_semver.semver, "1.2.3-alpha") + + ext_semver = ExtendedSemVer.parse("0") + self.assertEqual(ext_semver.prefix, None) + self.assertEqual(ext_semver.semver, "0.0.0") + + ext_semver = ExtendedSemVer.parse("1.2") + self.assertEqual(ext_semver.prefix, None) + ext_semver = ExtendedSemVer.parse("1.2.0") + + ext_semver = ExtendedSemVer.parse("1:1.2.3") + self.assertEqual(ext_semver.prefix, 1) + self.assertEqual(ext_semver.semver, "1.2.3") + + ext_semver = ExtendedSemVer.parse("1:1.2-alpha") + self.assertEqual(ext_semver.prefix, 1) + self.assertEqual(ext_semver.semver, "1.2.0-alpha") + + ext_semver = ExtendedSemVer.parse(None) + self.assertEqual(ext_semver, None) + + ext_semver = ExtendedSemVer.parse("test") + self.assertEqual(ext_semver, None) + + ext_semver = ExtendedSemVer.parse("a:1.2") + self.assertEqual(ext_semver, None) + + def test_eq(self): + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertNotEqual(ext_semver1, None) + self.assertNotEqual(None, ext_semver1) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertEqual(ext_semver1, ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-beta") + self.assertNotEqual(ext_semver1, ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertEqual(ext_semver1, ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("2:1.2.3-alpha") + self.assertNotEqual(ext_semver1, ext_semver2) + + def test_gt(self): + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertFalse(ext_semver1 > None) + self.assertFalse(None > ext_semver1) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertFalse(ext_semver1 > ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertFalse(ext_semver1 > ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertFalse(ext_semver1 > ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-beta") + self.assertFalse(ext_semver1 > ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertTrue(ext_semver1 > ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertFalse(ext_semver1 > ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("2:1.2.3-alpha") + 
self.assertFalse(ext_semver1 > ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("2:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertTrue(ext_semver1 > ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-beta") + self.assertFalse(ext_semver1 > ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertTrue(ext_semver1 > ext_semver2) + + def test_ge(self): + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertFalse(ext_semver1 >= None) + self.assertFalse(None >= ext_semver1) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertFalse(ext_semver1 >= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertFalse(ext_semver1 >= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertTrue(ext_semver1 >= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-beta") + self.assertFalse(ext_semver1 >= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertTrue(ext_semver1 >= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertTrue(ext_semver1 >= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("2:1.2.3-alpha") + self.assertFalse(ext_semver1 >= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("2:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertTrue(ext_semver1 >= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-beta") + self.assertFalse(ext_semver1 >= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertTrue(ext_semver1 >= ext_semver2) + + def test_lt(self): + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertFalse(ext_semver1 < None) + self.assertFalse(None < ext_semver1) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-beta") + self.assertFalse(ext_semver1 < ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-beta") + self.assertFalse(ext_semver1 < ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertFalse(ext_semver1 < ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-beta") + self.assertTrue(ext_semver1 < ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertFalse(ext_semver1 < ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertFalse(ext_semver1 < ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("2:1.2.3-alpha") + self.assertTrue(ext_semver1 < ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("2:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + 
self.assertFalse(ext_semver1 < ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-beta") + self.assertTrue(ext_semver1 < ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertFalse(ext_semver1 < ext_semver2) + + def test_le(self): + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertFalse(ext_semver1 <= None) + self.assertFalse(None <= ext_semver1) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-beta") + self.assertFalse(ext_semver1 <= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-beta") + self.assertFalse(ext_semver1 <= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertTrue(ext_semver1 <= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1.2.3-beta") + self.assertTrue(ext_semver1 <= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1.2.3-alpha") + self.assertFalse(ext_semver1 <= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertTrue(ext_semver1 <= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("2:1.2.3-alpha") + self.assertTrue(ext_semver1 <= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("2:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertFalse(ext_semver1 <= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-alpha") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-beta") + self.assertTrue(ext_semver1 <= ext_semver2) + + ext_semver1 = ExtendedSemVer.parse("1:1.2.3-beta") + ext_semver2 = ExtendedSemVer.parse("1:1.2.3-alpha") + self.assertFalse(ext_semver1 <= ext_semver2) diff --git a/backend/unittests/issue_tracker/issue_trackers/test_base_issue_tracker.py b/backend/unittests/issue_tracker/issue_trackers/test_base_issue_tracker.py index b5f8f4522..30ca5d99f 100644 --- a/backend/unittests/issue_tracker/issue_trackers/test_base_issue_tracker.py +++ b/backend/unittests/issue_tracker/issue_trackers/test_base_issue_tracker.py @@ -26,9 +26,7 @@ def test_get_title_component(self): issue_tracker = BaseIssueTracker() self.observation_1.origin_component_name_version = "component_1:1.0.0" title = issue_tracker._get_title(self.observation_1) - self.assertEqual( - 'High vulnerability: "observation_1" in component_1:1.0.0', title - ) + self.assertEqual('High vulnerability: "observation_1" in component_1:1.0.0', title) def test_get_title_docker_image(self): issue_tracker = BaseIssueTracker() @@ -40,9 +38,7 @@ def test_get_title_endpoint(self): issue_tracker = BaseIssueTracker() self.observation_1.origin_endpoint_hostname = "hostname_1.example.com" title = issue_tracker._get_title(self.observation_1) - self.assertEqual( - 'High vulnerability: "observation_1" in hostname_1.example.com', title - ) + self.assertEqual('High vulnerability: "observation_1" in hostname_1.example.com', title) def test_get_title_source(self): issue_tracker = BaseIssueTracker() @@ -63,9 +59,7 @@ def test_get_title_all(self): title, ) - @patch( - "application.issue_tracker.issue_trackers.base_issue_tracker.get_base_url_frontend" - ) + 
@patch("application.issue_tracker.issue_trackers.base_issue_tracker.get_base_url_frontend") def test_get_description_with_branch(self, base_url_mock): base_url_mock.return_value = "http://localhost:3000" self.observation_1.pk = 1 @@ -82,14 +76,10 @@ def test_get_description_with_branch(self, base_url_mock): self.assertEqual(expected_description, description) base_url_mock.assert_called_once() - @patch( - "application.issue_tracker.issue_trackers.base_issue_tracker.get_base_url_frontend" - ) + @patch("application.issue_tracker.issue_trackers.base_issue_tracker.get_base_url_frontend") def test_get_description_without_branch(self, base_url_mock): base_url_mock.return_value = "http://localhost:3000" - observation_2 = Observation( - pk=2, product=self.product_1, description="description_2" - ) + observation_2 = Observation(pk=2, product=self.product_1, description="description_2") issue_tracker = BaseIssueTracker() description = issue_tracker._get_description(observation_2) @@ -111,9 +101,7 @@ def test_get_description_for_deleted_observation_without_description(self): def test_get_description_for_deleted_observation_with_description(self): issue_tracker = BaseIssueTracker() - description = issue_tracker._get_description_for_deleted_observation( - "original_description" - ) + description = issue_tracker._get_description_for_deleted_observation("original_description") expected_description = """**--- Observation has been deleted ---** diff --git a/backend/unittests/issue_tracker/issue_trackers/test_github_issue_tracker.py b/backend/unittests/issue_tracker/issue_trackers/test_github_issue_tracker.py index 27eb4d92a..d13f2bfcb 100644 --- a/backend/unittests/issue_tracker/issue_trackers/test_github_issue_tracker.py +++ b/backend/unittests/issue_tracker/issue_trackers/test_github_issue_tracker.py @@ -286,9 +286,7 @@ def test_close_issue_for_deleted_observation_exception(self, patch_mock): description="description_1", labels="label_1,label_2", ) - issue_tracker.close_issue_for_deleted_observation( - self.observation_1.product, issue - ) + issue_tracker.close_issue_for_deleted_observation(self.observation_1.product, issue) self.assertEqual( "404 Client Error: unknown reason for url: https://api.github.com/repos/gh_project_1/issues/gh_1", @@ -316,9 +314,7 @@ def test_close_issue_for_deleted_observation_success(self, patch_mock): labels="label_1,label_2", ) - issue_tracker.close_issue_for_deleted_observation( - self.observation_1.product, issue - ) + issue_tracker.close_issue_for_deleted_observation(self.observation_1.product, issue) patch_mock.assert_called_once_with( url="https://api.github.com/repos/gh_project_1/issues/gh_1", @@ -332,9 +328,5 @@ def test_close_issue_for_deleted_observation_success(self, patch_mock): def test_get_frontend_issue_url(self): issue_tracker = GitHubIssueTracker() - frontend_issue_url = issue_tracker.get_frontend_issue_url( - self.observation_1.product, "gh_1" - ) - self.assertEqual( - "https://github.com/gh_project_1/issues/gh_1", frontend_issue_url - ) + frontend_issue_url = issue_tracker.get_frontend_issue_url(self.observation_1.product, "gh_1") + self.assertEqual("https://github.com/gh_project_1/issues/gh_1", frontend_issue_url) diff --git a/backend/unittests/issue_tracker/issue_trackers/test_gitlab_issue_tracker.py b/backend/unittests/issue_tracker/issue_trackers/test_gitlab_issue_tracker.py index 1519b9f52..40e2e5e1c 100644 --- a/backend/unittests/issue_tracker/issue_trackers/test_gitlab_issue_tracker.py +++ 
b/backend/unittests/issue_tracker/issue_trackers/test_gitlab_issue_tracker.py @@ -82,9 +82,7 @@ def test_get_issue_exception(self, get_mock): response = Response() response.status_code = 500 response.reason = "unknown reason" - response.url = ( - "https://gitlab.example.com/api/v4/projects/gh_project_1/issues/gh_1" - ) + response.url = "https://gitlab.example.com/api/v4/projects/gh_project_1/issues/gh_1" get_mock.return_value = response with self.assertRaises(HTTPError) as e: issue_tracker = GitLabIssueTracker() @@ -296,9 +294,7 @@ def test_close_issue_for_deleted_observation_exception(self, put_mock): description="description_1", labels="label_1,label_2", ) - issue_tracker.close_issue_for_deleted_observation( - self.observation_1.product, issue - ) + issue_tracker.close_issue_for_deleted_observation(self.observation_1.product, issue) self.assertEqual( "404 Client Error: unknown reason for url: https://api.gitlab.com/repos/gh_project_1/issues/gh_1", @@ -328,9 +324,7 @@ def test_close_issue_for_deleted_observation_success(self, put_mock): labels="label_1,label_2", ) - issue_tracker.close_issue_for_deleted_observation( - self.observation_1.product, issue - ) + issue_tracker.close_issue_for_deleted_observation(self.observation_1.product, issue) put_mock.assert_called_once_with( url="https://gitlab.example.com/api/v4/projects/gh_project_1/issues/gh_1", @@ -346,9 +340,5 @@ def test_close_issue_for_deleted_observation_success(self, put_mock): def test_get_frontend_issue_url(self): issue_tracker = GitLabIssueTracker() - frontend_issue_url = issue_tracker.get_frontend_issue_url( - self.observation_1.product, "gh_1" - ) - self.assertEqual( - "https://gitlab.example.com/gh_project_1/-/issues/gh_1", frontend_issue_url - ) + frontend_issue_url = issue_tracker.get_frontend_issue_url(self.observation_1.product, "gh_1") + self.assertEqual("https://gitlab.example.com/gh_project_1/-/issues/gh_1", frontend_issue_url) diff --git a/backend/unittests/issue_tracker/issue_trackers/test_jira_issue_tracker.py b/backend/unittests/issue_tracker/issue_trackers/test_jira_issue_tracker.py index eb002515a..d6afe163f 100644 --- a/backend/unittests/issue_tracker/issue_trackers/test_jira_issue_tracker.py +++ b/backend/unittests/issue_tracker/issue_trackers/test_jira_issue_tracker.py @@ -38,15 +38,9 @@ def setUp(self): self.observation_1.product.issue_tracker_username = "username_1" self.observation_1.product.issue_tracker_status_closed = "Closed" - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.create_issue" - ) - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.get_base_url_frontend" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.create_issue") + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.get_base_url_frontend") @patch("application.core.models.Observation.save") def test_create_issue(self, save_mock, base_url_mock, create_issue_mock, jira_mock): self.observation_1.product.issue_tracker_issue_type = "Vulnerability" @@ -77,9 +71,7 @@ def test_create_issue(self, save_mock, base_url_mock, create_issue_mock, jira_mo self.assertEqual("jira_issue_1", issue_id) self.assertEqual("Open", self.observation_1.issue_tracker_jira_initial_status) - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) + 
@patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.issue") def test_get_issue_not_found(self, issue_mock, jira_mock): issue_mock.return_value = None @@ -88,13 +80,9 @@ def test_get_issue_not_found(self, issue_mock, jira_mock): issue = issue_tracker.get_issue(self.observation_1.product, "jira_1") self.assertIsNone(issue) - issue_mock.assert_called_once_with( - "jira_1", fields="summary,description,labels,status" - ) + issue_mock.assert_called_once_with("jira_1", fields="summary,description,labels,status") - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.issue") def test_get_issue_success(self, issue_mock, jira_mock): issue_mock.return_value = JiraIssue( @@ -119,17 +107,11 @@ def test_get_issue_success(self, issue_mock, jira_mock): ), issue, ) - issue_mock.assert_called_once_with( - "jira_1", fields="summary,description,labels,status" - ) + issue_mock.assert_called_once_with("jira_1", fields="summary,description,labels,status") - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.issue") - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update") def test_update_issue_no_id(self, update_mock, issue_mock, jira_mock): issue_tracker = JiraIssueTracker(self.observation_1.product) issue_tracker.update_issue(self.observation_1, None) @@ -137,19 +119,11 @@ def test_update_issue_no_id(self, update_mock, issue_mock, jira_mock): issue_mock.assert_not_called() update_mock.assert_not_called() - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.issue") - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update" - ) - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue" - ) - def test_update_issue_no_jira_issue( - self, transition_issue_mock, update_mock, issue_mock, jira_mock - ): + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update") + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue") + def test_update_issue_no_jira_issue(self, transition_issue_mock, update_mock, issue_mock, jira_mock): self.observation_1.issue_tracker_issue_id = "jira_1" issue = Issue( id="jira_1", @@ -162,25 +136,15 @@ def test_update_issue_no_jira_issue( issue_tracker = JiraIssueTracker(self.observation_1.product) issue_tracker.update_issue(self.observation_1, issue) - issue_mock.assert_called_once_with( - "jira_1", fields="summary,description,labels,status" - ) + issue_mock.assert_called_once_with("jira_1", fields="summary,description,labels,status") update_mock.assert_not_called() transition_issue_mock.assert_not_called() - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) + 
@patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.issue") - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update" - ) - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue" - ) - def test_update_issue_success( - self, transition_issue_mock, update_mock, issue_mock, jira_mock - ): + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update") + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue") + def test_update_issue_success(self, transition_issue_mock, update_mock, issue_mock, jira_mock): self.observation_1.issue_tracker_issue_id = "jira_1" issue = Issue( id="jira_1", @@ -202,29 +166,17 @@ def test_update_issue_success( issue_tracker = JiraIssueTracker(self.observation_1.product) issue_tracker.update_issue(self.observation_1, issue) - self.assertEqual( - 'Critical vulnerability: "observation_1"', jira_issue.fields.summary - ) + self.assertEqual('Critical vulnerability: "observation_1"', jira_issue.fields.summary) description = "description_1\n\n*Branch:* branch_1\n\n*SecObserve observation:* /#/observations/1/show" self.assertEqual(description, jira_issue.fields.description) - issue_mock.assert_called_once_with( - "jira_1", fields="summary,description,labels,status" - ) + issue_mock.assert_called_once_with("jira_1", fields="summary,description,labels,status") transition_issue_mock.assert_not_called() - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.issue") - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update" - ) - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue" - ) - def test_update_issue_success_transition( - self, transition_issue_mock, update_mock, issue_mock, jira_mock - ): + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update") + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue") + def test_update_issue_success_transition(self, transition_issue_mock, update_mock, issue_mock, jira_mock): self.observation_1.issue_tracker_issue_id = "jira_1" self.observation_1.issue_tracker_jira_initial_status = "Open" self.observation_1.product.issue_tracker_status_closed = "Done" @@ -252,9 +204,7 @@ def test_update_issue_success_transition( self.assertEqual(summary, jira_issue.fields.summary) description = "description_1\n\n*Branch:* branch_1\n\n*SecObserve observation:* /#/observations/1/show" self.assertEqual(description, jira_issue.fields.description) - issue_mock.assert_called_once_with( - "jira_1", fields="summary,description,labels,status" - ) + issue_mock.assert_called_once_with("jira_1", fields="summary,description,labels,status") transition_issue_mock.assert_called_with( JiraIssue( key="jira_issue_1", @@ -268,13 +218,9 @@ def test_update_issue_success_transition( "Open", ) - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.issue") - @patch( - 
"application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update") def test_close_issue_no_id(self, update_mock, issue_mock, jira_mock): issue_tracker = JiraIssueTracker(self.observation_1.product) issue_tracker.close_issue(self.observation_1, None) @@ -282,19 +228,11 @@ def test_close_issue_no_id(self, update_mock, issue_mock, jira_mock): issue_mock.assert_not_called() update_mock.assert_not_called() - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.issue") - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update" - ) - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue" - ) - def test_close_issue_no_jira_issue( - self, transition_issue_mock, update_mock, issue_mock, jira_mock - ): + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update") + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue") + def test_close_issue_no_jira_issue(self, transition_issue_mock, update_mock, issue_mock, jira_mock): self.observation_1.issue_tracker_issue_id = "jira_1" issue = Issue( id="jira_1", @@ -307,25 +245,15 @@ def test_close_issue_no_jira_issue( issue_tracker = JiraIssueTracker(self.observation_1.product) issue_tracker.close_issue(self.observation_1, issue) - issue_mock.assert_called_once_with( - "jira_1", fields="summary,description,labels,status" - ) + issue_mock.assert_called_once_with("jira_1", fields="summary,description,labels,status") update_mock.assert_not_called() transition_issue_mock.assert_not_called() - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.issue") - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update" - ) - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue" - ) - def test_close_issue_success( - self, transition_issue_mock, update_mock, issue_mock, jira_mock - ): + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update") + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue") + def test_close_issue_success(self, transition_issue_mock, update_mock, issue_mock, jira_mock): self.observation_1.issue_tracker_issue_id = "jira_1" self.observation_1.current_status = Status.STATUS_RESOLVED issue = Issue( @@ -352,9 +280,7 @@ def test_close_issue_success( self.assertEqual(summary, jira_issue.fields.summary) description = "description_1\n\n*Branch:* branch_1\n\n*SecObserve observation:* /#/observations/1/show\n\n*Observation status:* Resolved" self.assertEqual(description, jira_issue.fields.description) - issue_mock.assert_called_once_with( - "jira_1", fields="summary,description,labels,status" - ) + issue_mock.assert_called_once_with("jira_1", fields="summary,description,labels,status") transition_issue_mock.assert_called_once_with( JiraIssue( key="jira_issue_1", @@ -368,19 +294,11 @@ def test_close_issue_success( "Closed", ) - @patch( - 
"application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.issue") - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update" - ) - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue" - ) - def test_close_deleted_issue_no_jira_issue( - self, transition_issue_mock, update_mock, issue_mock, jira_mock - ): + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update") + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue") + def test_close_deleted_issue_no_jira_issue(self, transition_issue_mock, update_mock, issue_mock, jira_mock): issue = Issue( id="jira_1", title="title_1", @@ -390,29 +308,17 @@ def test_close_deleted_issue_no_jira_issue( issue_mock.return_value = None issue_tracker = JiraIssueTracker(self.observation_1.product) - issue_tracker.close_issue_for_deleted_observation( - self.observation_1.product, issue - ) + issue_tracker.close_issue_for_deleted_observation(self.observation_1.product, issue) - issue_mock.assert_called_once_with( - "jira_1", fields="summary,description,labels,status" - ) + issue_mock.assert_called_once_with("jira_1", fields="summary,description,labels,status") update_mock.assert_not_called() transition_issue_mock.assert_not_called() - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info" - ) + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.server_info") @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.issue") - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update" - ) - @patch( - "application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue" - ) - def test_close_deleted_issue_success( - self, transition_issue_mock, update_mock, issue_mock, jira_mock - ): + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JiraIssue.update") + @patch("application.issue_tracker.issue_trackers.jira_issue_tracker.JIRA.transition_issue") + def test_close_deleted_issue_success(self, transition_issue_mock, update_mock, issue_mock, jira_mock): issue = Issue( id="jira_1", title="title_1", @@ -431,16 +337,12 @@ def test_close_deleted_issue_success( issue_mock.return_value = jira_issue issue_tracker = JiraIssueTracker(self.observation_1.product) - issue_tracker.close_issue_for_deleted_observation( - self.observation_1.product, issue - ) + issue_tracker.close_issue_for_deleted_observation(self.observation_1.product, issue) self.assertEqual("title_old", jira_issue.fields.summary) description = "*--- Observation has been deleted ---*\n\ndescription_old" self.assertEqual(description, jira_issue.fields.description) - issue_mock.assert_called_once_with( - "jira_1", fields="summary,description,labels,status" - ) + issue_mock.assert_called_once_with("jira_1", fields="summary,description,labels,status") transition_issue_mock.assert_called_once_with( JiraIssue( key="jira_issue_1", diff --git a/backend/unittests/issue_tracker/services/test_issue_tracker.py b/backend/unittests/issue_tracker/services/test_issue_tracker.py index d28e35d48..4c74cf522 100644 --- a/backend/unittests/issue_tracker/services/test_issue_tracker.py +++ b/backend/unittests/issue_tracker/services/test_issue_tracker.py @@ -29,21 +29,15 @@ def setUp(self): 
# --- push_observations_to_issue_tracker --- - @patch( - "application.issue_tracker.services.issue_tracker.push_observation_to_issue_tracker" - ) + @patch("application.issue_tracker.services.issue_tracker.push_observation_to_issue_tracker") def test_push_observations_to_issue_tracker_not_active(self, mock): product = Product.objects.get(pk=1) push_observations_to_issue_tracker(product, False) mock.assert_not_called() - @patch( - "application.issue_tracker.services.issue_tracker.push_observation_to_issue_tracker" - ) + @patch("application.issue_tracker.services.issue_tracker.push_observation_to_issue_tracker") @patch("application.issue_tracker.services.issue_tracker.get_current_user") - def test_push_observations_to_issue_tracker( - self, mock_current_user, mock_issue_tracker - ): + def test_push_observations_to_issue_tracker(self, mock_current_user, mock_issue_tracker): product = Product.objects.get(pk=1) product.issue_tracker_active = True observation = Observation.objects.get(pk=1) @@ -74,9 +68,7 @@ def test_push_observation_to_issue_tracker_not_default_branch(self, mock): @patch("application.issue_tracker.services.issue_tracker.issue_tracker_factory") @patch("application.core.models.Observation.save") - def test_push_observation_to_issue_tracker_open_no_id_no_issue( - self, observation_mock, mock - ): + def test_push_observation_to_issue_tracker_open_no_id_no_issue(self, observation_mock, mock): mock.return_value.get_issue.return_value = None observation = Observation.objects.get(pk=1) @@ -91,9 +83,7 @@ def test_push_observation_to_issue_tracker_open_no_id_no_issue( @patch("application.issue_tracker.services.issue_tracker.issue_tracker_factory") @patch("application.core.models.Observation.save") - def test_push_observation_to_issue_tracker_open_with_id_with_issue( - self, observation_mock, factory_mock - ): + def test_push_observation_to_issue_tracker_open_with_id_with_issue(self, observation_mock, factory_mock): issue = Issue(id=1, title="title", description="description", labels="labels") factory_mock.return_value.get_issue.return_value = issue @@ -114,9 +104,7 @@ def test_push_observation_to_issue_tracker_open_with_id_with_issue( @patch("application.issue_tracker.services.issue_tracker.issue_tracker_factory") @patch("application.core.models.Observation.save") - def test_push_observation_to_issue_tracker_closed_no_id_no_issue( - self, observation_mock, factory_mock - ): + def test_push_observation_to_issue_tracker_closed_no_id_no_issue(self, observation_mock, factory_mock): factory_mock.return_value.get_issue.return_value = None observation = Observation.objects.get(pk=1) @@ -131,9 +119,7 @@ def test_push_observation_to_issue_tracker_closed_no_id_no_issue( @patch("application.issue_tracker.services.issue_tracker.issue_tracker_factory") @patch("application.core.models.Observation.save") - def test_push_observation_to_issue_tracker_closed_with_id_with_issue( - self, observation_mock, factory_mock - ): + def test_push_observation_to_issue_tracker_closed_with_id_with_issue(self, observation_mock, factory_mock): issue = Issue(id=1, title="title", description="description", labels="labels") factory_mock.return_value.get_issue.return_value = issue @@ -154,9 +140,7 @@ def test_push_observation_to_issue_tracker_closed_with_id_with_issue( @patch("application.issue_tracker.services.issue_tracker.issue_tracker_factory") @patch("application.issue_tracker.services.issue_tracker.handle_task_exception") - def test_push_observation_to_issue_tracker_exception( - self, exception_mock, factory_mock - ): 
+ def test_push_observation_to_issue_tracker_exception(self, exception_mock, factory_mock): exception = Exception("error") factory_mock.side_effect = exception @@ -170,9 +154,7 @@ def test_push_observation_to_issue_tracker_exception( @patch("application.issue_tracker.services.issue_tracker.issue_tracker_factory") @patch("application.core.models.Observation.save") - def test_push_observation_to_issue_tracker_with_issue_higher_than_minimum( - self, observation_mock, factory_mock - ): + def test_push_observation_to_issue_tracker_with_issue_higher_than_minimum(self, observation_mock, factory_mock): issue = Issue(id=1, title="title", description="description", labels="labels") factory_mock.return_value.get_issue.return_value = issue @@ -181,9 +163,7 @@ def test_push_observation_to_issue_tracker_with_issue_higher_than_minimum( observation.product.issue_tracker_minimum_severity = Severity.SEVERITY_HIGH observation.current_status = Status.STATUS_OPEN observation.current_severity = Severity.SEVERITY_HIGH - observation.numerical_severity = Severity.NUMERICAL_SEVERITIES.get( - observation.current_severity, 99 - ) + observation.numerical_severity = Severity.NUMERICAL_SEVERITIES.get(observation.current_severity, 99) observation.issue_tracker_issue_id = "123" push_observation_to_issue_tracker(observation, None) @@ -198,9 +178,7 @@ def test_push_observation_to_issue_tracker_with_issue_higher_than_minimum( @patch("application.issue_tracker.services.issue_tracker.issue_tracker_factory") @patch("application.core.models.Observation.save") - def test_push_observation_to_issue_tracker_with_issue_higher_than_minimum( - self, observation_mock, factory_mock - ): + def test_push_observation_to_issue_tracker_with_issue_higher_than_minimum(self, observation_mock, factory_mock): issue = Issue(id=1, title="title", description="description", labels="labels") factory_mock.return_value.get_issue.return_value = issue @@ -209,9 +187,7 @@ def test_push_observation_to_issue_tracker_with_issue_higher_than_minimum( observation.product.issue_tracker_minimum_severity = Severity.SEVERITY_HIGH observation.current_status = Status.STATUS_OPEN observation.current_severity = Severity.SEVERITY_HIGH - observation.numerical_severity = Severity.NUMERICAL_SEVERITIES.get( - observation.current_severity, 99 - ) + observation.numerical_severity = Severity.NUMERICAL_SEVERITIES.get(observation.current_severity, 99) observation.issue_tracker_issue_id = "123" push_observation_to_issue_tracker(observation, None) @@ -228,9 +204,7 @@ def test_push_observation_to_issue_tracker_with_issue_higher_than_minimum( @patch("application.issue_tracker.services.issue_tracker.issue_tracker_factory") @patch("application.core.models.Observation.save") - def test_push_observation_to_issue_tracker_no_issue_lower_than_minimum( - self, observation_mock, factory_mock - ): + def test_push_observation_to_issue_tracker_no_issue_lower_than_minimum(self, observation_mock, factory_mock): factory_mock.return_value.get_issue.return_value = None observation = Observation.objects.get(pk=1) @@ -238,9 +212,7 @@ def test_push_observation_to_issue_tracker_no_issue_lower_than_minimum( observation.product.issue_tracker_minimum_severity = Severity.SEVERITY_HIGH observation.current_status = Status.STATUS_OPEN observation.current_severity = Severity.SEVERITY_MEDIUM - observation.numerical_severity = Severity.NUMERICAL_SEVERITIES.get( - observation.current_severity, 99 - ) + observation.numerical_severity = Severity.NUMERICAL_SEVERITIES.get(observation.current_severity, 99) 
push_observation_to_issue_tracker(observation, None) @@ -263,9 +235,7 @@ def test_push_observation_to_issue_tracker_with_issue_lower_than_minimum_not_clo observation.product.issue_tracker_minimum_severity = Severity.SEVERITY_HIGH observation.current_status = Status.STATUS_OPEN observation.current_severity = Severity.SEVERITY_MEDIUM - observation.numerical_severity = Severity.NUMERICAL_SEVERITIES.get( - observation.current_severity, 99 - ) + observation.numerical_severity = Severity.NUMERICAL_SEVERITIES.get(observation.current_severity, 99) observation.issue_tracker_issue_id = "123" push_observation_to_issue_tracker(observation, None) @@ -291,9 +261,7 @@ def test_push_observation_to_issue_tracker_with_issue_lower_than_minimum_already observation.product.issue_tracker_minimum_severity = Severity.SEVERITY_HIGH observation.current_status = Status.STATUS_OPEN observation.current_severity = Severity.SEVERITY_MEDIUM - observation.numerical_severity = Severity.NUMERICAL_SEVERITIES.get( - observation.current_severity, 99 - ) + observation.numerical_severity = Severity.NUMERICAL_SEVERITIES.get(observation.current_severity, 99) observation.issue_tracker_issue_id = "123" observation.issue_tracker_issue_closed = True diff --git a/backend/unittests/licenses/api/test_serializers.py b/backend/unittests/licenses/api/test_serializers.py index 69442f246..30aa38ea8 100644 --- a/backend/unittests/licenses/api/test_serializers.py +++ b/backend/unittests/licenses/api/test_serializers.py @@ -22,38 +22,28 @@ def setUp(self) -> None: super().setUp() self.license_group_1 = License_Group.objects.get(id=1) - self.license_group_member_1 = License_Group_Member( - license_group=self.license_group_1, user=self.user_internal - ) - self.license_group_member_serializer_1 = LicenseGroupMemberSerializer( - self.license_group_member_1 - ) + self.license_group_member_1 = License_Group_Member(license_group=self.license_group_1, user=self.user_internal) + self.license_group_member_serializer_1 = LicenseGroupMemberSerializer(self.license_group_member_1) def test_validate_license_group_change(self): license_group_2 = License_Group.objects.get(id=2) - attrs = { - "license_group": license_group_2, - } + attrs = {"license_group": license_group_2} with self.assertRaises(ValidationError) as e: self.license_group_member_serializer_1.validate(attrs) self.assertEqual( - "[ErrorDetail(string='License group and user cannot be changed', code='invalid')]", - str(e.exception), + "[ErrorDetail(string='License group and user cannot be changed', code='invalid')]", str(e.exception) ) def test_validate_user_change(self): - attrs = { - "user": self.user_external, - } + attrs = {"user": self.user_external} with self.assertRaises(ValidationError) as e: self.license_group_member_serializer_1.validate(attrs) self.assertEqual( - "[ErrorDetail(string='License group and user cannot be changed', code='invalid')]", - str(e.exception), + "[ErrorDetail(string='License group and user cannot be changed', code='invalid')]", str(e.exception) ) @patch("application.licenses.api.serializers.get_license_group_member") @@ -61,10 +51,7 @@ def test_validate_already_exists(self, mock_license_group_member): self.license_group_member_serializer_1.instance = None mock_license_group_member.return_value = self.license_group_member_1 - attrs = { - "license_group": self.license_group_1, - "user": self.user_internal, - } + attrs = {"license_group": self.license_group_1, "user": self.user_internal} with self.assertRaises(ValidationError) as e: 
self.license_group_member_serializer_1.validate(attrs) @@ -73,9 +60,7 @@ def test_validate_already_exists(self, mock_license_group_member): "[ErrorDetail(string='License group member Permissive Model (Blue Oak Council) / user_internal@example.com already exists', code='invalid')]", str(e.exception), ) - mock_license_group_member.assert_called_with( - self.license_group_1, self.user_internal - ) + mock_license_group_member.assert_called_with(self.license_group_1, self.user_internal) class TestLicensePolicyMemberSerializer(BaseTestCase): @@ -86,35 +71,27 @@ def setUp(self) -> None: self.license_policy_member_1 = License_Policy_Member( license_policy=self.license_policy_1, user=self.user_internal ) - self.license_policy_member_serializer_1 = LicensePolicyMemberSerializer( - self.license_policy_member_1 - ) + self.license_policy_member_serializer_1 = LicensePolicyMemberSerializer(self.license_policy_member_1) def test_validate_license_policy_change(self): license_policy_2 = License_Policy(name="license_policy_2") - attrs = { - "license_policy": license_policy_2, - } + attrs = {"license_policy": license_policy_2} with self.assertRaises(ValidationError) as e: self.license_policy_member_serializer_1.validate(attrs) self.assertEqual( - "[ErrorDetail(string='License policy and user cannot be changed', code='invalid')]", - str(e.exception), + "[ErrorDetail(string='License policy and user cannot be changed', code='invalid')]", str(e.exception) ) def test_validate_user_change(self): - attrs = { - "user": self.user_external, - } + attrs = {"user": self.user_external} with self.assertRaises(ValidationError) as e: self.license_policy_member_serializer_1.validate(attrs) self.assertEqual( - "[ErrorDetail(string='License policy and user cannot be changed', code='invalid')]", - str(e.exception), + "[ErrorDetail(string='License policy and user cannot be changed', code='invalid')]", str(e.exception) ) @patch("application.licenses.api.serializers.get_license_policy_member") @@ -122,10 +99,7 @@ def test_validate_already_exists(self, mock_license_policy_member): self.license_policy_member_serializer_1.instance = None mock_license_policy_member.return_value = self.license_policy_member_1 - attrs = { - "license_policy": self.license_policy_1, - "user": self.user_internal, - } + attrs = {"license_policy": self.license_policy_1, "user": self.user_internal} with self.assertRaises(ValidationError) as e: self.license_policy_member_serializer_1.validate(attrs) @@ -134,9 +108,7 @@ def test_validate_already_exists(self, mock_license_policy_member): "[ErrorDetail(string='License policy member Standard / user_internal@example.com already exists', code='invalid')]", str(e.exception), ) - mock_license_policy_member.assert_called_with( - self.license_policy_1, self.user_internal - ) + mock_license_policy_member.assert_called_with(self.license_policy_1, self.user_internal) class TestLicensePolicyItemSerializer(BaseTestCase): diff --git a/backend/unittests/licenses/api/test_views.py b/backend/unittests/licenses/api/test_views.py new file mode 100644 index 000000000..af9eadc0c --- /dev/null +++ b/backend/unittests/licenses/api/test_views.py @@ -0,0 +1,75 @@ +from unittest.mock import ANY, patch + +from django.core.management import call_command +from rest_framework.test import APIClient + +from application.access_control.models import User +from application.licenses.models import License_Component +from unittests.base_test_case import BaseTestCase + + +class TestImport(BaseTestCase): + def setUp(self): + call_command( + 
"loaddata", + [ + "unittests/fixtures/initial_license_data.json", + "unittests/fixtures/unittests_fixtures.json", + "unittests/fixtures/unittests_license_fixtures.json", + ], + ) + super().setUpClass() + + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + def test_too_many_attributes(self, mock_authenticate): + user = User.objects.get(username="db_admin") + mock_authenticate.return_value = user, None + + patch_data = { + "manual_concluded_non_spdx_license": "Non SPDX", + "manual_concluded_license_expression": "Expression", + } + api_client = APIClient() + response = api_client.patch("/api/license_components/1/concluded_license/", patch_data, format="json") + + self.assertEqual(response.status_code, 400) + self.assertEqual( + str(response.data), "{'message': 'Non field errors: Only one concluded license field may be set.'}" + ) + + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + def test_license_component_not_found(self, mock_authenticate): + user = User.objects.get(username="db_admin") + mock_authenticate.return_value = user, None + + patch_data = {"manual_concluded_non_spdx_license": "Non SPDX"} + api_client = APIClient() + response = api_client.patch("/api/license_components/99999/concluded_license/", patch_data, format="json") + + self.assertEqual(response.status_code, 404) + self.assertEqual(str(response.data), "{'message': 'License component 99999 not found.'}") + + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + def test_spdx_license_not_found(self, mock_authenticate): + user = User.objects.get(username="db_admin") + mock_authenticate.return_value = user, None + + patch_data = {"manual_concluded_spdx_license": 99999} + api_client = APIClient() + response = api_client.patch("/api/license_components/1/concluded_license/", patch_data, format="json") + + self.assertEqual(response.status_code, 400) + self.assertEqual(str(response.data), "{'message': 'SPDX license 99999 not found.'}") + + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + @patch("application.licenses.api.views.save_concluded_license") + def test_spdx_license_success(self, mock_save_concluded_license, mock_authenticate): + user = User.objects.get(username="db_admin") + mock_authenticate.return_value = user, None + + patch_data = {"manual_concluded_spdx_license": 1} + api_client = APIClient() + response = api_client.patch("/api/license_components/1/concluded_license/", patch_data, format="json") + + self.assertEqual(response.status_code, 200) + mock_save_concluded_license.assert_called_once_with(License_Component.objects.get(pk=1)) diff --git a/backend/unittests/licenses/management/__init__.py b/backend/unittests/licenses/management/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/licenses/management/commands/__init__.py b/backend/unittests/licenses/management/commands/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/licenses/management/commands/test_initial_license_load.py b/backend/unittests/licenses/management/commands/test_initial_license_load.py new file mode 100644 index 000000000..36ec2bcf3 --- /dev/null +++ b/backend/unittests/licenses/management/commands/test_initial_license_load.py @@ -0,0 +1,146 @@ +import traceback +from unittest import TestCase +from unittest.mock import call, patch + +from 
django.core.management import call_command + + +class TestInitialLicenseLoadCommand(TestCase): + """Unit tests for the `initial_license_load` management command.""" + + @patch("application.licenses.management.commands.initial_license_load.import_licenses") + @patch("application.licenses.management.commands.initial_license_load.import_scancode_licensedb") + @patch("application.licenses.management.commands.initial_license_load.create_scancode_standard_policy") + @patch("application.licenses.management.commands.initial_license_load.License_Group.objects.exists") + @patch("application.licenses.management.commands.initial_license_load.License.objects.exists") + @patch("application.licenses.management.commands.initial_license_load.License_Policy.objects.exists") + @patch("application.licenses.management.commands.initial_license_load.logger") + def test_handle_happy_path( + self, + mock_logger, + mock_license_policy_exists, + mock_license_group_exists, + mock_license_exists, + mock_create_policy, + mock_import_scancode, + mock_import_licenses, + ): + """ + Verify that when there are no licenses, license groups or policies + the three import helpers are called once and the expected log entries + are produced. + """ + # All three existence checks return False + mock_license_exists.return_value = False + mock_license_group_exists.return_value = False + mock_license_policy_exists.return_value = False + + # Execute the command via Django's helper + call_command("initial_license_load") + + # Import helpers should have been called exactly once + mock_import_licenses.assert_called_once_with() + mock_import_scancode.assert_called_once_with() + mock_create_policy.assert_called_once_with() + + # Verify that the logger received the expected INFO messages + expected_calls = [ + call("Importing licenses, license groups and license policies ..."), + call("... licenses imported from SPDX"), + call("... license groups imported from ScanCode LicenseDB"), + call("... standard license policy created"), + ] + + # Note: The logger was patched, so we check that `info` was called with the + # expected strings in the correct order. + mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + # No errors should have been logged + mock_logger.error.assert_not_called() + + @patch("application.licenses.management.commands.initial_license_load.import_licenses") + @patch("application.licenses.management.commands.initial_license_load.import_scancode_licensedb") + @patch("application.licenses.management.commands.initial_license_load.create_scancode_standard_policy") + @patch("application.licenses.management.commands.initial_license_load.License_Group.objects.exists") + @patch("application.licenses.management.commands.initial_license_load.License.objects.exists") + @patch("application.licenses.management.commands.initial_license_load.License_Policy.objects.exists") + @patch("application.licenses.management.commands.initial_license_load.logger") + def test_handle_early_exit( + self, + mock_logger, + mock_license_policy_exists, + mock_license_group_exists, + mock_license_exists, + mock_create_policy, + mock_import_scancode, + mock_import_licenses, + ): + """ + When any of the existence checks return True, the command should exit + immediately – no import helpers are called and no INFO logs are emitted. 
+ """ + # Simulate that licenses already exist + mock_license_exists.return_value = True + mock_license_group_exists.return_value = False + mock_license_policy_exists.return_value = False + + call_command("initial_license_load") + + # Import helpers should **not** be called + mock_import_licenses.assert_not_called() + mock_import_scancode.assert_not_called() + mock_create_policy.assert_not_called() + + # No INFO or ERROR logs should have been written + mock_logger.info.assert_not_called() + mock_logger.error.assert_not_called() + + @patch("application.licenses.management.commands.initial_license_load.import_licenses") + @patch("application.licenses.management.commands.initial_license_load.import_scancode_licensedb") + @patch("application.licenses.management.commands.initial_license_load.create_scancode_standard_policy") + @patch("application.licenses.management.commands.initial_license_load.License_Group.objects.exists") + @patch("application.licenses.management.commands.initial_license_load.License.objects.exists") + @patch("application.licenses.management.commands.initial_license_load.License_Policy.objects.exists") + @patch("application.licenses.management.commands.initial_license_load.logger") + def test_handle_exception_logging( + self, + mock_logger, + mock_license_policy_exists, + mock_license_group_exists, + mock_license_exists, + mock_create_policy, + mock_import_scancode, + mock_import_licenses, + ): + """ + If `import_licenses` raises an exception, the exception message and stack + trace should be logged, and the remaining helpers must **not** be invoked. + """ + # All exist checks return False → the command will attempt imports + mock_license_exists.return_value = False + mock_license_group_exists.return_value = False + mock_license_policy_exists.return_value = False + + # Make the first import raise + mock_import_licenses.side_effect = RuntimeError("Import failed") + + call_command("initial_license_load") + + # Verify that import_licenses raised and the error was caught + mock_import_licenses.assert_called_once_with() + + # The remaining helpers should not be called + mock_import_scancode.assert_not_called() + mock_create_policy.assert_not_called() + + # Verify that the logger received the expected INFO messages + expected_calls = [call("Importing licenses, license groups and license policies ...")] + + # Note: The logger was patched, so we check that `info` was called with the + # expected strings in the correct order. 
+ mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + # Check that the exception message was logged as an error; the traceback assertion + # is left commented out because its exact format is implementation-specific + mock_logger.error.assert_any_call("Import failed") + # mock_logger.error.assert_any_call(traceback.format_exc()) diff --git a/backend/unittests/licenses/services/test_concluded_license.py b/backend/unittests/licenses/services/test_concluded_license.py new file mode 100644 index 000000000..b304bab4b --- /dev/null +++ b/backend/unittests/licenses/services/test_concluded_license.py @@ -0,0 +1,414 @@ +from unittest.mock import patch + +from django.core.management import call_command + +from application.access_control.models import User +from application.core.models import Product +from application.licenses.models import Concluded_License, License, License_Component +from application.licenses.services.concluded_license import ( + ConcludeLicenseApplicator, + update_concluded_license, +) +from application.licenses.types import NO_LICENSE_INFORMATION +from unittests.base_test_case import BaseTestCase + + +class TestConcludedLicense(BaseTestCase): + @classmethod + def setUpClass(cls): + call_command( + "loaddata", + [ + "unittests/fixtures/initial_license_data.json", + "unittests/fixtures/unittests_fixtures.json", + "unittests/fixtures/unittests_license_fixtures.json", + ], + ) + + super().setUpClass() + + def setUp(self): + super().setUp() + self.license_obj = License.objects.first() + + self.product_indirect = Product.objects.get(pk=2) + self.product_indirect.product_group = Product.objects.get(name="db_product_group") + self.product_indirect.save() + + self.product_direct = Product.objects.get(pk=1) + self.product_direct.product_group = Product.objects.get(name="db_product_group") + self.product_direct.save() + + self.component = License_Component( + product=self.product_direct, + component_name="test_component", + component_version="1.0.0", + component_name_version="test_component:1.0.0", + component_purl_type="npm", + effective_spdx_license=None, + effective_license_expression="", + effective_non_spdx_license="", + ) + self.db_user = User.objects.get(username="db_admin") + + def test_apply_concluded_license_no_product_group_exact_match_spdx_license(self): + """ + Test apply_concluded_license when there's an exact match for the component + with a concluded_spdx_license.
+ """ + # Arrange + concluded_license = Concluded_License.objects.create( + product=self.product_direct, + component_purl_type="npm", + component_name="test_component", + component_version="1.0.0", + manual_concluded_spdx_license=self.license_obj, + user=self.db_user, + ) + concluded_license_applicator = ConcludeLicenseApplicator(self.product_direct) + + # Act + concluded_license_applicator.apply_concluded_license(self.component) + + # Assert + self.assertEqual(self.component.manual_concluded_spdx_license, self.license_obj) + self.assertEqual(self.component.manual_concluded_license_name, self.license_obj.spdx_id) + self.assertEqual(self.component.manual_concluded_comment, f"Set manually by {str(concluded_license.user)}") + self.assertEqual(self.component.manual_concluded_license_expression, "") + self.assertEqual(self.component.manual_concluded_non_spdx_license, "") + + # Clean up + concluded_license.delete() + + def test_apply_concluded_license_with_product_group_exact_match_spdx_license(self): + """ + Test apply_concluded_license when there's an exact match for the component + with a concluded_spdx_license for another product in the same product_group. + """ + # Arrange + concluded_license = Concluded_License.objects.create( + product=self.product_indirect, + component_purl_type="npm", + component_name="test_component", + component_version="1.0.0", + manual_concluded_spdx_license=self.license_obj, + user=self.db_user, + ) + concluded_license_applicator = ConcludeLicenseApplicator(self.product_direct) + + # Act + concluded_license_applicator.apply_concluded_license(self.component) + + # Assert + self.assertEqual(self.component.manual_concluded_spdx_license, self.license_obj) + self.assertEqual(self.component.manual_concluded_license_name, self.license_obj.spdx_id) + self.assertEqual( + self.component.manual_concluded_comment, + f"Copied from product {self.product_indirect.name}, set by {str(concluded_license.user)}", + ) + self.assertEqual(self.component.manual_concluded_license_expression, "") + self.assertEqual(self.component.manual_concluded_non_spdx_license, "") + + # Clean up + concluded_license.delete() + + def test_apply_concluded_license_exact_match_license_expression(self): + """ + Test apply_concluded_license when there's an exact match for the component + with a concluded_license_expression. + """ + # Arrange + concluded_license = Concluded_License.objects.create( + product=self.product_direct, + component_purl_type="npm", + component_name="test_component", + component_version="1.0.0", + manual_concluded_license_expression="MIT OR Apache-2.0", + user=self.db_user, + ) + concluded_license_applicator = ConcludeLicenseApplicator(self.product_direct) + + # Act + concluded_license_applicator.apply_concluded_license(self.component) + + # Assert + self.assertIsNone(self.component.manual_concluded_spdx_license) + self.assertEqual(self.component.manual_concluded_license_name, "MIT OR Apache-2.0") + self.assertEqual(self.component.manual_concluded_license_expression, "MIT OR Apache-2.0") + self.assertEqual(self.component.manual_concluded_comment, f"Set manually by {str(concluded_license.user)}") + self.assertEqual(self.component.manual_concluded_non_spdx_license, "") + + # Clean up + concluded_license.delete() + + def test_apply_concluded_license_exact_match_non_spdx_license(self): + """ + Test apply_concluded_license when there's an exact match for the component + with a concluded_non_spdx_license. 
+ """ + # Arrange + concluded_license = Concluded_License.objects.create( + product=self.product_direct, + component_purl_type="npm", + component_name="test_component", + component_version="1.0.0", + manual_concluded_non_spdx_license="Custom License", + user=self.db_user, + ) + concluded_license_applicator = ConcludeLicenseApplicator(self.product_direct) + + # Act + concluded_license_applicator.apply_concluded_license(self.component) + + # Assert + self.assertIsNone(self.component.manual_concluded_spdx_license) + self.assertEqual(self.component.manual_concluded_license_name, "Custom License") + self.assertEqual(self.component.manual_concluded_non_spdx_license, "Custom License") + self.assertEqual(self.component.manual_concluded_comment, f"Set manually by {str(concluded_license.user)}") + self.assertEqual(self.component.manual_concluded_license_expression, "") + + # Clean up + concluded_license.delete() + + def test_apply_concluded_license_name_no_product_group_match_different_version(self): + """ + Test apply_concluded_license when there's a match by name but not version. + """ + # Arrange + concluded_license = Concluded_License.objects.create( + product=self.product_direct, + component_purl_type="npm", + component_name="test_component", + component_version="2.0.0", # Different version + manual_concluded_spdx_license=self.license_obj, + user=self.db_user, + ) + concluded_license_applicator = ConcludeLicenseApplicator(self.product_direct) + + # Act + concluded_license_applicator.apply_concluded_license(self.component) + + # Assert + self.assertEqual(self.component.manual_concluded_spdx_license, self.license_obj) + self.assertEqual(self.component.manual_concluded_license_name, self.license_obj.spdx_id) + self.assertEqual( + self.component.manual_concluded_comment, + f"Copied from version {concluded_license.component_version}, set by {str(concluded_license.user)}", + ) + self.assertEqual(self.component.manual_concluded_license_expression, "") + self.assertEqual(self.component.manual_concluded_non_spdx_license, "") + + # Clean up + concluded_license.delete() + + def test_apply_concluded_license_name_with_product_group_match_different_version(self): + """ + Test apply_concluded_license when there's a match by name but not version. + """ + # Arrange + concluded_license = Concluded_License.objects.create( + product=self.product_indirect, + component_purl_type="npm", + component_name="test_component", + component_version="2.0.0", # Different version + manual_concluded_spdx_license=self.license_obj, + user=self.db_user, + ) + concluded_license_applicator = ConcludeLicenseApplicator(self.product_direct) + + # Act + concluded_license_applicator.apply_concluded_license(self.component) + + # Assert + self.assertEqual(self.component.manual_concluded_spdx_license, self.license_obj) + self.assertEqual(self.component.manual_concluded_license_name, self.license_obj.spdx_id) + self.assertEqual( + self.component.manual_concluded_comment, + f"Copied from product {self.product_indirect} and version {concluded_license.component_version}, set by {str(concluded_license.user)}", + ) + self.assertEqual(self.component.manual_concluded_license_expression, "") + self.assertEqual(self.component.manual_concluded_non_spdx_license, "") + + # Clean up + concluded_license.delete() + + def test_apply_concluded_license_group_no_match(self): + """ + Test apply_concluded_license when there's no match at all. 
+ """ + concluded_license_applicator = ConcludeLicenseApplicator(self.product_direct) + + # Act + concluded_license_applicator.apply_concluded_license(self.component) + + # Assert + self.assertIsNone(self.component.manual_concluded_spdx_license) + self.assertEqual(self.component.manual_concluded_license_name, NO_LICENSE_INFORMATION) + self.assertEqual(self.component.manual_concluded_comment, "") + self.assertEqual(self.component.manual_concluded_license_expression, "") + self.assertEqual(self.component.manual_concluded_non_spdx_license, "") + + def test_apply_concluded_license_no_change_needed(self): + """ + Test apply_concluded_license when the effective license already matches the concluded license. + """ + # Arrange + self.component.effective_spdx_license = self.license_obj + + concluded_license = Concluded_License.objects.create( + product=self.product_direct, + component_purl_type="npm", + component_name="test_component", + component_version="1.0.0", + manual_concluded_spdx_license=self.license_obj, + user=self.db_user, + ) + concluded_license_applicator = ConcludeLicenseApplicator(self.product_direct) + + # Act + concluded_license_applicator.apply_concluded_license(self.component) + + # Assert - No changes should be made + self.assertIsNone(self.component.manual_concluded_spdx_license) + self.assertEqual(self.component.manual_concluded_license_name, NO_LICENSE_INFORMATION) + self.assertEqual(self.component.manual_concluded_comment, "") + self.assertEqual(self.component.manual_concluded_license_expression, "") + self.assertEqual(self.component.manual_concluded_non_spdx_license, "") + + # Clean up + concluded_license.delete() + + @patch("application.licenses.services.concluded_license.get_current_user") + def test_update_concluded_license_delete_existing(self, mock_get_current_user): + """ + Test update_concluded_license when concluded_license_name is NO_LICENSE_INFORMATION + and a concluded license exists. + """ + # Arrange + mock_get_current_user.return_value = self.db_user + + self.component.manual_concluded_license_name = NO_LICENSE_INFORMATION + + concluded_license = Concluded_License.objects.create( + product=self.product_direct, + component_purl_type="npm", + component_name="test_component", + component_version="1.0.0", + manual_concluded_spdx_license=self.license_obj, + user=self.db_user, + ) + + # Act + update_concluded_license(self.component) + + # Assert + with self.assertRaises(Concluded_License.DoesNotExist): + Concluded_License.objects.get( + product=self.product_direct, + component_purl_type="npm", + component_name="test_component", + component_version="1.0.0", + ) + + @patch("application.licenses.services.concluded_license.get_current_user") + def test_update_concluded_license_no_existing_to_delete(self, mock_get_current_user): + """ + Test update_concluded_license when concluded_license_name is NO_LICENSE_INFORMATION + and no concluded license exists. 
+ """ + # Arrange + mock_get_current_user.return_value = self.db_user + + self.component.manual_concluded_license_name = NO_LICENSE_INFORMATION + + # Act - This should not raise an exception + update_concluded_license(self.component) + + # Assert + self.assertEqual( + Concluded_License.objects.filter( + product=self.product_direct, + component_purl_type="npm", + component_name="test_component", + component_version="1.0.0", + ).count(), + 0, + ) + + @patch("application.licenses.services.concluded_license.get_current_user") + def test_update_concluded_license_create_new(self, mock_get_current_user): + """ + Test update_concluded_license when concluded_license_name is not NO_LICENSE_INFORMATION + and no concluded license exists. + """ + # Arrange + mock_get_current_user.return_value = self.db_user + + self.component.manual_concluded_license_name = "MIT" + self.component.manual_concluded_spdx_license = self.license_obj + self.component.manual_concluded_license_expression = "" + self.component.manual_concluded_non_spdx_license = "" + + # Act + update_concluded_license(self.component) + + # Assert + concluded_license = Concluded_License.objects.get( + product=self.product_direct, + component_purl_type="npm", + component_name="test_component", + component_version="1.0.0", + ) + + self.assertEqual(concluded_license.manual_concluded_spdx_license, self.license_obj) + self.assertEqual(concluded_license.manual_concluded_license_expression, "") + self.assertEqual(concluded_license.manual_concluded_non_spdx_license, "") + self.assertEqual(concluded_license.user, self.db_user) + self.assertEqual(self.component.manual_concluded_comment, f"Set manually by {str(self.db_user)}") + + # Clean up + concluded_license.delete() + + @patch("application.licenses.services.concluded_license.get_current_user") + def test_update_concluded_license_update_existing(self, mock_get_current_user): + """ + Test update_concluded_license when concluded_license_name is not NO_LICENSE_INFORMATION + and a concluded license exists. 
+ """ + # Arrange + mock_get_current_user.return_value = self.db_user + + self.component.manual_concluded_license_name = "MIT" + self.component.manual_concluded_spdx_license = self.license_obj + self.component.manual_concluded_license_expression = "" + self.component.manual_concluded_non_spdx_license = "" + + # Create an existing concluded license with different values + concluded_license = Concluded_License.objects.create( + product=self.product_direct, + component_purl_type="npm", + component_name="test_component", + component_version="1.0.0", + manual_concluded_license_expression="Apache-2.0", # Different from component + user=self.user_admin, # Different user + ) + + # Act + update_concluded_license(self.component) + + # Assert + updated_license = Concluded_License.objects.get( + product=self.product_direct, + component_purl_type="npm", + component_name="test_component", + component_version="1.0.0", + ) + + self.assertEqual(updated_license.manual_concluded_spdx_license, self.license_obj) + self.assertEqual(updated_license.manual_concluded_license_expression, "") + self.assertEqual(updated_license.manual_concluded_non_spdx_license, "") + self.assertEqual(updated_license.user, self.db_user) # Should be updated to current user + self.assertEqual(self.component.manual_concluded_comment, f"Set manually by {str(self.db_user)}") + + # Clean up + updated_license.delete() diff --git a/backend/unittests/licenses/services/test_export_license_components.py b/backend/unittests/licenses/services/test_export_license_components.py index 557d2859d..79f7f9b74 100644 --- a/backend/unittests/licenses/services/test_export_license_components.py +++ b/backend/unittests/licenses/services/test_export_license_components.py @@ -15,7 +15,7 @@ def setUpClass(self): call_command( "loaddata", [ - "application/licenses/fixtures/initial_data.json", + "unittests/fixtures/initial_license_data.json", "unittests/fixtures/unittests_fixtures.json", "unittests/fixtures/unittests_license_fixtures.json", ], @@ -32,7 +32,7 @@ def test_export_license_components_excel(self): i = 0 for row in worksheet.rows: i += 1 - self.assertEqual(23, len(row)) + self.assertEqual(45, len(row)) self.assertEqual(2, i) actual_values = [] @@ -43,6 +43,7 @@ def test_export_license_components_excel(self): "Branch", "Branch id", "Component cpe", + "Component cyclonedx bom link", "Component dependencies", "Component name", "Component name version", @@ -50,16 +51,37 @@ def test_export_license_components_excel(self): "Component purl type", "Component version", "Created", + "Effective license expression", + "Effective license name", + "Effective multiple licenses", + "Effective non spdx license", + "Effective spdx license", + "Effective spdx license id", "Evaluation result", "Id", "Import last seen", + "Imported concluded license expression", + "Imported concluded license name", + "Imported concluded multiple licenses", + "Imported concluded non spdx license", + "Imported concluded spdx license", + "Imported concluded spdx license id", + "Imported declared license expression", + "Imported declared license name", + "Imported declared multiple licenses", + "Imported declared non spdx license", + "Imported declared spdx license", + "Imported declared spdx license id", "Last change", - "License", - "License expression", - "License id", - "License name", - "Non spdx license", + "Manual concluded comment", + "Manual concluded license expression", + "Manual concluded license name", + "Manual concluded non spdx license", + "Manual concluded spdx license", + 
"Manual concluded spdx license id", "Numerical evaluation result", + "Origin service", + "Origin service id", "Product", "Product id", "Upload filename", @@ -67,22 +89,44 @@ def test_export_license_components_excel(self): None, "", "", + "", "internal_component", "internal_component:1.0.0", "", "", "1.0.0", datetime(2022, 12, 15, 16, 10, 35, 513000), + "", + "No license information", + "", + "", + None, + None, "Allowed", 1, datetime(2022, 12, 15, 16, 10, 35, 513000), - datetime(2022, 12, 15, 16, 10, 35, 513000), - None, "", + "No license information", + "", + "", + None, None, + "", "internal license", + "", "internal license", + None, + None, + datetime(2022, 12, 15, 16, 10, 35, 513000), + "", + "", + "No license information", + "", + None, + None, 1, + None, + None, "db_product_internal", 1, "", diff --git a/backend/unittests/licenses/services/test_export_license_policy.py b/backend/unittests/licenses/services/test_export_license_policy.py index fe5434461..4a368148b 100644 --- a/backend/unittests/licenses/services/test_export_license_policy.py +++ b/backend/unittests/licenses/services/test_export_license_policy.py @@ -1,9 +1,12 @@ from django.core.management import call_command from application.licenses.models import License, License_Policy, License_Policy_Item -from application.licenses.services.export_license_policy import ( - export_license_policy_json, - export_license_policy_yaml, +from application.licenses.services.export_license_policy_sbom_utility import ( + export_license_policy_sbom_utility, +) +from application.licenses.services.export_license_policy_secobserve import ( + export_license_policy_secobserve_json, + export_license_policy_secobserve_yaml, ) from application.licenses.types import License_Policy_Evaluation_Result from unittests.base_test_case import BaseTestCase @@ -15,7 +18,7 @@ def setUpClass(self): call_command( "loaddata", [ - "application/licenses/fixtures/initial_data.json", + "unittests/fixtures/initial_license_data.json", "unittests/fixtures/unittests_fixtures.json", "unittests/fixtures/unittests_license_fixtures.json", ], @@ -40,11 +43,20 @@ def setUpClass(self): non_spdx_license="Non-SPDX", evaluation_result=License_Policy_Evaluation_Result.RESULT_FORBIDDEN, ).save() + License_Policy_Item( + license_policy=license_policy, + non_spdx_license="Ignored license", + evaluation_result=License_Policy_Evaluation_Result.RESULT_IGNORED, + comment="Ignored license comment", + ).save() + License_Policy_Item( + license_policy=license_policy, + non_spdx_license="Unknown license", + evaluation_result=License_Policy_Evaluation_Result.RESULT_UNKNOWN, + comment="Unknown license comment", + ).save() - self.license_policy_with_parent = License_Policy( - name="license_policy_with_parent", - parent=license_policy, - ) + self.license_policy_with_parent = License_Policy(name="license_policy_with_parent", parent=license_policy) self.license_policy_with_parent.save() License_Policy_Item( @@ -76,7 +88,7 @@ def setUpClass(self): def test_export_json(self): license_policy = License_Policy.objects.get(pk=1000) - json_data = export_license_policy_json(license_policy) + json_data = export_license_policy_secobserve_json(license_policy) json_data_expected = """{ "description": "description_1000", @@ -105,6 +117,18 @@ def test_export_json(self): "evaluation_result": "Forbidden", "from_parent": false, "non_spdx_license": "Non-SPDX" + }, + { + "comment": "Ignored license comment", + "evaluation_result": "Ignored", + "from_parent": false, + "non_spdx_license": "Ignored license" + }, + { + 
"comment": "Unknown license comment", + "evaluation_result": "Unknown", + "from_parent": false, + "non_spdx_license": "Unknown license" } ], "name": "public" @@ -113,7 +137,7 @@ def test_export_json(self): def test_export_yaml(self): license_policy = License_Policy.objects.get(pk=1000) - yaml_data = export_license_policy_yaml(license_policy) + yaml_data = export_license_policy_secobserve_yaml(license_policy) yaml_data_expected = """description: description_1000 ignore_component_types: @@ -133,12 +157,20 @@ def test_export_yaml(self): - evaluation_result: Forbidden from_parent: false non_spdx_license: Non-SPDX +- comment: Ignored license comment + evaluation_result: Ignored + from_parent: false + non_spdx_license: Ignored license +- comment: Unknown license comment + evaluation_result: Unknown + from_parent: false + non_spdx_license: Unknown license name: public """ self.assertEqual(yaml_data_expected, yaml_data) def test_export_json_with_parent(self): - json_data = export_license_policy_json(self.license_policy_with_parent) + json_data = export_license_policy_secobserve_json(self.license_policy_with_parent) json_data_expected = """{ "description": "", @@ -167,6 +199,18 @@ def test_export_json_with_parent(self): "from_parent": false, "non_spdx_license": "Non-SPDX" }, + { + "comment": "Ignored license comment", + "evaluation_result": "Ignored", + "from_parent": true, + "non_spdx_license": "Ignored license" + }, + { + "comment": "Unknown license comment", + "evaluation_result": "Unknown", + "from_parent": true, + "non_spdx_license": "Unknown license" + }, { "comment": "Forbidden non-SPDX license", "evaluation_result": "Forbidden", @@ -180,7 +224,7 @@ def test_export_json_with_parent(self): self.assertEqual(json_data_expected, json_data) def test_export_yaml_with_parent(self): - yaml_data = export_license_policy_yaml(self.license_policy_with_parent) + yaml_data = export_license_policy_secobserve_yaml(self.license_policy_with_parent) yaml_data_expected = """description: '' items: @@ -200,6 +244,14 @@ def test_export_yaml_with_parent(self): evaluation_result: Allowed from_parent: false non_spdx_license: Non-SPDX +- comment: Ignored license comment + evaluation_result: Ignored + from_parent: true + non_spdx_license: Ignored license +- comment: Unknown license comment + evaluation_result: Unknown + from_parent: true + non_spdx_license: Unknown license - comment: Forbidden non-SPDX license evaluation_result: Forbidden from_parent: false @@ -208,3 +260,202 @@ def test_export_yaml_with_parent(self): parent: public """ self.assertEqual(yaml_data_expected, yaml_data) + + def test_export_sbom_utility(self): + license_policy = License_Policy.objects.get(pk=1000) + json_data = export_license_policy_sbom_utility(license_policy) + + json_data_expected = """{ + "policies": [ + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "BlueOak-1-0-0", + "id": "BlueOak-1.0.0", + "name": "Blue Oak Model License 1.0.0", + "osi": true, + "reference": "https://spdx.org/licenses/BlueOak-1.0.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "0BSD", + "id": "0BSD", + "name": "BSD Zero Clause License", + "osi": true, + "reference": "https://spdx.org/licenses/0BSD.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "REVIEW REQUIRED" + ], + "deprecated": false, + "family": "MIT-OR-3BSD", + "id": "", + "name": "MIT OR 3BSD", + "osi": false, + "reference": "", + "usagePolicy": "needs-review" + }, + { + "annotationRefs": [ + 
"FORBIDDEN" + ], + "deprecated": false, + "family": "Non-SPDX", + "id": "", + "name": "Non-SPDX", + "osi": false, + "reference": "", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "IGNORED" + ], + "deprecated": false, + "family": "Ignored-license", + "id": "", + "name": "Ignored license", + "notes": [ + "Ignored license comment" + ], + "osi": false, + "reference": "", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "UNKNOWN" + ], + "deprecated": false, + "family": "Unknown-license", + "id": "", + "name": "Unknown license", + "notes": [ + "Unknown license comment" + ], + "osi": false, + "reference": "", + "usagePolicy": "needs-review" + } + ] +}""" + self.assertEqual(json_data_expected, json_data) + + def test_export_sbom_utility_with_parent(self): + json_data = export_license_policy_sbom_utility(self.license_policy_with_parent) + + json_data_expected = """{ + "policies": [ + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "BlueOak-1-0-0", + "id": "BlueOak-1.0.0", + "name": "Blue Oak Model License 1.0.0", + "osi": true, + "reference": "https://spdx.org/licenses/BlueOak-1.0.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "0BSD", + "id": "0BSD", + "name": "BSD Zero Clause License", + "notes": [ + "Permissive license" + ], + "osi": true, + "reference": "https://spdx.org/licenses/0BSD.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "MIT-OR-3BSD", + "id": "", + "name": "MIT OR 3BSD", + "notes": [ + "Permissive license expression" + ], + "osi": false, + "reference": "", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Non-SPDX", + "id": "", + "name": "Non-SPDX", + "notes": [ + "Permissive non-SPDX license" + ], + "osi": false, + "reference": "", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "IGNORED" + ], + "deprecated": false, + "family": "Ignored-license", + "id": "", + "name": "Ignored license", + "notes": [ + "Ignored license comment" + ], + "osi": false, + "reference": "", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "UNKNOWN" + ], + "deprecated": false, + "family": "Unknown-license", + "id": "", + "name": "Unknown license", + "notes": [ + "Unknown license comment" + ], + "osi": false, + "reference": "", + "usagePolicy": "needs-review" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Another-non-SPDX", + "id": "", + "name": "Another non-SPDX", + "notes": [ + "Forbidden non-SPDX license" + ], + "osi": false, + "reference": "", + "usagePolicy": "deny" + } + ] +}""" + self.assertEqual(json_data_expected, json_data) diff --git a/backend/unittests/licenses/services/test_license_component.py b/backend/unittests/licenses/services/test_license_component.py new file mode 100644 index 000000000..f29cfc415 --- /dev/null +++ b/backend/unittests/licenses/services/test_license_component.py @@ -0,0 +1,391 @@ +from unittest.mock import ANY, MagicMock, patch + +from django.core.management import call_command +from django.test import TestCase +from rest_framework.exceptions import ValidationError + +from application.core.models import Product +from application.licenses.models import License, License_Component +from application.licenses.services.license_component import ( + save_concluded_license, + set_effective_license, +) +from application.licenses.types import NO_LICENSE_INFORMATION +from 
unittests.base_test_case import BaseTestCase + + +class TestLicenseComponent(BaseTestCase): + @classmethod + def setUpClass(cls): + call_command( + "loaddata", + [ + "unittests/fixtures/initial_license_data.json", + "unittests/fixtures/unittests_fixtures.json", + "unittests/fixtures/unittests_license_fixtures.json", + ], + ) + + super().setUpClass() + + def test_set_effective_license_with_concluded_license(self): + """ + Test that when concluded_license_name is not NO_LICENSE_INFORMATION, + the effective license is set from the concluded license. + """ + # Arrange + license_obj = License.objects.first() + component = License_Component( + product=Product.objects.get(pk=1), + component_name="test_component", + component_version="1.0.0", + component_name_version="test_component:1.0.0", + # Set concluded license fields + manual_concluded_license_name="MIT", + manual_concluded_spdx_license=license_obj, + manual_concluded_license_expression="MIT expression", + manual_concluded_non_spdx_license="MIT non-spdx", + # Set imported concluded license fields (should be ignored) + imported_concluded_license_name="Apache-2.0", + imported_concluded_spdx_license=None, + imported_concluded_license_expression="Apache-2.0 expression", + imported_concluded_non_spdx_license="Apache-2.0 non-spdx", + imported_concluded_multiple_licenses="Apache-2.0, GPL-2.0", + # Set imported declared license fields (should be ignored) + imported_declared_license_name="GPL-3.0", + imported_declared_spdx_license=None, + imported_declared_license_expression="GPL-3.0 expression", + imported_declared_non_spdx_license="GPL-3.0 non-spdx", + imported_declared_multiple_licenses="GPL-3.0, BSD-3-Clause", + ) + + # Act + set_effective_license(component) + + # Assert + self.assertEqual(component.effective_license_name, "MIT") + self.assertEqual(component.effective_spdx_license, license_obj) + self.assertEqual(component.effective_license_expression, "MIT expression") + self.assertEqual(component.effective_non_spdx_license, "MIT non-spdx") + self.assertEqual(component.effective_multiple_licenses, "") + + def test_set_effective_license_with_imported_concluded_license(self): + """ + Test that when concluded_license_name is NO_LICENSE_INFORMATION but + imported_concluded_license_name is not, the effective license is set from + the imported concluded license. 
+ """ + # Arrange + license_obj = License.objects.first() + component = License_Component( + product=Product.objects.get(pk=1), + component_name="test_component", + component_version="1.0.0", + component_name_version="test_component:1.0.0", + # Set concluded license fields to NO_LICENSE_INFORMATION + manual_concluded_license_name=NO_LICENSE_INFORMATION, + manual_concluded_spdx_license=None, + manual_concluded_license_expression="", + manual_concluded_non_spdx_license="", + # Set imported concluded license fields + imported_concluded_license_name="Apache-2.0", + imported_concluded_spdx_license=license_obj, + imported_concluded_license_expression="Apache-2.0 expression", + imported_concluded_non_spdx_license="Apache-2.0 non-spdx", + imported_concluded_multiple_licenses="Apache-2.0, GPL-2.0", + # Set imported declared license fields (should be ignored) + imported_declared_license_name="GPL-3.0", + imported_declared_spdx_license=None, + imported_declared_license_expression="GPL-3.0 expression", + imported_declared_non_spdx_license="GPL-3.0 non-spdx", + imported_declared_multiple_licenses="GPL-3.0, BSD-3-Clause", + ) + + # Act + set_effective_license(component) + + # Assert + self.assertEqual(component.effective_license_name, "Apache-2.0") + self.assertEqual(component.effective_spdx_license, license_obj) + self.assertEqual(component.effective_license_expression, "Apache-2.0 expression") + self.assertEqual(component.effective_non_spdx_license, "Apache-2.0 non-spdx") + self.assertEqual(component.effective_multiple_licenses, "Apache-2.0, GPL-2.0") + + def test_set_effective_license_with_imported_declared_license(self): + """ + Test that when both concluded_license_name and imported_concluded_license_name + are NO_LICENSE_INFORMATION but imported_declared_license_name is not, + the effective license is set from the imported declared license. 
+ """ + # Arrange + license_obj = License.objects.first() + component = License_Component( + product=Product.objects.get(pk=1), + component_name="test_component", + component_version="1.0.0", + component_name_version="test_component:1.0.0", + # Set concluded license fields to NO_LICENSE_INFORMATION + manual_concluded_license_name=NO_LICENSE_INFORMATION, + manual_concluded_spdx_license=None, + manual_concluded_license_expression="", + manual_concluded_non_spdx_license="", + # Set imported concluded license fields to NO_LICENSE_INFORMATION + imported_concluded_license_name=NO_LICENSE_INFORMATION, + imported_concluded_spdx_license=None, + imported_concluded_license_expression="", + imported_concluded_non_spdx_license="", + imported_concluded_multiple_licenses="", + # Set imported declared license fields + imported_declared_license_name="GPL-3.0", + imported_declared_spdx_license=license_obj, + imported_declared_license_expression="GPL-3.0 expression", + imported_declared_non_spdx_license="GPL-3.0 non-spdx", + imported_declared_multiple_licenses="GPL-3.0, BSD-3-Clause", + ) + + # Act + set_effective_license(component) + + # Assert + self.assertEqual(component.effective_license_name, "GPL-3.0") + self.assertEqual(component.effective_spdx_license, license_obj) + self.assertEqual(component.effective_license_expression, "GPL-3.0 expression") + self.assertEqual(component.effective_non_spdx_license, "GPL-3.0 non-spdx") + self.assertEqual(component.effective_multiple_licenses, "GPL-3.0, BSD-3-Clause") + + def test_set_effective_license_with_no_license_information(self): + """ + Test that when all license names are NO_LICENSE_INFORMATION, + the effective license remains NO_LICENSE_INFORMATION. + """ + # Arrange + component = License_Component( + product=Product.objects.get(pk=1), + component_name="test_component", + component_version="1.0.0", + component_name_version="test_component:1.0.0", + # Set all license fields to NO_LICENSE_INFORMATION + manual_concluded_license_name=NO_LICENSE_INFORMATION, + manual_concluded_spdx_license=None, + manual_concluded_license_expression="", + manual_concluded_non_spdx_license="", + imported_concluded_license_name=NO_LICENSE_INFORMATION, + imported_concluded_spdx_license=None, + imported_concluded_license_expression="", + imported_concluded_non_spdx_license="", + imported_concluded_multiple_licenses="", + imported_declared_license_name=NO_LICENSE_INFORMATION, + imported_declared_spdx_license=None, + imported_declared_license_expression="", + imported_declared_non_spdx_license="", + imported_declared_multiple_licenses="", + ) + + # Act + set_effective_license(component) + + # Assert + self.assertEqual(component.effective_license_name, NO_LICENSE_INFORMATION) + self.assertIsNone(component.effective_spdx_license) + self.assertEqual(component.effective_license_expression, "") + self.assertEqual(component.effective_non_spdx_license, "") + self.assertEqual(component.effective_multiple_licenses, "") + + @patch("application.licenses.services.license_component.update_concluded_license") + @patch("application.licenses.services.license_component.get_license_policy") + def test_save_concluded_license_with_spdx_license(self, mock_get_license_policy, mock_update_concluded_license): + """ + Test save_concluded_license when a concluded_spdx_license is provided. 
+ """ + # Arrange + mock_get_license_policy.return_value = None # No license policy + license_obj = License.objects.first() + component = License_Component( + product=Product.objects.get(pk=1), + component_name="test_component", + component_version="1.0.0", + component_name_version="test_component:1.0.0", + manual_concluded_spdx_license=license_obj, + ) + + # Act + with patch.object(component, "save") as mock_save: + save_concluded_license(component) + + # Assert + self.assertEqual(component.manual_concluded_license_name, license_obj.spdx_id) + mock_update_concluded_license.assert_called_once_with(component) + mock_get_license_policy.assert_called_once_with(component.product) + mock_save.assert_called_once() + + @patch("application.licenses.services.license_component.update_concluded_license") + @patch("application.licenses.services.license_component.get_license_policy") + @patch("application.licenses.services.license_component.get_spdx_licensing") + def test_save_concluded_license_with_valid_license_expression( + self, mock_get_spdx_licensing, mock_get_license_policy, mock_update_concluded_license + ): + """ + Test save_concluded_license when a valid concluded_license_expression is provided. + """ + # Arrange + mock_get_license_policy.return_value = None # No license policy + + # Mock the licensing validation + mock_licensing = MagicMock() + mock_expression_info = MagicMock() + mock_expression_info.errors = [] + mock_expression_info.normalized_expression = "MIT OR Apache-2.0" + mock_licensing.validate.return_value = mock_expression_info + mock_get_spdx_licensing.return_value = mock_licensing + + component = License_Component( + product=Product.objects.get(pk=1), + component_name="test_component", + component_version="1.0.0", + component_name_version="test_component:1.0.0", + manual_concluded_license_expression="MIT OR Apache-2.0", + ) + + # Act + with patch.object(component, "save") as mock_save: + save_concluded_license(component) + + # Assert + self.assertEqual(component.manual_concluded_license_name, "MIT OR Apache-2.0") + mock_licensing.validate.assert_called_once_with("MIT OR Apache-2.0", strict=True) + mock_update_concluded_license.assert_called_once_with(component) + mock_get_license_policy.assert_called_once_with(component.product) + mock_save.assert_called_once() + + @patch("application.licenses.services.license_component.update_concluded_license") + @patch("application.licenses.services.license_component.get_license_policy") + @patch("application.licenses.services.license_component.get_spdx_licensing") + def test_save_concluded_license_with_invalid_license_expression( + self, mock_get_spdx_licensing, mock_get_license_policy, mock_update_concluded_license + ): + """ + Test save_concluded_license when an invalid concluded_license_expression is provided. 
+ """ + # Arrange + # Mock the licensing validation to return errors + mock_licensing = MagicMock() + mock_expression_info = MagicMock() + mock_expression_info.errors = ["Invalid license expression"] + mock_licensing.validate.return_value = mock_expression_info + mock_get_spdx_licensing.return_value = mock_licensing + + component = License_Component( + product=Product.objects.get(pk=1), + component_name="test_component", + component_version="1.0.0", + component_name_version="test_component:1.0.0", + manual_concluded_license_expression="INVALID-LICENSE", + ) + + # Act & Assert + with self.assertRaises(ValidationError): + save_concluded_license(component) + + # Verify the validation was called + mock_licensing.validate.assert_called_once_with("INVALID-LICENSE", strict=True) + # These should not be called due to the validation error + mock_update_concluded_license.assert_not_called() + mock_get_license_policy.assert_not_called() + + @patch("application.licenses.services.license_component.update_concluded_license") + @patch("application.licenses.services.license_component.get_license_policy") + def test_save_concluded_license_with_non_spdx_license(self, mock_get_license_policy, mock_update_concluded_license): + """ + Test save_concluded_license when a concluded_non_spdx_license is provided. + """ + # Arrange + mock_get_license_policy.return_value = None # No license policy + component = License_Component( + product=Product.objects.get(pk=1), + component_name="test_component", + component_version="1.0.0", + component_name_version="test_component:1.0.0", + manual_concluded_non_spdx_license="Custom License", + ) + + # Act + with patch.object(component, "save") as mock_save: + save_concluded_license(component) + + # Assert + self.assertEqual(component.manual_concluded_license_name, "Custom License") + mock_update_concluded_license.assert_called_once_with(component) + mock_get_license_policy.assert_called_once_with(component.product) + mock_save.assert_called_once() + + @patch("application.licenses.services.license_component.update_concluded_license") + @patch("application.licenses.services.license_component.get_license_policy") + def test_save_concluded_license_with_no_license_info(self, mock_get_license_policy, mock_update_concluded_license): + """ + Test save_concluded_license when no license information is provided. 
+ """ + # Arrange + mock_get_license_policy.return_value = None # No license policy + component = License_Component( + product=Product.objects.get(pk=1), + component_name="test_component", + component_version="1.0.0", + component_name_version="test_component:1.0.0", + ) + + # Act + with patch.object(component, "save") as mock_save: + save_concluded_license(component) + + # Assert + self.assertEqual(component.manual_concluded_license_name, NO_LICENSE_INFORMATION) + mock_update_concluded_license.assert_called_once_with(component) + mock_get_license_policy.assert_called_once_with(component.product) + mock_save.assert_called_once() + + @patch("application.licenses.services.license_component.update_concluded_license") + @patch("application.licenses.services.license_component.get_license_policy") + @patch("application.licenses.services.license_component.get_license_evaluation_results_for_product") + @patch("application.licenses.services.license_component.apply_license_policy_to_component") + @patch("application.licenses.services.license_component.get_comma_separated_as_list") + def test_save_concluded_license_with_license_policy( + self, + mock_get_comma_separated, + mock_apply_policy, + mock_get_results, + mock_get_license_policy, + mock_update_concluded_license, + ): + """ + Test save_concluded_license when a license policy exists for the product. + """ + # Arrange + mock_license_policy = MagicMock() + mock_license_policy.ignore_component_types = "type1,type2" + mock_get_license_policy.return_value = mock_license_policy + + mock_evaluation_results = {"result1": "value1"} + mock_get_results.return_value = mock_evaluation_results + + mock_get_comma_separated.return_value = ["type1", "type2"] + + component = License_Component( + product=Product.objects.get(pk=1), + component_name="test_component", + component_version="1.0.0", + component_name_version="test_component:1.0.0", + manual_concluded_non_spdx_license="Custom License", + ) + + # Act + with patch.object(component, "save") as mock_save: + save_concluded_license(component) + + # Assert + self.assertEqual(component.manual_concluded_license_name, "Custom License") + mock_update_concluded_license.assert_called_once_with(component) + mock_get_license_policy.assert_called_once_with(component.product) + mock_get_results.assert_called_once_with(component.product) + mock_get_comma_separated.assert_called_once_with(mock_license_policy.ignore_component_types) + mock_apply_policy.assert_called_once_with(component, mock_evaluation_results, ["type1", "type2"], ANY) + mock_save.assert_called_once() diff --git a/backend/unittests/licenses/services/test_license_policy.py b/backend/unittests/licenses/services/test_license_policy.py new file mode 100644 index 000000000..5a9344950 --- /dev/null +++ b/backend/unittests/licenses/services/test_license_policy.py @@ -0,0 +1,107 @@ +# backend/unittests/licenses/services/test_license_policy.py +from django.test import TestCase + +from application.licenses.models import ( + License_Group, + License_Policy, + License_Policy_Item, +) +from application.licenses.services.license_policy import create_scancode_standard_policy +from application.licenses.types import License_Policy_Evaluation_Result + + +class TestCreateScancodeStandardPolicy(TestCase): + def setUp(self): + # Clear any existing license policies + License_Policy.objects.exclude(parent__isnull=True).delete() + License_Policy.objects.all().delete() + License_Group.objects.all().delete() + + def 
test_create_scancode_standard_policy_creates_policy_when_none_exists(self): + # Arrange + # Ensure no "Standard" policy exists + License_Policy.objects.filter(name="Standard").delete() + + # Create some license groups with ScanCode LicenseDB in their name + License_Group.objects.create(name="Permissive (ScanCode LicenseDB)") + License_Group.objects.create(name="Copyleft (ScanCode LicenseDB)") + License_Group.objects.create(name="Public Domain (ScanCode LicenseDB)") + License_Group.objects.create(name="Other Group (ScanCode LicenseDB)") + License_Group.objects.create(name="Regular Group") + + # Act + create_scancode_standard_policy() + + # Assert + policy = License_Policy.objects.get(name="Standard") + self.assertIsNotNone(policy) + self.assertTrue(policy.is_public) + self.assertEqual(policy.description, "Created automatically during initial startup") + + # Check that policy items were created for the ScanCode groups + items = License_Policy_Item.objects.filter(license_policy=policy) + self.assertEqual(items.count(), 4) # Should have items for 4 groups with ScanCode LicenseDB + + # Check evaluation results based on group names + for item in items: + if item.license_group.name.startswith("Permissive") or item.license_group.name.startswith("Public Domain"): + self.assertEqual(item.evaluation_result, License_Policy_Evaluation_Result.RESULT_ALLOWED) + elif item.license_group.name.startswith("Copyleft"): + self.assertEqual(item.evaluation_result, License_Policy_Evaluation_Result.RESULT_FORBIDDEN) + else: + # Other groups should default to REVIEW_REQUIRED + self.assertEqual(item.evaluation_result, License_Policy_Evaluation_Result.RESULT_REVIEW_REQUIRED) + + def test_create_scancode_standard_policy_no_groups(self): + # Arrange + # Ensure no "Standard" policy exists + License_Policy.objects.filter(name="Standard").delete() + + # Create a regular group without ScanCode LicenseDB + License_Group.objects.create(name="Regular Group") + + # Act + create_scancode_standard_policy() + + # Assert + policy = License_Policy.objects.filter(name="Standard").first() + self.assertIsNone(policy) # Should not create policy when no ScanCode groups exist + + def test_create_scancode_standard_policy_already_exists(self): + # Arrange + # Create a "Standard" policy already + policy = License_Policy.objects.create( + name="Standard", description="Created automatically during initial startup", is_public=True + ) + + # Create some license groups + group1 = License_Group.objects.create(name="Permissive (ScanCode LicenseDB)") + group2 = License_Group.objects.create(name="Copyleft (ScanCode LicenseDB)") + + # Act + create_scancode_standard_policy() + + # Assert + # Should not create a new policy + policies = License_Policy.objects.filter(name="Standard") + self.assertEqual(policies.count(), 1) + + # Should not create new items + items = License_Policy_Item.objects.filter(license_policy=policy) + self.assertEqual(items.count(), 0) # No new items should be created + + def test_create_scancode_standard_policy_no_scan_code_groups(self): + # Arrange + # Ensure no "Standard" policy exists + License_Policy.objects.filter(name="Standard").delete() + + # Create groups without ScanCode LicenseDB + License_Group.objects.create(name="Regular Group 1") + License_Group.objects.create(name="Another Group") + + # Act + create_scancode_standard_policy() + + # Assert + policy = License_Policy.objects.filter(name="Standard").first() + self.assertIsNone(policy) # Should not create policy when no ScanCode groups exist diff --git 
a/backend/unittests/licenses/services/test_licenselynx.py b/backend/unittests/licenses/services/test_licenselynx.py new file mode 100644 index 000000000..f2c0f9926 --- /dev/null +++ b/backend/unittests/licenses/services/test_licenselynx.py @@ -0,0 +1,313 @@ +import unittest +from unittest.mock import patch + +from licenselynx.licenselynx import LicenseObject + +from application.licenses.models import License, License_Component +from application.licenses.services.licenselynx import ( + _get_mapped_licence, + _get_mapped_licence_string, + apply_licenselynx, +) +from application.licenses.services.spdx_license_cache import SPDXLicenseCache + + +class TestLicenseLynx(unittest.TestCase): + + @patch("application.licenses.services.licenselynx.LicenseLynx") + def test_get_mapped_licence_string_success(self, mock_licenselynx): + # Setup + mock_license_object = LicenseObject(id="MIT", src="SPDX") + mock_licenselynx.map.return_value = mock_license_object + + # Execute + result = _get_mapped_licence_string("MIT License") + + # Verify + self.assertEqual(result, "MIT") + mock_licenselynx.map.assert_called_once_with("MIT License") + + @patch("application.licenses.services.licenselynx.LicenseLynx") + def test_get_mapped_licence_string_none(self, mock_licenselynx): + # Setup + mock_licenselynx.map.return_value = None + + # Execute + result = _get_mapped_licence_string("Non-existent License") + + # Verify + self.assertIsNone(result) + mock_licenselynx.map.assert_called_once_with("Non-existent License") + + @patch("application.licenses.services.licenselynx.SPDXLicenseCache") + @patch("application.licenses.services.licenselynx.LicenseLynx") + def test_get_mapped_licence_success(self, mock_licenselynx, mock_spdx_cache): + # Setup + mock_license_object = LicenseObject(id="MIT", src="SPDX") + mock_licenselynx.map.return_value = mock_license_object + + mock_spdx_license = License(spdx_id="MIT") + mock_spdx_cache.get.return_value = mock_spdx_license + + # Execute + result = _get_mapped_licence("MIT License", mock_spdx_cache) + + # Verify + self.assertEqual(result, mock_spdx_license) + mock_licenselynx.map.assert_called_once_with("MIT License") + mock_spdx_cache.get.assert_called_once_with("MIT") + + @patch("application.licenses.services.licenselynx.SPDXLicenseCache") + @patch("application.licenses.services.licenselynx.LicenseLynx") + def test_get_mapped_licence_none(self, mock_licenselynx, mock_spdx_cache): + # Setup + mock_licenselynx.map.return_value = None + mock_spdx_cache.get.return_value = None + + # Execute + result = _get_mapped_licence("Non-existent License", mock_spdx_cache) + + # Verify + self.assertIsNone(result) + mock_licenselynx.map.assert_called_once_with("Non-existent License") + + @patch("application.licenses.services.licenselynx.set_effective_license") + @patch("application.licenses.services.licenselynx._get_mapped_licence") + @patch("application.licenses.services.licenselynx._get_mapped_licence_string") + def test_apply_licenselynx_manual_concluded_license_no_change( + self, mock_get_licence_string, mock_get_licence, mock_set_effective + ): + # Setup - using actual License_Component class + component = License_Component() + component.manual_concluded_license_name = "MIT" + component.manual_concluded_comment = "Some comment" + + # Execute + apply_licenselynx(component, None) + + # Verify + mock_get_licence_string.assert_not_called() + mock_get_licence.assert_not_called() + mock_set_effective.assert_not_called() + + @patch("application.licenses.services.licenselynx.set_effective_license") + 
@patch("application.licenses.services.licenselynx._get_mapped_licence") + @patch("application.licenses.services.licenselynx._get_mapped_licence_string") + def test_apply_licenselynx_imported_declared_non_spdx_license( + self, mock_get_licence_string, mock_get_licence, mock_set_effective + ): + # Setup - using actual License_Component class + component = License_Component() + component.manual_concluded_license_name = None + component.manual_concluded_comment = None + component.imported_declared_non_spdx_license = "MIT License" + component.imported_declared_multiple_licenses = None + component.imported_concluded_non_spdx_license = None + component.imported_concluded_multiple_licenses = None + + # Mock the return value for the _get_mapped_licence function + mock_mapped_license = License(spdx_id="MIT") + mock_get_licence.return_value = mock_mapped_license + + # Execute + apply_licenselynx(component, None) + + # Verify + self.assertEqual(component.manual_concluded_spdx_license, mock_mapped_license) + self.assertEqual(component.manual_concluded_comment, "Set by LicenseLynx") + self.assertEqual(component.manual_concluded_license_name, "MIT") + mock_get_licence.assert_called_once_with("MIT License", None) + mock_set_effective.assert_called_once_with(component) + + @patch("application.licenses.services.licenselynx.set_effective_license") + @patch("application.licenses.services.licenselynx._get_mapped_licence") + @patch("application.licenses.services.licenselynx._get_mapped_licence_string") + def test_apply_licenselynx_imported_declared_non_spdx_license_not_found( + self, mock_get_licence_string, mock_get_licence, mock_set_effective + ): + # Setup - using actual License_Component class + component = License_Component() + component.manual_concluded_license_name = None + component.manual_concluded_comment = None + component.imported_declared_non_spdx_license = "MIT License" + component.imported_declared_multiple_licenses = None + component.imported_concluded_non_spdx_license = None + component.imported_concluded_multiple_licenses = None + + # Mock the return value for the _get_mapped_licence function + mock_get_licence.return_value = None + + # Execute + apply_licenselynx(component, None) + + # Verify + self.assertIsNone(component.manual_concluded_spdx_license) + self.assertIsNone(component.manual_concluded_comment) + self.assertIsNone(component.manual_concluded_license_name) + mock_get_licence.assert_called_once_with("MIT License", None) + mock_set_effective.assert_called_once_with(component) + + @patch("application.licenses.services.licenselynx.set_effective_license") + @patch("application.licenses.services.licenselynx._get_mapped_licence") + @patch("application.licenses.services.licenselynx._get_mapped_licence_string") + def test_apply_licenselynx_imported_declared_multiple_licenses( + self, mock_get_licence_string, mock_get_licence, mock_set_effective + ): + # Setup - using actual License_Component class + component = License_Component() + component.manual_concluded_license_name = None + component.manual_concluded_comment = None + component.imported_declared_non_spdx_license = None + component.imported_declared_multiple_licenses = "MIT, Apache-2.0" + component.imported_concluded_non_spdx_license = None + component.imported_concluded_multiple_licenses = None + + # Mock the return value for the _get_mapped_licence_string function + mock_get_licence_string.return_value = "0BSD" + + # Execute + apply_licenselynx(component, None) + + # Verify + 
self.assertEqual(component.imported_declared_multiple_licenses, "0BSD, 0BSD") + self.assertEqual(component.imported_declared_license_name, "0BSD, 0BSD") + mock_get_licence_string.assert_called() + mock_set_effective.assert_called_once_with(component) + + @patch("application.licenses.services.licenselynx.set_effective_license") + @patch("application.licenses.services.licenselynx._get_mapped_licence") + @patch("application.licenses.services.licenselynx._get_mapped_licence_string") + def test_apply_licenselynx_imported_declared_multiple_licenses_not_found( + self, mock_get_licence_string, mock_get_licence, mock_set_effective + ): + # Setup - using actual License_Component class + component = License_Component() + component.manual_concluded_license_name = None + component.manual_concluded_comment = None + component.imported_declared_non_spdx_license = None + component.imported_declared_multiple_licenses = "MIT, Apache-2.0" + component.imported_concluded_non_spdx_license = None + component.imported_concluded_multiple_licenses = None + + # Mock the return value for the _get_mapped_licence_string function + mock_get_licence_string.return_value = None + + # Execute + apply_licenselynx(component, None) + + # Verify + self.assertEqual(component.imported_declared_multiple_licenses, "MIT, Apache-2.0") + self.assertEqual(component.imported_declared_license_name, "MIT, Apache-2.0") + mock_get_licence_string.assert_called() + mock_set_effective.assert_called_once_with(component) + + @patch("application.licenses.services.licenselynx.set_effective_license") + @patch("application.licenses.services.licenselynx._get_mapped_licence") + @patch("application.licenses.services.licenselynx._get_mapped_licence_string") + def test_apply_licenselynx_imported_concluded_non_spdx_license( + self, mock_get_licence_string, mock_get_licence, mock_set_effective + ): + # Setup - using actual License_Component class + component = License_Component() + component.manual_concluded_license_name = None + component.manual_concluded_comment = None + component.imported_declared_non_spdx_license = None + component.imported_declared_multiple_licenses = None + component.imported_concluded_non_spdx_license = "MIT License" + component.imported_concluded_multiple_licenses = None + + # Mock the return value for the _get_mapped_licence function + mock_mapped_license = License(spdx_id="MIT") + mock_get_licence.return_value = mock_mapped_license + + # Execute + apply_licenselynx(component, None) + + # Verify + self.assertEqual(component.manual_concluded_spdx_license, mock_mapped_license) + self.assertEqual(component.manual_concluded_comment, "Set by LicenseLynx") + self.assertEqual(component.manual_concluded_license_name, "MIT") + mock_get_licence.assert_called_once_with("MIT License", None) + mock_set_effective.assert_called_once_with(component) + + @patch("application.licenses.services.licenselynx.set_effective_license") + @patch("application.licenses.services.licenselynx._get_mapped_licence") + @patch("application.licenses.services.licenselynx._get_mapped_licence_string") + def test_apply_licenselynx_imported_concluded_non_spdx_license_not_found( + self, mock_get_licence_string, mock_get_licence, mock_set_effective + ): + # Setup - using actual License_Component class + component = License_Component() + component.manual_concluded_license_name = None + component.manual_concluded_comment = None + component.imported_declared_non_spdx_license = None + component.imported_declared_multiple_licenses = None + 
component.imported_concluded_non_spdx_license = "MIT License" + component.imported_concluded_multiple_licenses = None + + # Mock the return value for the _get_mapped_licence function + mock_get_licence.return_value = None + + # Execute + apply_licenselynx(component, None) + + # Verify + self.assertIsNone(component.manual_concluded_spdx_license) + self.assertIsNone(component.manual_concluded_comment) + self.assertIsNone(component.manual_concluded_license_name) + mock_get_licence.assert_called_once_with("MIT License", None) + mock_set_effective.assert_called_once_with(component) + + @patch("application.licenses.services.licenselynx.set_effective_license") + @patch("application.licenses.services.licenselynx._get_mapped_licence") + @patch("application.licenses.services.licenselynx._get_mapped_licence_string") + def test_apply_licenselynx_imported_concluded_multiple_licenses( + self, mock_get_licence_string, mock_get_licence, mock_set_effective + ): + # Setup - using actual License_Component class + component = License_Component() + component.manual_concluded_license_name = None + component.manual_concluded_comment = None + component.imported_declared_non_spdx_license = None + component.imported_declared_multiple_licenses = None + component.imported_concluded_non_spdx_license = None + component.imported_concluded_multiple_licenses = "MIT, Apache-2.0" + + # Mock the return value for the _get_mapped_licence_string function + mock_get_licence_string.return_value = "0BSD" + + # Execute + apply_licenselynx(component, None) + + # Verify + self.assertEqual(component.imported_concluded_multiple_licenses, "0BSD, 0BSD") + self.assertEqual(component.imported_concluded_license_name, "0BSD, 0BSD") + mock_get_licence_string.assert_called() + mock_set_effective.assert_called_once_with(component) + + @patch("application.licenses.services.licenselynx.set_effective_license") + @patch("application.licenses.services.licenselynx._get_mapped_licence") + @patch("application.licenses.services.licenselynx._get_mapped_licence_string") + def test_apply_licenselynx_imported_concluded_multiple_licenses_not_found( + self, mock_get_licence_string, mock_get_licence, mock_set_effective + ): + # Setup - using actual License_Component class + component = License_Component() + component.manual_concluded_license_name = None + component.manual_concluded_comment = None + component.imported_declared_non_spdx_license = None + component.imported_declared_multiple_licenses = None + component.imported_concluded_non_spdx_license = None + component.imported_concluded_multiple_licenses = "MIT, Apache-2.0" + + # Mock the return value for the _get_mapped_licence_string function + mock_get_licence_string.return_value = None + + # Execute + apply_licenselynx(component, None) + + # Verify + self.assertEqual(component.imported_concluded_multiple_licenses, "MIT, Apache-2.0") + self.assertEqual(component.imported_concluded_license_name, "MIT, Apache-2.0") + mock_get_licence_string.assert_called() + mock_set_effective.assert_called_once_with(component) diff --git a/backend/unittests/licenses/services/test_spdx_license_cache.py b/backend/unittests/licenses/services/test_spdx_license_cache.py new file mode 100644 index 000000000..432926ac6 --- /dev/null +++ b/backend/unittests/licenses/services/test_spdx_license_cache.py @@ -0,0 +1,118 @@ +import unittest +from unittest.mock import MagicMock, patch + +from application.licenses.models import License +from application.licenses.services.spdx_license_cache import SPDXLicenseCache + + +class 
TestSPDXLicenseCache(unittest.TestCase): + + def setUp(self): + self.cache = SPDXLicenseCache() + + def test_get_invalid_spdx_id_returns_none(self): + """Test that invalid SPDX IDs return None""" + # Test with invalid characters + invalid_ids = ["", "invalid/spdx/id", "invalid id", "invalid@id"] + + for spdx_id in invalid_ids: + with self.subTest(spdx_id=spdx_id): + result = self.cache.get(spdx_id) + self.assertIsNone(result) + + def test_get_valid_spdx_id_cache_hit_returns_license(self): + """Test that valid SPDX IDs in cache return the cached license""" + # Set up a mock license + mock_license = MagicMock(spec=License) + mock_license.spdx_id = "MIT" + + # Add to cache + self.cache.cache["MIT"] = mock_license + + # Test cache hit + result = self.cache.get("MIT") + self.assertEqual(result, mock_license) + + def test_get_valid_spdx_id_cache_hit_returns_none_for_no_entry(self): + """Test that valid SPDX IDs with NO_ENTRY in cache return None""" + # Add NO_ENTRY to cache + self.cache.cache["MIT"] = SPDXLicenseCache.NO_ENTRY + + # Test cache hit with NO_ENTRY + result = self.cache.get("MIT") + self.assertIsNone(result) + + @patch("application.licenses.services.spdx_license_cache.get_license_by_spdx_id") + def test_get_valid_spdx_id_cache_miss_license_found(self, mock_get_license): + """Test that valid SPDX IDs not in cache but found in database return the license""" + # Set up mock license + mock_license = MagicMock(spec=License) + mock_license.spdx_id = "MIT" + mock_get_license.return_value = mock_license + + # Test cache miss with license found + result = self.cache.get("MIT") + self.assertEqual(result, mock_license) + # Verify license was added to cache + self.assertEqual(self.cache.cache["MIT"], mock_license) + + @patch("application.licenses.services.spdx_license_cache.get_license_by_spdx_id") + def test_get_valid_spdx_id_cache_miss_license_not_found(self, mock_get_license): + """Test that valid SPDX IDs not in cache and not found in database return None""" + # Set up mock to return None + mock_get_license.return_value = None + + # Test cache miss with license not found + result = self.cache.get("MIT") + self.assertIsNone(result) + # Verify NO_ENTRY was added to cache + self.assertEqual(self.cache.cache["MIT"], SPDXLicenseCache.NO_ENTRY) + + def test_get_valid_spdx_id_cache_hit_with_string_returns_none(self): + """Test that valid SPDX IDs with string value in cache return None""" + # Add a string value (not License) to cache + self.cache.cache["MIT"] = "some_string_value" + + # Test cache hit with string value + result = self.cache.get("MIT") + self.assertIsNone(result) + + def test_get_valid_spdx_id_cache_hit_with_license_returns_license(self): + """Test that valid SPDX IDs with License value in cache return the license""" + # Set up a mock license + mock_license = MagicMock(spec=License) + mock_license.spdx_id = "MIT" + + # Add to cache + self.cache.cache["MIT"] = mock_license + + # Test cache hit with License value + result = self.cache.get("MIT") + self.assertEqual(result, mock_license) + + def test_get_valid_spdx_id_various_formats(self): + """Test that various valid SPDX ID formats work correctly""" + valid_ids = ["MIT", "Apache-2.0", "BSD-2-Clause", "GPL-3.0", "ISC", "LGPL-3.0"] + + # Mock get_license_by_spdx_id to return None for all + with patch("application.licenses.services.spdx_license_cache.get_license_by_spdx_id") as mock_get_license: + mock_get_license.return_value = None + + for spdx_id in valid_ids: + with self.subTest(spdx_id=spdx_id): + result = self.cache.get(spdx_id) + 
self.assertIsNone(result) + # Verify NO_ENTRY was added to cache + self.assertEqual(self.cache.cache[spdx_id], SPDXLicenseCache.NO_ENTRY) + + def test_cache_is_empty_initially(self): + """Test that cache is empty initially""" + self.assertEqual(len(self.cache.cache), 0) + + def test_cache_stores_no_entry_correctly(self): + """Test that NO_ENTRY is stored correctly in cache""" + # Add NO_ENTRY to cache directly + self.cache.cache["TEST"] = SPDXLicenseCache.NO_ENTRY + + # Verify it's stored correctly + self.assertEqual(self.cache.cache["TEST"], SPDXLicenseCache.NO_ENTRY) diff --git a/backend/unittests/metrics/services/test_metrics.py b/backend/unittests/metrics/services/test_metrics.py new file mode 100644 index 000000000..2ad0b6fa1 --- /dev/null +++ b/backend/unittests/metrics/services/test_metrics.py @@ -0,0 +1,790 @@ +from datetime import date, datetime, timedelta +from unittest.mock import call, patch + +from django.utils import timezone + +from application.core.types import Severity, Status +from application.metrics.services.metrics import ( + _initialize_response_data, + calculate_metrics_for_product, + calculate_product_metrics, + get_codecharta_metrics, + get_product_metrics_current, + get_product_metrics_timeline, +) +from unittests.base_test_case import BaseTestCase + + +class TestInitializeResponseData(BaseTestCase): + def test_initialize_response_data(self): + result = _initialize_response_data() + + expected = { + "active_critical": 0, + "active_high": 0, + "active_medium": 0, + "active_low": 0, + "active_none": 0, + "active_unknown": 0, + "open": 0, + "affected": 0, + "resolved": 0, + "duplicate": 0, + "false_positive": 0, + "in_review": 0, + "not_affected": 0, + "not_security": 0, + "risk_accepted": 0, + } + self.assertEqual(result, expected) + + +class TestCalculateProductMetrics(BaseTestCase): + @patch("application.metrics.services.metrics.Product_Metrics_Status.load") + @patch("application.metrics.services.metrics.calculate_metrics_for_product") + @patch("application.metrics.services.metrics.Product.objects") + def test_calculate_product_metrics_no_products(self, mock_product_objects, mock_calc, mock_status_load): + mock_product_objects.filter.return_value = [] + + status = ProductMetricsStatusStub() + mock_status_load.return_value = status + + result = calculate_product_metrics() + + self.assertEqual(result, "Calculated metrics for 0 products.") + mock_product_objects.filter.assert_called_once_with(is_product_group=False) + mock_calc.assert_not_called() + mock_status_load.assert_called_once() + + @patch("application.metrics.services.metrics.timezone") + @patch("application.metrics.services.metrics.Product_Metrics_Status.load") + @patch("application.metrics.services.metrics.calculate_metrics_for_product") + @patch("application.metrics.services.metrics.Product.objects") + def test_calculate_product_metrics_one_product( + self, mock_product_objects, mock_calc, mock_status_load, mock_timezone + ): + mock_product_objects.filter.return_value = [self.product_1] + mock_calc.return_value = True + + now = datetime(2025, 6, 15, 12, 0, 0) + mock_timezone.now.return_value = now + + status = ProductMetricsStatusStub() + mock_status_load.return_value = status + + result = calculate_product_metrics() + + self.assertEqual(result, "Calculated metrics for 1 product.") + mock_calc.assert_called_once_with(self.product_1) + self.assertEqual(status.last_calculated, now) + + @patch("application.metrics.services.metrics.timezone") + 
@patch("application.metrics.services.metrics.Product_Metrics_Status.load") + @patch("application.metrics.services.metrics.calculate_metrics_for_product") + @patch("application.metrics.services.metrics.Product.objects") + def test_calculate_product_metrics_multiple_products( + self, mock_product_objects, mock_calc, mock_status_load, mock_timezone + ): + product_2 = type(self.product_1) + product_2.name = "product_2" + mock_product_objects.filter.return_value = [ + self.product_1, + product_2, + ] + mock_calc.return_value = True + + now = datetime(2025, 6, 15, 12, 0, 0) + mock_timezone.now.return_value = now + + status = ProductMetricsStatusStub() + mock_status_load.return_value = status + + result = calculate_product_metrics() + + self.assertEqual(result, "Calculated metrics for 2 products.") + self.assertEqual(mock_calc.call_count, 2) + + @patch("application.metrics.services.metrics.timezone") + @patch("application.metrics.services.metrics.Product_Metrics_Status.load") + @patch("application.metrics.services.metrics.calculate_metrics_for_product") + @patch("application.metrics.services.metrics.Product.objects") + def test_calculate_product_metrics_some_without_changes( + self, mock_product_objects, mock_calc, mock_status_load, mock_timezone + ): + product_2 = type(self.product_1) + product_2.name = "product_2" + mock_product_objects.filter.return_value = [ + self.product_1, + product_2, + ] + mock_calc.side_effect = [True, False] + + now = datetime(2025, 6, 15, 12, 0, 0) + mock_timezone.now.return_value = now + + status = ProductMetricsStatusStub() + mock_status_load.return_value = status + + result = calculate_product_metrics() + + self.assertEqual(result, "Calculated metrics for 1 product.") + + +class TestCalculateMetricsForProduct(BaseTestCase): + @patch("application.metrics.services.metrics.Observation.objects") + @patch("application.metrics.services.metrics.Product_Metrics.objects") + @patch("application.metrics.services.metrics._get_latest_product_metrics") + @patch("application.metrics.services.metrics.timezone") + def test_no_previous_metrics_no_observations( + self, mock_timezone, mock_get_latest, mock_pm_objects, mock_obs_objects + ): + today = date(2025, 6, 15) + mock_timezone.localdate.return_value = today + self.product_1.last_observation_change = datetime(2025, 6, 15, 10, 0, 0) + + mock_get_latest.return_value = None + + todays_metrics = ProductMetricsStub() + mock_pm_objects.update_or_create.return_value = (todays_metrics, True) + mock_obs_objects.filter.return_value.values.return_value = [] + + result = calculate_metrics_for_product(self.product_1) + + self.assertTrue(result) + mock_pm_objects.update_or_create.assert_called_once() + todays_metrics.assert_save_called(self) + + @patch("application.metrics.services.metrics.Observation.objects") + @patch("application.metrics.services.metrics.Product_Metrics.objects") + @patch("application.metrics.services.metrics._get_latest_product_metrics") + @patch("application.metrics.services.metrics.timezone") + def test_observations_today_with_all_severities( + self, mock_timezone, mock_get_latest, mock_pm_objects, mock_obs_objects + ): + today = date(2025, 6, 15) + mock_timezone.localdate.return_value = today + self.product_1.last_observation_change = datetime(2025, 6, 15, 10, 0, 0) + + mock_get_latest.return_value = None + + todays_metrics = ProductMetricsStub() + mock_pm_objects.update_or_create.return_value = (todays_metrics, True) + + observations = [ + {"current_severity": Severity.SEVERITY_CRITICAL, "current_status": 
Status.STATUS_OPEN}, + {"current_severity": Severity.SEVERITY_HIGH, "current_status": Status.STATUS_OPEN}, + {"current_severity": Severity.SEVERITY_MEDIUM, "current_status": Status.STATUS_AFFECTED}, + {"current_severity": Severity.SEVERITY_LOW, "current_status": Status.STATUS_IN_REVIEW}, + {"current_severity": Severity.SEVERITY_NONE, "current_status": Status.STATUS_OPEN}, + {"current_severity": Severity.SEVERITY_UNKNOWN, "current_status": Status.STATUS_AFFECTED}, + ] + mock_obs_objects.filter.return_value.values.return_value = observations + + result = calculate_metrics_for_product(self.product_1) + + self.assertTrue(result) + self.assertEqual(todays_metrics.active_critical, 1) + self.assertEqual(todays_metrics.active_high, 1) + self.assertEqual(todays_metrics.active_medium, 1) + self.assertEqual(todays_metrics.active_low, 1) + self.assertEqual(todays_metrics.active_none, 1) + self.assertEqual(todays_metrics.active_unknown, 1) + self.assertEqual(todays_metrics.open, 3) + self.assertEqual(todays_metrics.affected, 2) + self.assertEqual(todays_metrics.in_review, 1) + + @patch("application.metrics.services.metrics.Observation.objects") + @patch("application.metrics.services.metrics.Product_Metrics.objects") + @patch("application.metrics.services.metrics._get_latest_product_metrics") + @patch("application.metrics.services.metrics.timezone") + def test_observations_today_with_all_statuses( + self, mock_timezone, mock_get_latest, mock_pm_objects, mock_obs_objects + ): + today = date(2025, 6, 15) + mock_timezone.localdate.return_value = today + self.product_1.last_observation_change = datetime(2025, 6, 15, 10, 0, 0) + + mock_get_latest.return_value = None + + todays_metrics = ProductMetricsStub() + mock_pm_objects.update_or_create.return_value = (todays_metrics, True) + + observations = [ + {"current_severity": Severity.SEVERITY_CRITICAL, "current_status": Status.STATUS_OPEN}, + {"current_severity": Severity.SEVERITY_HIGH, "current_status": Status.STATUS_AFFECTED}, + {"current_severity": Severity.SEVERITY_MEDIUM, "current_status": Status.STATUS_RESOLVED}, + {"current_severity": Severity.SEVERITY_LOW, "current_status": Status.STATUS_DUPLICATE}, + {"current_severity": Severity.SEVERITY_NONE, "current_status": Status.STATUS_FALSE_POSITIVE}, + {"current_severity": Severity.SEVERITY_UNKNOWN, "current_status": Status.STATUS_IN_REVIEW}, + {"current_severity": Severity.SEVERITY_LOW, "current_status": Status.STATUS_NOT_AFFECTED}, + {"current_severity": Severity.SEVERITY_LOW, "current_status": Status.STATUS_NOT_SECURITY}, + {"current_severity": Severity.SEVERITY_LOW, "current_status": Status.STATUS_RISK_ACCEPTED}, + ] + mock_obs_objects.filter.return_value.values.return_value = observations + + result = calculate_metrics_for_product(self.product_1) + + self.assertTrue(result) + self.assertEqual(todays_metrics.open, 1) + self.assertEqual(todays_metrics.affected, 1) + self.assertEqual(todays_metrics.resolved, 1) + self.assertEqual(todays_metrics.duplicate, 1) + self.assertEqual(todays_metrics.false_positive, 1) + self.assertEqual(todays_metrics.in_review, 1) + self.assertEqual(todays_metrics.not_affected, 1) + self.assertEqual(todays_metrics.not_security, 1) + self.assertEqual(todays_metrics.risk_accepted, 1) + # Active statuses: Open, Affected, In review + self.assertEqual(todays_metrics.active_critical, 1) + self.assertEqual(todays_metrics.active_high, 1) + self.assertEqual(todays_metrics.active_unknown, 1) + # Resolved, Duplicate, etc. 
are not active + self.assertEqual(todays_metrics.active_medium, 0) + self.assertEqual(todays_metrics.active_low, 0) + + @patch("application.metrics.services.metrics.Product_Metrics.objects") + @patch("application.metrics.services.metrics._get_latest_product_metrics") + @patch("application.metrics.services.metrics.timezone") + def test_no_changes_today_copies_previous_metrics(self, mock_timezone, mock_get_latest, mock_pm_objects): + today = date(2025, 6, 15) + yesterday = date(2025, 6, 14) + mock_timezone.localdate.return_value = today + self.product_1.last_observation_change = datetime(2025, 6, 14, 10, 0, 0) + + latest_metrics = ProductMetricsStub( + date=yesterday, + active_critical=5, + active_high=3, + active_medium=2, + active_low=1, + open=4, + resolved=2, + ) + mock_get_latest.return_value = latest_metrics + + created_metrics = [] + mock_pm_objects.create.side_effect = lambda **kwargs: created_metrics.append(kwargs) + + result = calculate_metrics_for_product(self.product_1) + + self.assertTrue(result) + self.assertEqual(len(created_metrics), 1) + self.assertEqual(created_metrics[0]["date"], today) + self.assertEqual(created_metrics[0]["active_critical"], 5) + self.assertEqual(created_metrics[0]["active_high"], 3) + self.assertEqual(created_metrics[0]["active_medium"], 2) + self.assertEqual(created_metrics[0]["active_low"], 1) + self.assertEqual(created_metrics[0]["open"], 4) + self.assertEqual(created_metrics[0]["resolved"], 2) + + @patch("application.metrics.services.metrics.Product_Metrics.objects") + @patch("application.metrics.services.metrics._get_latest_product_metrics") + @patch("application.metrics.services.metrics.timezone") + def test_no_changes_today_fills_gap_days(self, mock_timezone, mock_get_latest, mock_pm_objects): + today = date(2025, 6, 15) + three_days_ago = date(2025, 6, 12) + mock_timezone.localdate.return_value = today + self.product_1.last_observation_change = datetime(2025, 6, 12, 10, 0, 0) + + latest_metrics = ProductMetricsStub(date=three_days_ago, active_critical=2, open=1) + mock_get_latest.return_value = latest_metrics + + created_metrics = [] + mock_pm_objects.create.side_effect = lambda **kwargs: created_metrics.append(kwargs) + + result = calculate_metrics_for_product(self.product_1) + + self.assertTrue(result) + self.assertEqual(len(created_metrics), 3) + self.assertEqual(created_metrics[0]["date"], date(2025, 6, 13)) + self.assertEqual(created_metrics[1]["date"], date(2025, 6, 14)) + self.assertEqual(created_metrics[2]["date"], date(2025, 6, 15)) + for m in created_metrics: + self.assertEqual(m["active_critical"], 2) + self.assertEqual(m["open"], 1) + + @patch("application.metrics.services.metrics.Product_Metrics.objects") + @patch("application.metrics.services.metrics._get_latest_product_metrics") + @patch("application.metrics.services.metrics.timezone") + def test_no_changes_today_metrics_already_up_to_date(self, mock_timezone, mock_get_latest, mock_pm_objects): + today = date(2025, 6, 15) + mock_timezone.localdate.return_value = today + self.product_1.last_observation_change = datetime(2025, 6, 14, 10, 0, 0) + + latest_metrics = ProductMetricsStub(date=today) + mock_get_latest.return_value = latest_metrics + + result = calculate_metrics_for_product(self.product_1) + + self.assertFalse(result) + mock_pm_objects.create.assert_not_called() + + +class TestGetLatestProductMetrics(BaseTestCase): + @patch("application.metrics.services.metrics.Product_Metrics.objects") + def test_returns_latest_metrics(self, mock_pm_objects): + from 
application.metrics.services.metrics import ( + _get_latest_product_metrics, + ) + + expected_metrics = ProductMetricsStub(date=date(2025, 6, 15)) + mock_pm_objects.filter.return_value.latest.return_value = expected_metrics + + result = _get_latest_product_metrics(self.product_1) + + self.assertEqual(result, expected_metrics) + mock_pm_objects.filter.assert_called_once_with(product=self.product_1) + mock_pm_objects.filter.return_value.latest.assert_called_once_with("date") + + @patch("application.metrics.services.metrics.Product_Metrics.objects") + def test_returns_none_when_no_metrics(self, mock_pm_objects): + from application.metrics.models import Product_Metrics + from application.metrics.services.metrics import ( + _get_latest_product_metrics, + ) + + mock_pm_objects.filter.return_value.latest.side_effect = Product_Metrics.DoesNotExist + + result = _get_latest_product_metrics(self.product_1) + + self.assertIsNone(result) + + +class TestGetProductMetricsTimeline(BaseTestCase): + @patch("application.metrics.services.metrics.get_days") + @patch("application.metrics.services.metrics.get_product_metrics") + def test_no_product_no_age_filter(self, mock_get_metrics, mock_get_days): + metrics = [ + ProductMetricsStub(date=date(2025, 6, 14), active_critical=2, open=1), + ProductMetricsStub(date=date(2025, 6, 15), active_critical=3, open=2), + ] + mock_get_metrics.return_value = metrics + mock_get_days.return_value = None + + result = get_product_metrics_timeline(None, "all") + + self.assertEqual(len(result), 2) + self.assertEqual(result["2025-06-14"]["active_critical"], 2) + self.assertEqual(result["2025-06-14"]["open"], 1) + self.assertEqual(result["2025-06-15"]["active_critical"], 3) + self.assertEqual(result["2025-06-15"]["open"], 2) + + @patch("application.metrics.services.metrics.get_days") + @patch("application.metrics.services.metrics.get_product_metrics") + def test_no_product_aggregates_multiple_products_same_date(self, mock_get_metrics, mock_get_days): + metrics = [ + ProductMetricsStub( + date=date(2025, 6, 15), + active_critical=2, + active_high=1, + open=3, + ), + ProductMetricsStub( + date=date(2025, 6, 15), + active_critical=1, + active_high=4, + open=2, + ), + ] + mock_get_metrics.return_value = metrics + mock_get_days.return_value = None + + result = get_product_metrics_timeline(None, "all") + + self.assertEqual(len(result), 1) + self.assertEqual(result["2025-06-15"]["active_critical"], 3) + self.assertEqual(result["2025-06-15"]["active_high"], 5) + self.assertEqual(result["2025-06-15"]["open"], 5) + + @patch("application.metrics.services.metrics.get_days") + @patch("application.metrics.services.metrics.get_product_metrics") + def test_single_product_no_aggregation(self, mock_get_metrics, mock_get_days): + self.product_1.is_product_group = False + metrics_qs = QuerySetStub( + [ + ProductMetricsStub( + date=date(2025, 6, 15), + active_critical=5, + active_high=3, + open=2, + resolved=1, + ) + ] + ) + mock_get_metrics.return_value = metrics_qs + mock_get_days.return_value = None + + result = get_product_metrics_timeline(self.product_1, "all") + + self.assertEqual(len(result), 1) + self.assertEqual(result["2025-06-15"]["active_critical"], 5) + self.assertEqual(result["2025-06-15"]["active_high"], 3) + self.assertEqual(result["2025-06-15"]["open"], 2) + self.assertEqual(result["2025-06-15"]["resolved"], 1) + metrics_qs.assert_filtered_with(self, product=self.product_1) + + @patch("application.metrics.services.metrics.get_days") + 
@patch("application.metrics.services.metrics.get_product_metrics") + def test_product_group_filters_and_aggregates(self, mock_get_metrics, mock_get_days): + self.product_group_1.is_product_group = True + metrics_qs = QuerySetStub( + [ + ProductMetricsStub( + date=date(2025, 6, 15), + active_critical=2, + open=1, + ), + ProductMetricsStub( + date=date(2025, 6, 15), + active_critical=3, + open=4, + ), + ] + ) + mock_get_metrics.return_value = metrics_qs + mock_get_days.return_value = None + + result = get_product_metrics_timeline(self.product_group_1, "all") + + self.assertEqual(result["2025-06-15"]["active_critical"], 5) + self.assertEqual(result["2025-06-15"]["open"], 5) + metrics_qs.assert_filtered_with(self, product__product_group=self.product_group_1) + + @patch("application.metrics.services.metrics.timezone") + @patch("application.metrics.services.metrics.get_days") + @patch("application.metrics.services.metrics.get_product_metrics") + def test_age_filter_applied(self, mock_get_metrics, mock_get_days, mock_timezone): + mock_get_days.return_value = 7 + + now = datetime(2025, 6, 15, 14, 30, 0) + mock_timezone.now.return_value = now + + metrics_qs = QuerySetStub([]) + mock_get_metrics.return_value = metrics_qs + + result = get_product_metrics_timeline(None, "Past 7 days") + + self.assertEqual(result, {}) + expected_threshold = datetime(2025, 6, 8, 0, 0, 0) + metrics_qs.assert_filtered_with(self, date__gte=expected_threshold) + + @patch("application.metrics.services.metrics.get_days") + @patch("application.metrics.services.metrics.get_product_metrics") + def test_empty_metrics(self, mock_get_metrics, mock_get_days): + mock_get_metrics.return_value = [] + mock_get_days.return_value = None + + result = get_product_metrics_timeline(None, "all") + + self.assertEqual(result, {}) + + +class TestGetProductMetricsCurrent(BaseTestCase): + @patch("application.metrics.services.metrics.get_todays_product_metrics") + def test_no_product_no_metrics(self, mock_get_todays): + mock_get_todays.return_value = QuerySetStub([]) + + result = get_product_metrics_current(None) + + expected = _initialize_response_data() + self.assertEqual(result, expected) + + @patch("application.metrics.services.metrics.get_todays_product_metrics") + def test_no_product_with_metrics(self, mock_get_todays): + metrics = [ + ProductMetricsStub( + active_critical=1, + active_high=2, + active_medium=3, + open=4, + resolved=5, + ), + ProductMetricsStub( + active_critical=10, + active_high=20, + active_medium=30, + open=40, + resolved=50, + ), + ] + mock_get_todays.return_value = QuerySetStub(metrics) + + result = get_product_metrics_current(None) + + self.assertEqual(result["active_critical"], 11) + self.assertEqual(result["active_high"], 22) + self.assertEqual(result["active_medium"], 33) + self.assertEqual(result["open"], 44) + self.assertEqual(result["resolved"], 55) + + @patch("application.metrics.services.metrics.get_todays_product_metrics") + def test_single_product_filters(self, mock_get_todays): + self.product_1.is_product_group = False + metrics = [ProductMetricsStub(active_critical=7, open=3)] + metrics_qs = QuerySetStub(metrics) + mock_get_todays.return_value = metrics_qs + + result = get_product_metrics_current(self.product_1) + + self.assertEqual(result["active_critical"], 7) + self.assertEqual(result["open"], 3) + metrics_qs.assert_filtered_with(self, product=self.product_1) + + @patch("application.metrics.services.metrics.get_todays_product_metrics") + def test_product_group_filters(self, mock_get_todays): + 
self.product_group_1.is_product_group = True + metrics = [ProductMetricsStub(active_critical=4, open=2)] + metrics_qs = QuerySetStub(metrics) + mock_get_todays.return_value = metrics_qs + + result = get_product_metrics_current(self.product_group_1) + + self.assertEqual(result["active_critical"], 4) + self.assertEqual(result["open"], 2) + metrics_qs.assert_filtered_with(self, product__product_group=self.product_group_1) + + +class TestGetCodechartaMetrics(BaseTestCase): + @patch("application.metrics.services.metrics.Observation.objects") + def test_no_observations(self, mock_obs_objects): + mock_obs_objects.filter.return_value = [] + + result = get_codecharta_metrics(self.product_1) + + self.assertEqual(result, []) + mock_obs_objects.filter.assert_called_once_with( + product=self.product_1, + branch=self.product_1.repository_default_branch, + current_status__in=Status.STATUS_ACTIVE, + ) + + @patch("application.metrics.services.metrics.Observation.objects") + def test_observation_without_source_file(self, mock_obs_objects): + obs = ObservationStub( + origin_source_file="", + current_severity=Severity.SEVERITY_HIGH, + ) + mock_obs_objects.filter.return_value = [obs] + + result = get_codecharta_metrics(self.product_1) + + self.assertEqual(result, []) + + @patch("application.metrics.services.metrics.Observation.objects") + def test_single_observation_critical(self, mock_obs_objects): + obs = ObservationStub( + origin_source_file="src/main.py", + current_severity=Severity.SEVERITY_CRITICAL, + ) + mock_obs_objects.filter.return_value = [obs] + + result = get_codecharta_metrics(self.product_1) + + self.assertEqual(len(result), 1) + entry = result[0] + self.assertEqual(entry["source_file"], "src/main.py") + self.assertEqual(entry["vulnerabilities_total"], 1) + self.assertEqual(entry["vulnerabilities_critical"], 1) + self.assertEqual(entry["vulnerabilities_high"], 0) + self.assertEqual(entry["vulnerabilities_high_and_above"], 1) + self.assertEqual(entry["vulnerabilities_medium_and_above"], 1) + self.assertEqual(entry["vulnerabilities_low_and_above"], 1) + + @patch("application.metrics.services.metrics.Observation.objects") + def test_single_observation_high(self, mock_obs_objects): + obs = ObservationStub( + origin_source_file="src/main.py", + current_severity=Severity.SEVERITY_HIGH, + ) + mock_obs_objects.filter.return_value = [obs] + + result = get_codecharta_metrics(self.product_1) + + entry = result[0] + self.assertEqual(entry["vulnerabilities_high"], 1) + self.assertEqual(entry["vulnerabilities_high_and_above"], 1) + self.assertEqual(entry["vulnerabilities_medium_and_above"], 1) + self.assertEqual(entry["vulnerabilities_low_and_above"], 1) + self.assertEqual(entry["vulnerabilities_critical"], 0) + + @patch("application.metrics.services.metrics.Observation.objects") + def test_single_observation_medium(self, mock_obs_objects): + obs = ObservationStub( + origin_source_file="src/main.py", + current_severity=Severity.SEVERITY_MEDIUM, + ) + mock_obs_objects.filter.return_value = [obs] + + result = get_codecharta_metrics(self.product_1) + + entry = result[0] + self.assertEqual(entry["vulnerabilities_medium"], 1) + self.assertEqual(entry["vulnerabilities_high_and_above"], 0) + self.assertEqual(entry["vulnerabilities_medium_and_above"], 1) + self.assertEqual(entry["vulnerabilities_low_and_above"], 1) + + @patch("application.metrics.services.metrics.Observation.objects") + def test_single_observation_low(self, mock_obs_objects): + obs = ObservationStub( + origin_source_file="src/main.py", + 
current_severity=Severity.SEVERITY_LOW, + ) + mock_obs_objects.filter.return_value = [obs] + + result = get_codecharta_metrics(self.product_1) + + entry = result[0] + self.assertEqual(entry["vulnerabilities_low"], 1) + self.assertEqual(entry["vulnerabilities_high_and_above"], 0) + self.assertEqual(entry["vulnerabilities_medium_and_above"], 0) + self.assertEqual(entry["vulnerabilities_low_and_above"], 1) + + @patch("application.metrics.services.metrics.Observation.objects") + def test_single_observation_none_severity(self, mock_obs_objects): + obs = ObservationStub( + origin_source_file="src/main.py", + current_severity=Severity.SEVERITY_NONE, + ) + mock_obs_objects.filter.return_value = [obs] + + result = get_codecharta_metrics(self.product_1) + + entry = result[0] + self.assertEqual(entry["vulnerabilities_none"], 1) + self.assertEqual(entry["vulnerabilities_high_and_above"], 0) + self.assertEqual(entry["vulnerabilities_medium_and_above"], 0) + self.assertEqual(entry["vulnerabilities_low_and_above"], 0) + + @patch("application.metrics.services.metrics.Observation.objects") + def test_multiple_observations_same_file(self, mock_obs_objects): + obs1 = ObservationStub( + origin_source_file="src/main.py", + current_severity=Severity.SEVERITY_CRITICAL, + ) + obs2 = ObservationStub( + origin_source_file="src/main.py", + current_severity=Severity.SEVERITY_HIGH, + ) + obs3 = ObservationStub( + origin_source_file="src/main.py", + current_severity=Severity.SEVERITY_MEDIUM, + ) + mock_obs_objects.filter.return_value = [obs1, obs2, obs3] + + result = get_codecharta_metrics(self.product_1) + + self.assertEqual(len(result), 1) + entry = result[0] + self.assertEqual(entry["vulnerabilities_total"], 3) + self.assertEqual(entry["vulnerabilities_critical"], 1) + self.assertEqual(entry["vulnerabilities_high"], 1) + self.assertEqual(entry["vulnerabilities_medium"], 1) + self.assertEqual(entry["vulnerabilities_high_and_above"], 2) + self.assertEqual(entry["vulnerabilities_medium_and_above"], 3) + self.assertEqual(entry["vulnerabilities_low_and_above"], 3) + + @patch("application.metrics.services.metrics.Observation.objects") + def test_multiple_observations_different_files(self, mock_obs_objects): + obs1 = ObservationStub( + origin_source_file="src/main.py", + current_severity=Severity.SEVERITY_CRITICAL, + ) + obs2 = ObservationStub( + origin_source_file="src/utils.py", + current_severity=Severity.SEVERITY_LOW, + ) + mock_obs_objects.filter.return_value = [obs1, obs2] + + result = get_codecharta_metrics(self.product_1) + + self.assertEqual(len(result), 2) + files = {entry["source_file"]: entry for entry in result} + self.assertEqual(files["src/main.py"]["vulnerabilities_critical"], 1) + self.assertEqual(files["src/utils.py"]["vulnerabilities_low"], 1) + + +# --- Stubs --- + + +class ProductMetricsStatusStub: + def __init__(self): + self.last_calculated = None + self._saved = False + + def save(self): + self._saved = True + + +class ProductMetricsStub: + def __init__( + self, + date=None, + active_critical=0, + active_high=0, + active_medium=0, + active_low=0, + active_none=0, + active_unknown=0, + open=0, + affected=0, + resolved=0, + duplicate=0, + false_positive=0, + in_review=0, + not_affected=0, + not_security=0, + risk_accepted=0, + ): + self.date = date + self.active_critical = active_critical + self.active_high = active_high + self.active_medium = active_medium + self.active_low = active_low + self.active_none = active_none + self.active_unknown = active_unknown + self.open = open + self.affected = 
affected + self.resolved = resolved + self.duplicate = duplicate + self.false_positive = false_positive + self.in_review = in_review + self.not_affected = not_affected + self.not_security = not_security + self.risk_accepted = risk_accepted + self._saved = False + + def save(self): + self._saved = True + + def assert_save_called(self, test_case): + test_case.assertTrue(self._saved) + + +class ObservationStub: + def __init__(self, origin_source_file="", current_severity=""): + self.origin_source_file = origin_source_file + self.current_severity = current_severity + + +class QuerySetStub: + """A simple stub that supports filter() chaining and iteration.""" + + def __init__(self, items=None): + self._items = items or [] + self._filter_calls = [] + + def filter(self, **kwargs): + self._filter_calls.append(kwargs) + return self + + def __iter__(self): + return iter(self._items) + + def __len__(self): + return len(self._items) + + def assert_filtered_with(self, test_case, **expected_kwargs): + test_case.assertTrue( + any(kwargs == expected_kwargs for kwargs in self._filter_calls), + f"Expected filter call with {expected_kwargs}, got {self._filter_calls}", + ) diff --git a/backend/unittests/notifications/__init__.py b/backend/unittests/notifications/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/notifications/api/__init__.py b/backend/unittests/notifications/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/notifications/api/test_views.py b/backend/unittests/notifications/api/test_views.py new file mode 100644 index 000000000..73039c43c --- /dev/null +++ b/backend/unittests/notifications/api/test_views.py @@ -0,0 +1,67 @@ +from unittest.mock import patch + +from django.core.management import call_command +from rest_framework.status import ( + HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, + HTTP_404_NOT_FOUND, +) +from rest_framework.test import APIClient + +from application.access_control.queries.user import get_user_by_username +from application.notifications.models import Notification_Viewed +from unittests.base_test_case import BaseTestCase + + +class TestViews(BaseTestCase): + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + def test_notification_bulk_mark_as_viewed_no_list(self, mock_authentication): + mock_authentication.return_value = self.user_internal, None + + api_client = APIClient() + response = api_client.post("/api/notifications/bulk_mark_as_viewed/") + + self.assertEqual(HTTP_400_BAD_REQUEST, response.status_code) + self.assertEqual({"message": "Notifications: This field is required."}, response.data) + + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + def test_notification_bulk_mark_as_viewed_successful(self, mock_authentication): + call_command("loaddata", "unittests/fixtures/unittests_fixtures.json") + # mock_authentication.return_value = self.user_internal, None + user = get_user_by_username("db_internal_write") + mock_authentication.return_value = user, None + + data = {"notifications": [3, 5]} + api_client = APIClient() + response = api_client.post("/api/notifications/bulk_mark_as_viewed/", data=data, format="json") + + self.assertEqual(HTTP_204_NO_CONTENT, response.status_code) + + notification_viewed = Notification_Viewed.objects.get(notification_id=3, user=user) + self.assertIsNotNone(notification_viewed) + + notification_viewed = 
Notification_Viewed.objects.get(notification_id=5, user=user) + self.assertIsNotNone(notification_viewed) + + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + def test_notification_mark_as_viewed_not_found(self, mock_authentication): + mock_authentication.return_value = self.user_internal, None + + api_client = APIClient() + response = api_client.post("/api/notifications/99999/mark_as_viewed/") + + self.assertEqual(HTTP_404_NOT_FOUND, response.status_code) + + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + def test_notification_mark_as_viewed_successful(self, mock_authentication): + call_command("loaddata", "unittests/fixtures/unittests_fixtures.json") + + mock_authentication.return_value = self.user_internal, None + + api_client = APIClient() + response = api_client.post("/api/notifications/1/mark_as_viewed/") + + self.assertEqual(HTTP_204_NO_CONTENT, response.status_code) + + notification_viewed = Notification_Viewed.objects.get(notification_id=1, user=self.user_internal) + self.assertIsNotNone(notification_viewed) diff --git a/backend/unittests/notifications/services/__init__.py b/backend/unittests/notifications/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/unittests/commons/api/test_exception_handler.py b/backend/unittests/notifications/services/test_exception_handler.py similarity index 75% rename from backend/unittests/commons/api/test_exception_handler.py rename to backend/unittests/notifications/services/test_exception_handler.py index 94ed56113..8ad645eee 100644 --- a/backend/unittests/commons/api/test_exception_handler.py +++ b/backend/unittests/notifications/services/test_exception_handler.py @@ -15,7 +15,7 @@ HTTP_500_INTERNAL_SERVER_ERROR, ) -from application.commons.api.exception_handler import custom_exception_handler +from application.notifications.api.exception_handler import custom_exception_handler from unittests.base_test_case import BaseTestCase @@ -28,9 +28,7 @@ def test_protected_error_formatted(self): response = custom_exception_handler(exception, None) self.assertEqual(HTTP_409_CONFLICT, response.status_code) - data = { - "message": "Cannot delete Product because it still has Services, Observations." 
- } + data = {"message": "Cannot delete Product because it still has Services, Observations."} self.assertEqual(data, response.data) def test_protected_error_raw(self): @@ -46,9 +44,9 @@ def test_protected_error_raw(self): } self.assertEqual(data, response.data) - @patch("application.commons.api.exception_handler.logger.error") - @patch("application.commons.api.exception_handler.format_log_message") - @patch("application.commons.api.exception_handler.send_exception_notification") + @patch("application.notifications.api.exception_handler.logger.error") + @patch("application.notifications.api.exception_handler.format_log_message") + @patch("application.notifications.api.exception_handler.send_exception_notification") def test_no_response(self, mock_notify, mock_format, mock_logging): exception = Exception("Something unexpected has happened") response = custom_exception_handler(exception, None) @@ -57,11 +55,11 @@ def test_no_response(self, mock_notify, mock_format, mock_logging): data = {"message": "Internal server error, check logs for details"} self.assertEqual(data, response.data) mock_notify.assert_called_with(exception) - mock_format.assert_called_with(response=response, exception=exception) + mock_format.assert_called_with(response=response, exception=exception, username="user_internal@example.com") self.assertEqual(mock_logging.call_count, 2) - @patch("application.commons.api.exception_handler.logger.warning") - @patch("application.commons.api.exception_handler.format_log_message") + @patch("application.notifications.api.exception_handler.logger.warning") + @patch("application.notifications.api.exception_handler.format_log_message") def test_authentication_failed(self, mock_format, mock_logging): exception = AuthenticationFailed("Authentication has failed") response = custom_exception_handler(exception, None) @@ -69,11 +67,11 @@ def test_authentication_failed(self, mock_format, mock_logging): self.assertEqual(HTTP_401_UNAUTHORIZED, response.status_code) data = {"message": "Authentication has failed"} self.assertEqual(data, response.data) - mock_format.assert_called_with(response=response, exception=exception) + mock_format.assert_called_with(response=response, exception=exception, username="user_internal@example.com") mock_logging.assert_called_once() - @patch("application.commons.api.exception_handler.logger.warning") - @patch("application.commons.api.exception_handler.format_log_message") + @patch("application.notifications.api.exception_handler.logger.warning") + @patch("application.notifications.api.exception_handler.format_log_message") def test_permission_denied(self, mock_format, mock_logging): exception = PermissionDenied("Not authentication") response = custom_exception_handler(exception, None) @@ -81,11 +79,11 @@ def test_permission_denied(self, mock_format, mock_logging): self.assertEqual(HTTP_403_FORBIDDEN, response.status_code) data = {"message": "Not authentication"} self.assertEqual(data, response.data) - mock_format.assert_called_with(response=response, exception=exception) + mock_format.assert_called_with(response=response, exception=exception, username="user_internal@example.com") mock_logging.assert_called_once() - @patch("application.commons.api.exception_handler.logger.warning") - @patch("application.commons.api.exception_handler.format_log_message") + @patch("application.notifications.api.exception_handler.logger.warning") + @patch("application.notifications.api.exception_handler.format_log_message") def test_other_user_error(self, mock_format, mock_logging): 
exception = ValidationError("Not validated") response = custom_exception_handler(exception, None) @@ -96,9 +94,9 @@ def test_other_user_error(self, mock_format, mock_logging): mock_format.assert_not_called() mock_logging.assert_not_called() - @patch("application.commons.api.exception_handler.logger.error") - @patch("application.commons.api.exception_handler.format_log_message") - @patch("application.commons.api.exception_handler.send_exception_notification") + @patch("application.notifications.api.exception_handler.logger.error") + @patch("application.notifications.api.exception_handler.format_log_message") + @patch("application.notifications.api.exception_handler.send_exception_notification") def test_server_error(self, mock_notification, mock_format, mock_logging): exception = APIException(Exception("Not authentication")) response = custom_exception_handler(exception, None) @@ -106,6 +104,6 @@ def test_server_error(self, mock_notification, mock_format, mock_logging): self.assertEqual(HTTP_500_INTERNAL_SERVER_ERROR, response.status_code) data = {"message": "Internal server error, check logs for details"} self.assertEqual(data, response.data) - mock_format.assert_called_with(response=response, exception=exception) + mock_format.assert_called_with(response=response, exception=exception, username="user_internal@example.com") self.assertEqual(mock_logging.call_count, 2) mock_notification.assert_called_with(exception) diff --git a/backend/unittests/commons/services/test_send_notifications.py b/backend/unittests/notifications/services/test_send_notifications.py similarity index 69% rename from backend/unittests/commons/services/test_send_notifications.py rename to backend/unittests/notifications/services/test_send_notifications.py index 0888665f9..a99a2c3ff 100644 --- a/backend/unittests/commons/services/test_send_notifications.py +++ b/backend/unittests/notifications/services/test_send_notifications.py @@ -3,9 +3,10 @@ from requests import Response -from application.commons.models import Notification, Settings +from application.commons.models import Settings from application.commons.services.functions import get_classname -from application.commons.services.send_notifications import ( +from application.notifications.models import Notification +from application.notifications.services.send_notifications import ( LAST_EXCEPTIONS, _create_notification_message, _get_first_name, @@ -14,12 +15,12 @@ _get_notification_slack_webhook, _get_stack_trace, _ratelimit_exception, - _send_email_notification, - _send_msteams_notification, - _send_slack_notification, get_base_url_frontend, + send_email_notification, send_exception_notification, + send_msteams_notification, send_product_security_gate_notification, + send_slack_notification, send_task_exception_notification, ) from unittests.base_test_case import BaseTestCase @@ -28,18 +29,14 @@ class TestPushNotifications(BaseTestCase): # --- send_product_security_gate_notification --- - @patch("application.commons.services.send_notifications._send_slack_notification") - @patch("application.commons.services.send_notifications._send_msteams_notification") - @patch("application.commons.services.send_notifications._send_email_notification") - @patch("application.commons.services.send_notifications.get_current_user") - @patch("application.commons.services.send_notifications._get_notification_email_to") - @patch( - "application.commons.services.send_notifications._get_notification_slack_webhook" - ) - @patch( - 
"application.commons.services.send_notifications._get_notification_ms_teams_webhook" - ) - @patch("application.commons.models.Notification.objects.create") + @patch("application.notifications.services.send_notifications.send_slack_notification") + @patch("application.notifications.services.send_notifications.send_msteams_notification") + @patch("application.notifications.services.send_notifications.send_email_notification") + @patch("application.notifications.services.send_notifications.get_current_user") + @patch("application.notifications.services.send_notifications._get_notification_email_to") + @patch("application.notifications.services.send_notifications._get_notification_slack_webhook") + @patch("application.notifications.services.send_notifications._get_notification_ms_teams_webhook") + @patch("application.notifications.models.Notification.objects.create") def test_send_product_security_gate_notification_no_webhook_no_email( self, mock_notification_create, @@ -72,20 +69,16 @@ def test_send_product_security_gate_notification_no_webhook_no_email( ) @patch("application.commons.models.Settings.load") - @patch("application.commons.services.send_notifications._send_slack_notification") - @patch("application.commons.services.send_notifications._send_msteams_notification") - @patch("application.commons.services.send_notifications._send_email_notification") - @patch("application.commons.services.send_notifications.get_base_url_frontend") - @patch("application.commons.services.send_notifications._get_first_name") - @patch("application.commons.services.send_notifications.get_current_user") - @patch("application.commons.services.send_notifications._get_notification_email_to") - @patch( - "application.commons.services.send_notifications._get_notification_slack_webhook" - ) - @patch( - "application.commons.services.send_notifications._get_notification_ms_teams_webhook" - ) - @patch("application.commons.models.Notification.objects.create") + @patch("application.notifications.services.send_notifications.send_slack_notification") + @patch("application.notifications.services.send_notifications.send_msteams_notification") + @patch("application.notifications.services.send_notifications.send_email_notification") + @patch("application.notifications.services.send_notifications.get_base_url_frontend") + @patch("application.notifications.services.send_notifications._get_first_name") + @patch("application.notifications.services.send_notifications.get_current_user") + @patch("application.notifications.services.send_notifications._get_notification_email_to") + @patch("application.notifications.services.send_notifications._get_notification_slack_webhook") + @patch("application.notifications.services.send_notifications._get_notification_ms_teams_webhook") + @patch("application.notifications.models.Notification.objects.create") def test_send_product_security_gate_notification_security_gate_none( self, mock_notification_create, @@ -106,15 +99,9 @@ def test_send_product_security_gate_notification_security_gate_none( mock_base_url.return_value = "https://secobserve.com/" mock_get_first_name.return_value = "first_name" mock_current_user.return_value = self.user_internal - mock_get_notification_email_to.return_value = ( - "test1@example.com, test2@example.com" - ) - mock_get_notification_ms_teams_webhook.return_value = ( - "https://msteams.microsoft.com" - ) - mock_get_notification_slack_webhook.return_value = ( - "https://secobserve.slack.com" - ) + mock_get_notification_email_to.return_value = "test1@example.com, 
test2@example.com" + mock_get_notification_ms_teams_webhook.return_value = "https://msteams.microsoft.com" + mock_get_notification_slack_webhook.return_value = "https://secobserve.slack.com" self.product_1.security_gate_passed = None self.product_1.pk = 1 @@ -171,20 +158,16 @@ def test_send_product_security_gate_notification_security_gate_none( ) @patch("application.commons.models.Settings.load") - @patch("application.commons.services.send_notifications._send_slack_notification") - @patch("application.commons.services.send_notifications._send_msteams_notification") - @patch("application.commons.services.send_notifications._send_email_notification") - @patch("application.commons.services.send_notifications.get_base_url_frontend") - @patch("application.commons.services.send_notifications._get_first_name") - @patch("application.commons.services.send_notifications.get_current_user") - @patch("application.commons.services.send_notifications._get_notification_email_to") - @patch( - "application.commons.services.send_notifications._get_notification_slack_webhook" - ) - @patch( - "application.commons.services.send_notifications._get_notification_ms_teams_webhook" - ) - @patch("application.commons.models.Notification.objects.create") + @patch("application.notifications.services.send_notifications.send_slack_notification") + @patch("application.notifications.services.send_notifications.send_msteams_notification") + @patch("application.notifications.services.send_notifications.send_email_notification") + @patch("application.notifications.services.send_notifications.get_base_url_frontend") + @patch("application.notifications.services.send_notifications._get_first_name") + @patch("application.notifications.services.send_notifications.get_current_user") + @patch("application.notifications.services.send_notifications._get_notification_email_to") + @patch("application.notifications.services.send_notifications._get_notification_slack_webhook") + @patch("application.notifications.services.send_notifications._get_notification_ms_teams_webhook") + @patch("application.notifications.models.Notification.objects.create") def test_send_product_security_gate_notification_security_gate_passed( self, mock_notification_create, @@ -205,15 +188,9 @@ def test_send_product_security_gate_notification_security_gate_passed( mock_base_url.return_value = "https://secobserve.com/" mock_get_first_name.return_value = "first_name" mock_current_user.return_value = self.user_internal - mock_get_notification_email_to.return_value = ( - "test1@example.com, test2@example.com" - ) - mock_get_notification_ms_teams_webhook.return_value = ( - "https://msteams.microsoft.com" - ) - mock_get_notification_slack_webhook.return_value = ( - "https://secobserve.slack.com" - ) + mock_get_notification_email_to.return_value = "test1@example.com, test2@example.com" + mock_get_notification_ms_teams_webhook.return_value = "https://msteams.microsoft.com" + mock_get_notification_slack_webhook.return_value = "https://secobserve.slack.com" self.product_1.security_gate_passed = True self.product_1.pk = 1 @@ -270,20 +247,16 @@ def test_send_product_security_gate_notification_security_gate_passed( ) @patch("application.commons.models.Settings.load") - @patch("application.commons.services.send_notifications._send_slack_notification") - @patch("application.commons.services.send_notifications._send_msteams_notification") - @patch("application.commons.services.send_notifications._send_email_notification") - 
@patch("application.commons.services.send_notifications.get_base_url_frontend") - @patch("application.commons.services.send_notifications._get_first_name") - @patch("application.commons.services.send_notifications.get_current_user") - @patch("application.commons.services.send_notifications._get_notification_email_to") - @patch( - "application.commons.services.send_notifications._get_notification_slack_webhook" - ) - @patch( - "application.commons.services.send_notifications._get_notification_ms_teams_webhook" - ) - @patch("application.commons.models.Notification.objects.create") + @patch("application.notifications.services.send_notifications.send_slack_notification") + @patch("application.notifications.services.send_notifications.send_msteams_notification") + @patch("application.notifications.services.send_notifications.send_email_notification") + @patch("application.notifications.services.send_notifications.get_base_url_frontend") + @patch("application.notifications.services.send_notifications._get_first_name") + @patch("application.notifications.services.send_notifications.get_current_user") + @patch("application.notifications.services.send_notifications._get_notification_email_to") + @patch("application.notifications.services.send_notifications._get_notification_slack_webhook") + @patch("application.notifications.services.send_notifications._get_notification_ms_teams_webhook") + @patch("application.notifications.models.Notification.objects.create") def test_send_product_security_gate_notification_security_gate_failed( self, mock_notification_create, @@ -304,15 +277,9 @@ def test_send_product_security_gate_notification_security_gate_failed( mock_base_url.return_value = "https://secobserve.com/" mock_get_first_name.return_value = "first_name" mock_current_user.return_value = self.user_internal - mock_get_notification_email_to.return_value = ( - "test1@example.com, test2@example.com" - ) - mock_get_notification_ms_teams_webhook.return_value = ( - "https://msteams.microsoft.com" - ) - mock_get_notification_slack_webhook.return_value = ( - "https://secobserve.slack.com" - ) + mock_get_notification_email_to.return_value = "test1@example.com, test2@example.com" + mock_get_notification_ms_teams_webhook.return_value = "https://msteams.microsoft.com" + mock_get_notification_slack_webhook.return_value = "https://secobserve.slack.com" self.product_1.security_gate_passed = False self.product_1.pk = 1 @@ -371,12 +338,12 @@ def test_send_product_security_gate_notification_security_gate_failed( # --- send_exception_notification --- @patch("application.commons.models.Settings.load") - @patch("application.commons.services.send_notifications._ratelimit_exception") - @patch("application.commons.services.send_notifications._send_msteams_notification") - @patch("application.commons.services.send_notifications._send_slack_notification") - @patch("application.commons.services.send_notifications._send_email_notification") - @patch("application.commons.services.send_notifications.get_current_user") - @patch("application.commons.models.Notification.objects.create") + @patch("application.notifications.services.send_notifications._ratelimit_exception") + @patch("application.notifications.services.send_notifications.send_msteams_notification") + @patch("application.notifications.services.send_notifications.send_slack_notification") + @patch("application.notifications.services.send_notifications.send_email_notification") + @patch("application.notifications.services.send_notifications.get_current_user") + 
@patch("application.notifications.models.Notification.objects.create") def test_send_exception_notification_no_webhook_no_email( self, mock_notification_create, @@ -404,10 +371,10 @@ def test_send_exception_notification_no_webhook_no_email( ) @patch("application.commons.models.Settings.load") - @patch("application.commons.services.send_notifications._ratelimit_exception") - @patch("application.commons.services.send_notifications._send_msteams_notification") - @patch("application.commons.services.send_notifications._send_slack_notification") - @patch("application.commons.services.send_notifications._send_email_notification") + @patch("application.notifications.services.send_notifications._ratelimit_exception") + @patch("application.notifications.services.send_notifications.send_msteams_notification") + @patch("application.notifications.services.send_notifications.send_slack_notification") + @patch("application.notifications.services.send_notifications.send_email_notification") def test_send_exception_notification_no_ratelimit( self, mock_send_email, @@ -431,13 +398,13 @@ def test_send_exception_notification_no_ratelimit( mock_send_email.assert_not_called() @patch("application.commons.models.Settings.load") - @patch("application.commons.services.send_notifications._ratelimit_exception") - @patch("application.commons.services.send_notifications._send_msteams_notification") - @patch("application.commons.services.send_notifications._send_slack_notification") - @patch("application.commons.services.send_notifications._send_email_notification") - @patch("application.commons.services.send_notifications._get_first_name") - @patch("application.commons.services.send_notifications.get_current_user") - @patch("application.commons.models.Notification.objects.create") + @patch("application.notifications.services.send_notifications._ratelimit_exception") + @patch("application.notifications.services.send_notifications.send_msteams_notification") + @patch("application.notifications.services.send_notifications.send_slack_notification") + @patch("application.notifications.services.send_notifications.send_email_notification") + @patch("application.notifications.services.send_notifications._get_first_name") + @patch("application.notifications.services.send_notifications.get_current_user") + @patch("application.notifications.models.Notification.objects.create") def test_send_exception_notification_success( self, mock_notification_create, @@ -517,11 +484,11 @@ def test_send_exception_notification_success( # --- send_task_exception_notification --- @patch("application.commons.models.Settings.load") - @patch("application.commons.services.send_notifications._ratelimit_exception") - @patch("application.commons.services.send_notifications._send_msteams_notification") - @patch("application.commons.services.send_notifications._send_slack_notification") - @patch("application.commons.services.send_notifications._send_email_notification") - @patch("application.commons.models.Notification.objects.create") + @patch("application.notifications.services.send_notifications._ratelimit_exception") + @patch("application.notifications.services.send_notifications.send_msteams_notification") + @patch("application.notifications.services.send_notifications.send_slack_notification") + @patch("application.notifications.services.send_notifications.send_email_notification") + @patch("application.notifications.models.Notification.objects.create") def test_send_task_exception_notification_no_webhook_no_email( self, mock_notification_create, @@ 
-555,10 +522,10 @@ def test_send_task_exception_notification_no_webhook_no_email( ) @patch("application.commons.models.Settings.load") - @patch("application.commons.services.send_notifications._ratelimit_exception") - @patch("application.commons.services.send_notifications._send_msteams_notification") - @patch("application.commons.services.send_notifications._send_slack_notification") - @patch("application.commons.services.send_notifications._send_email_notification") + @patch("application.notifications.services.send_notifications._ratelimit_exception") + @patch("application.notifications.services.send_notifications.send_msteams_notification") + @patch("application.notifications.services.send_notifications.send_slack_notification") + @patch("application.notifications.services.send_notifications.send_email_notification") def test_send_task_exception_notification_no_ratelimit( self, mock_send_email, @@ -587,12 +554,12 @@ def test_send_task_exception_notification_no_ratelimit( mock_send_email.assert_not_called() @patch("application.commons.models.Settings.load") - @patch("application.commons.services.send_notifications._ratelimit_exception") - @patch("application.commons.services.send_notifications._send_msteams_notification") - @patch("application.commons.services.send_notifications._send_slack_notification") - @patch("application.commons.services.send_notifications._send_email_notification") - @patch("application.commons.services.send_notifications._get_first_name") - @patch("application.commons.models.Notification.objects.create") + @patch("application.notifications.services.send_notifications._ratelimit_exception") + @patch("application.notifications.services.send_notifications.send_msteams_notification") + @patch("application.notifications.services.send_notifications.send_slack_notification") + @patch("application.notifications.services.send_notifications.send_email_notification") + @patch("application.notifications.services.send_notifications._get_first_name") + @patch("application.notifications.models.Notification.objects.create") def test_send_task_exception_notification_success( self, mock_notification_create, @@ -689,29 +656,23 @@ def test_send_task_exception_notification_success( type=Notification.TYPE_TASK, ) - # --- _send_email_notification --- + # --- send_email_notification --- - @patch( - "application.commons.services.send_notifications._create_notification_message" - ) - @patch("application.commons.services.send_notifications.send_mail") - def test_send_email_notification_empty_message( - self, mock_send_email, mock_create_message - ): + @patch("application.notifications.services.send_notifications._create_notification_message") + @patch("application.notifications.services.send_notifications.send_mail") + def test_send_email_notification_empty_message(self, mock_send_email, mock_create_message): mock_create_message.return_value = None - _send_email_notification("test@example.com", "subject", "test_template") + send_email_notification("test@example.com", "subject", "test_template") mock_create_message.assert_called_with("test_template") mock_send_email.assert_not_called() @patch("application.commons.models.Settings.load") - @patch( - "application.commons.services.send_notifications._create_notification_message" - ) - @patch("application.commons.services.send_notifications.send_mail") - @patch("application.commons.services.send_notifications.logger.error") - @patch("application.commons.services.send_notifications.format_log_message") + 
@patch("application.notifications.services.send_notifications._create_notification_message") + @patch("application.notifications.services.send_notifications.send_mail") + @patch("application.notifications.services.send_notifications.logger.error") + @patch("application.notifications.services.send_notifications.format_log_message") def test_send_email_notification_exception( self, mock_format, @@ -726,7 +687,13 @@ def test_send_email_notification_exception( mock_create_message.return_value = "test_message" mock_send_email.side_effect = Exception("test_exception") - _send_email_notification("test@example.com", "subject", "test_template") + with patch.dict( + "os.environ", + { + "EMAIL_HOST": "mail.example.com", + }, + ): + send_email_notification("test@example.com", "subject", "test_template") mock_create_message.assert_called_with("test_template") mock_send_email.assert_called_with( @@ -740,12 +707,10 @@ def test_send_email_notification_exception( mock_format.assert_called_once() @patch("application.commons.models.Settings.load") - @patch( - "application.commons.services.send_notifications._create_notification_message" - ) - @patch("application.commons.services.send_notifications.send_mail") - @patch("application.commons.services.send_notifications.logger.error") - @patch("application.commons.services.send_notifications.format_log_message") + @patch("application.notifications.services.send_notifications._create_notification_message") + @patch("application.notifications.services.send_notifications.send_mail") + @patch("application.notifications.services.send_notifications.logger.error") + @patch("application.notifications.services.send_notifications.format_log_message") def test_send_msteams_notification_success( self, mock_format, @@ -759,7 +724,7 @@ def test_send_msteams_notification_success( mock_settings_load.return_value = settings mock_create_message.return_value = "test_message" - _send_email_notification("test@example.com", "subject", "test_template") + send_email_notification("test@example.com", "subject", "test_template") mock_create_message.assert_called_with("test_template") mock_send_email.assert_called_with( @@ -772,176 +737,132 @@ def test_send_msteams_notification_success( mock_logger.assert_not_called() mock_format.assert_not_called() - # --- _send_msteams_notification --- + # --- send_msteams_notification --- - @patch( - "application.commons.services.send_notifications._create_notification_message" - ) - @patch("application.commons.services.send_notifications.requests.request") - def test_send_msteams_notification_empty_message( - self, mock_request, mock_create_message - ): + @patch("application.notifications.services.send_notifications._create_notification_message") + @patch("application.notifications.services.send_notifications.requests.request") + def test_send_msteams_notification_empty_message(self, mock_request, mock_create_message): mock_create_message.return_value = None - _send_msteams_notification("test_webhook", "test_template") + send_msteams_notification("test_webhook", "test_template") mock_create_message.assert_called_with("test_template") mock_request.assert_not_called() - @patch( - "application.commons.services.send_notifications._create_notification_message" - ) - @patch("application.commons.services.send_notifications.requests.request") - @patch("application.commons.services.send_notifications.logger.error") - @patch("application.commons.services.send_notifications.format_log_message") - def test_send_msteams_notification_exception( - self, mock_format, 
mock_logger, mock_request, mock_create_message - ): + @patch("application.notifications.services.send_notifications._create_notification_message") + @patch("application.notifications.services.send_notifications.requests.request") + @patch("application.notifications.services.send_notifications.logger.error") + @patch("application.notifications.services.send_notifications.format_log_message") + def test_send_msteams_notification_exception(self, mock_format, mock_logger, mock_request, mock_create_message): mock_create_message.return_value = "test_message" mock_request.side_effect = Exception("test_exception") - _send_msteams_notification("test_webhook", "test_template") + send_msteams_notification("test_webhook", "test_template") mock_create_message.assert_called_with("test_template") - mock_request.assert_called_with( - method="POST", url="test_webhook", data="test_message", timeout=60 - ) + mock_request.assert_called_with(method="POST", url="test_webhook", data="test_message", timeout=60) mock_logger.assert_called_once() mock_format.assert_called_once() - @patch( - "application.commons.services.send_notifications._create_notification_message" - ) - @patch("application.commons.services.send_notifications.requests.request") - @patch("application.commons.services.send_notifications.logger.error") - @patch("application.commons.services.send_notifications.format_log_message") - def test_send_msteams_notification_not_ok( - self, mock_format, mock_logger, mock_request, mock_create_message - ): + @patch("application.notifications.services.send_notifications._create_notification_message") + @patch("application.notifications.services.send_notifications.requests.request") + @patch("application.notifications.services.send_notifications.logger.error") + @patch("application.notifications.services.send_notifications.format_log_message") + def test_send_msteams_notification_not_ok(self, mock_format, mock_logger, mock_request, mock_create_message): mock_create_message.return_value = "test_message" response = Response() response.status_code = 400 mock_request.return_value = response - _send_msteams_notification("test_webhook", "test_template") + send_msteams_notification("test_webhook", "test_template") mock_create_message.assert_called_with("test_template") - mock_request.assert_called_with( - method="POST", url="test_webhook", data="test_message", timeout=60 - ) + mock_request.assert_called_with(method="POST", url="test_webhook", data="test_message", timeout=60) mock_logger.assert_called_once() mock_format.assert_called_once() - @patch( - "application.commons.services.send_notifications._create_notification_message" - ) - @patch("application.commons.services.send_notifications.requests.request") - @patch("application.commons.services.send_notifications.logger.error") - @patch("application.commons.services.send_notifications.format_log_message") - def test_send_msteams_notification_success( - self, mock_format, mock_logger, mock_request, mock_create_message - ): + @patch("application.notifications.services.send_notifications._create_notification_message") + @patch("application.notifications.services.send_notifications.requests.request") + @patch("application.notifications.services.send_notifications.logger.error") + @patch("application.notifications.services.send_notifications.format_log_message") + def test_send_msteams_notification_success(self, mock_format, mock_logger, mock_request, mock_create_message): mock_create_message.return_value = "test_message" response = Response() response.status_code = 200 
mock_request.return_value = response - _send_msteams_notification("test_webhook", "test_template") + send_msteams_notification("test_webhook", "test_template") mock_create_message.assert_called_with("test_template") - mock_request.assert_called_with( - method="POST", url="test_webhook", data="test_message", timeout=60 - ) + mock_request.assert_called_with(method="POST", url="test_webhook", data="test_message", timeout=60) mock_logger.assert_not_called() mock_format.assert_not_called() - # --- _send_slack_notification --- + # --- send_slack_notification --- - @patch( - "application.commons.services.send_notifications._create_notification_message" - ) - @patch("application.commons.services.send_notifications.requests.request") - def test_send_slack_notification_empty_message( - self, mock_request, mock_create_message - ): + @patch("application.notifications.services.send_notifications._create_notification_message") + @patch("application.notifications.services.send_notifications.requests.request") + def test_send_slack_notification_empty_message(self, mock_request, mock_create_message): mock_create_message.return_value = None - _send_slack_notification("test_webhook", "test_template") + send_slack_notification("test_webhook", "test_template") mock_create_message.assert_called_with("test_template") mock_request.assert_not_called() - @patch( - "application.commons.services.send_notifications._create_notification_message" - ) - @patch("application.commons.services.send_notifications.requests.request") - @patch("application.commons.services.send_notifications.logger.error") - @patch("application.commons.services.send_notifications.format_log_message") - def test_send_slack_notification_exception( - self, mock_format, mock_logger, mock_request, mock_create_message - ): + @patch("application.notifications.services.send_notifications._create_notification_message") + @patch("application.notifications.services.send_notifications.requests.request") + @patch("application.notifications.services.send_notifications.logger.error") + @patch("application.notifications.services.send_notifications.format_log_message") + def test_send_slack_notification_exception(self, mock_format, mock_logger, mock_request, mock_create_message): mock_create_message.return_value = "test_message" mock_request.side_effect = Exception("test_exception") - _send_slack_notification("test_webhook", "test_template") + send_slack_notification("test_webhook", "test_template") mock_create_message.assert_called_with("test_template") - mock_request.assert_called_with( - method="POST", url="test_webhook", data="test_message", timeout=60 - ) + mock_request.assert_called_with(method="POST", url="test_webhook", data="test_message", timeout=60) mock_logger.assert_called_once() mock_format.assert_called_once() - @patch( - "application.commons.services.send_notifications._create_notification_message" - ) - @patch("application.commons.services.send_notifications.requests.request") - @patch("application.commons.services.send_notifications.logger.error") - @patch("application.commons.services.send_notifications.format_log_message") - def test_send_slack_notification_not_ok( - self, mock_format, mock_logger, mock_request, mock_create_message - ): + @patch("application.notifications.services.send_notifications._create_notification_message") + @patch("application.notifications.services.send_notifications.requests.request") + @patch("application.notifications.services.send_notifications.logger.error") + 
@patch("application.notifications.services.send_notifications.format_log_message") + def test_send_slack_notification_not_ok(self, mock_format, mock_logger, mock_request, mock_create_message): mock_create_message.return_value = "test_message" response = Response() response.status_code = 400 mock_request.return_value = response - _send_slack_notification("test_webhook", "test_template") + send_slack_notification("test_webhook", "test_template") mock_create_message.assert_called_with("test_template") - mock_request.assert_called_with( - method="POST", url="test_webhook", data="test_message", timeout=60 - ) + mock_request.assert_called_with(method="POST", url="test_webhook", data="test_message", timeout=60) mock_logger.assert_called_once() mock_format.assert_called_once() - @patch( - "application.commons.services.send_notifications._create_notification_message" - ) - @patch("application.commons.services.send_notifications.requests.request") - @patch("application.commons.services.send_notifications.logger.error") - @patch("application.commons.services.send_notifications.format_log_message") - def test_send_slack_notification_success( - self, mock_format, mock_logger, mock_request, mock_create_message - ): + @patch("application.notifications.services.send_notifications._create_notification_message") + @patch("application.notifications.services.send_notifications.requests.request") + @patch("application.notifications.services.send_notifications.logger.error") + @patch("application.notifications.services.send_notifications.format_log_message") + def test_send_slack_notification_success(self, mock_format, mock_logger, mock_request, mock_create_message): mock_create_message.return_value = "test_message" response = Response() response.status_code = 200 mock_request.return_value = response - _send_slack_notification("test_webhook", "test_template") + send_slack_notification("test_webhook", "test_template") mock_create_message.assert_called_with("test_template") - mock_request.assert_called_with( - method="POST", url="test_webhook", data="test_message", timeout=60 - ) + mock_request.assert_called_with(method="POST", url="test_webhook", data="test_message", timeout=60) mock_logger.assert_not_called() mock_format.assert_not_called() # --- _create_notification_message --- - @patch("application.commons.services.send_notifications.logger.error") - @patch("application.commons.services.send_notifications.format_log_message") + @patch("application.notifications.services.send_notifications.logger.error") + @patch("application.notifications.services.send_notifications.format_log_message") def test_create_notification_message_not_found(self, mock_format, mock_logging): message = _create_notification_message("invalid_template_name.tpl") self.assertIsNone(message) @@ -1038,9 +959,7 @@ def test_ratelimit_exception_new_key(self): self.assertTrue(_ratelimit_exception(exception)) self.assertEqual(1, len(LAST_EXCEPTIONS.keys())) - difference: timedelta = ( - datetime.now() - LAST_EXCEPTIONS["builtins.Exception/test_exception/None/"] - ) + difference: timedelta = datetime.now() - LAST_EXCEPTIONS["builtins.Exception/test_exception/None/"] self.assertGreater(difference.microseconds, 0) self.assertLess(difference.microseconds, 999) @@ -1051,14 +970,12 @@ def test_ratelimit_exception_true(self, mock_settings_load): mock_settings_load.return_value = settings LAST_EXCEPTIONS.clear() - LAST_EXCEPTIONS[ - "builtins.Exception/test_exception/test_function/test_arguments" - ] = datetime.now() - timedelta(seconds=11) + 
LAST_EXCEPTIONS["builtins.Exception/test_exception/test_function/test_arguments"] = datetime.now() - timedelta( + seconds=11 + ) exception = Exception("test_exception") - self.assertTrue( - _ratelimit_exception(exception, "test_function", "test_arguments") - ) + self.assertTrue(_ratelimit_exception(exception, "test_function", "test_arguments")) self.assertEqual(1, len(LAST_EXCEPTIONS.keys())) @patch("application.commons.models.Settings.load") @@ -1068,31 +985,29 @@ def test_ratelimit_exception_false(self, mock_settings_load): mock_settings_load.return_value = settings LAST_EXCEPTIONS.clear() - LAST_EXCEPTIONS[ - "builtins.Exception/test_exception/test_function/test_arguments" - ] = datetime.now() - timedelta(seconds=9) + LAST_EXCEPTIONS["builtins.Exception/test_exception/test_function/test_arguments"] = datetime.now() - timedelta( + seconds=9 + ) exception = Exception("test_exception") - self.assertFalse( - _ratelimit_exception(exception, "test_function", "test_arguments") - ) + self.assertFalse(_ratelimit_exception(exception, "test_function", "test_arguments")) self.assertEqual(1, len(LAST_EXCEPTIONS.keys())) ## --- _get_user_first_name --- - @patch("application.commons.services.send_notifications.get_user_by_email") + @patch("application.notifications.services.send_notifications.get_user_by_email") def test_get_user_first_name_no_user(self, mock_get_user): mock_get_user.return_value = None self.assertEqual("", _get_first_name("test@example.com")) mock_get_user.assert_called_once_with("test@example.com") - @patch("application.commons.services.send_notifications.get_user_by_email") + @patch("application.notifications.services.send_notifications.get_user_by_email") def test_get_user_first_name_no_first_name(self, mock_get_user): mock_get_user.return_value = self.user_internal self.assertEqual("", _get_first_name("test@example.com")) mock_get_user.assert_called_once_with("test@example.com") - @patch("application.commons.services.send_notifications.get_user_by_email") + @patch("application.notifications.services.send_notifications.get_user_by_email") def test_get_user_first_name_success(self, mock_get_user): mock_get_user.return_value = self.user_internal self.user_internal.first_name = "first_name" @@ -1101,14 +1016,14 @@ def test_get_user_first_name_success(self, mock_get_user): ## --- _get_stack_trace --- - @patch("application.commons.services.send_notifications.traceback.format_tb") + @patch("application.notifications.services.send_notifications.traceback.format_tb") def test_get_stack_trace_format_as_code(self, mock_format): mock_format.return_value = ["line1", "line2"] exception = Exception("test_exception") self.assertEqual("```\nline1line2\n```", _get_stack_trace(exception, True)) mock_format.assert_called_once() - @patch("application.commons.services.send_notifications.traceback.format_tb") + @patch("application.notifications.services.send_notifications.traceback.format_tb") def test_get_stack_trace_plain(self, mock_format): mock_format.return_value = ["line1", "line2"] exception = Exception("test_exception") @@ -1137,16 +1052,12 @@ def test_notification_email_to_product_email_to_empty(self): def test_get_notification_ms_teams_webhook_product_webhook(self): self.product_1.notification_ms_teams_webhook = "test@example.com" - self.assertEqual( - "test@example.com", _get_notification_ms_teams_webhook(self.product_1) - ) + self.assertEqual("test@example.com", _get_notification_ms_teams_webhook(self.product_1)) def test_get_notification_ms_teams_webhook_product_group_webhook(self): 
self.product_group_1.notification_ms_teams_webhook = "test@example.com" self.product_1.product_group = self.product_group_1 - self.assertEqual( - "test@example.com", _get_notification_ms_teams_webhook(self.product_1) - ) + self.assertEqual("test@example.com", _get_notification_ms_teams_webhook(self.product_1)) def test_get_notification_ms_teams_webhook_product_group_webhook_empty(self): self.product_1.product_group = self.product_group_1 @@ -1159,16 +1070,12 @@ def test_get_notification_ms_teams_webhook_product_webhook_empty(self): def test_get_notification_slack_webhook_product_webhook(self): self.product_1.notification_slack_webhook = "test@example.com" - self.assertEqual( - "test@example.com", _get_notification_slack_webhook(self.product_1) - ) + self.assertEqual("test@example.com", _get_notification_slack_webhook(self.product_1)) def test_get_notification_slack_webhook_product_group_webhook(self): self.product_group_1.notification_slack_webhook = "test@example.com" self.product_1.product_group = self.product_group_1 - self.assertEqual( - "test@example.com", _get_notification_slack_webhook(self.product_1) - ) + self.assertEqual("test@example.com", _get_notification_slack_webhook(self.product_1)) def test_get_notification_slack_webhook_product_group_webhook_empty(self): self.product_1.product_group = self.product_group_1 diff --git a/backend/unittests/commons/services/test_tasks.py b/backend/unittests/notifications/services/test_tasks.py similarity index 57% rename from backend/unittests/commons/services/test_tasks.py rename to backend/unittests/notifications/services/test_tasks.py index 6f0275fe0..cb4ae53a9 100644 --- a/backend/unittests/commons/services/test_tasks.py +++ b/backend/unittests/notifications/services/test_tasks.py @@ -1,14 +1,14 @@ -from unittest.mock import ANY, call, patch +from unittest.mock import patch -from application.commons.services.tasks import handle_task_exception +from application.notifications.services.tasks import handle_task_exception from unittests.base_test_case import BaseTestCase class TestTasks(BaseTestCase): @patch("inspect.currentframe") - @patch("application.commons.services.tasks.send_task_exception_notification") - @patch("application.commons.services.tasks.format_log_message") - @patch("application.commons.services.tasks.logger.error") + @patch("application.notifications.services.tasks.send_task_exception_notification") + @patch("application.notifications.services.tasks.format_log_message") + @patch("application.notifications.services.tasks.logger.error") def test_handle_task_exception_without_frame( self, mock_logger, @@ -18,16 +18,20 @@ def test_handle_task_exception_without_frame( ): mock_currentframe.return_value = None exception = Exception("Test exception") - handle_task_exception(exception, self.user_internal) + handle_task_exception(exception, self.user_internal, self.product_1) self.assertEqual(mock_logger.call_count, 2) mock_format_log_message.assert_called_with( message="Error while executing background task", data={}, exception=exception, - user=self.user_internal, + username="user_internal@example.com", ) mock_send_task_exception_notification.assert_called_with( - function=None, arguments=None, user=self.user_internal, exception=exception + function=None, + arguments=None, + user=self.user_internal, + exception=exception, + product=self.product_1, ) mock_currentframe.assert_called_once() diff --git a/backend/unittests/rules/services/test_rego_interpreter.py b/backend/unittests/rules/services/test_rego_interpreter.py new file mode 
100644 index 000000000..47db09eef --- /dev/null +++ b/backend/unittests/rules/services/test_rego_interpreter.py @@ -0,0 +1,193 @@ +import unittest +from unittest.mock import MagicMock, patch + +from application.rules.services.rego_interpreter import RegoException, RegoInterpreter + + +class TestRegoException(unittest.TestCase): + def test_message(self): + exception = RegoException("test error") + self.assertEqual(str(exception), "[ErrorDetail(string='test error', code='invalid')]") + + def test_is_exception(self): + exception = RegoException("test error") + self.assertIsInstance(exception, Exception) + + +class TestRegoInterpreterInit(unittest.TestCase): + + @patch("application.rules.services.rego_interpreter.Interpreter") + def test_init_success(self, mock_interpreter_cls): + mock_interpreter = MagicMock() + mock_interpreter_cls.return_value = mock_interpreter + mock_bundle = MagicMock() + mock_interpreter.build.return_value = mock_bundle + + rego_module = "package rule\ndefault allow = false" + interpreter = RegoInterpreter(rego_module) + + self.assertEqual(interpreter.policy, rego_module) + self.assertEqual(interpreter.rego_bundle, mock_bundle) + self.assertEqual(mock_interpreter.log_level, 1) + mock_interpreter.add_module.assert_called_once_with("rule", rego_module) + mock_interpreter.build.assert_called_once_with("data") + + @patch("application.rules.services.rego_interpreter.Interpreter") + def test_init_rego_error(self, mock_interpreter_cls): + from regopy.rego_shared import RegoError + + mock_interpreter = MagicMock() + mock_interpreter_cls.return_value = mock_interpreter + mock_interpreter.add_module.side_effect = RegoError("syntax error") + + with self.assertRaises(RegoException) as context: + RegoInterpreter("invalid rego") + + self.assertIn("Error while building rego bundle", str(context.exception)) + self.assertIn("syntax error", str(context.exception)) + + +class TestRegoInterpreterQuery(unittest.TestCase): + + @patch("application.rules.services.rego_interpreter.Interpreter") + def setUp(self, mock_interpreter_cls): + mock_interpreter = MagicMock() + mock_interpreter_cls.return_value = mock_interpreter + mock_interpreter.build.return_value = MagicMock() + self.interpreter = RegoInterpreter("package rule") + self.mock_bundle = self.interpreter.rego_bundle + + @patch("application.rules.services.rego_interpreter.Input") + @patch("application.rules.services.rego_interpreter.Interpreter") + def test_query_success(self, mock_interpreter_cls, mock_input_cls): + mock_input = MagicMock() + mock_input_cls.return_value = mock_input + + mock_rego_run = MagicMock() + mock_interpreter_cls.return_value = mock_rego_run + + expected_result = {"severity": "High", "status": "Open"} + mock_expression = MagicMock() + mock_expression.get.return_value = expected_result + mock_node = MagicMock() + mock_node.expressions = [mock_expression] + mock_output = MagicMock() + mock_output.results = [mock_node] + mock_rego_run.query_bundle.return_value = mock_output + + data = {"title": "test"} + result = self.interpreter.query(data) + + self.assertEqual(result, expected_result) + mock_input_cls.assert_called_once_with(data) + mock_rego_run.set_input.assert_called_once_with(mock_input) + mock_rego_run.query_bundle.assert_called_once_with(self.mock_bundle) + mock_expression.get.assert_called_once_with("rule") + + @patch("application.rules.services.rego_interpreter.Input") + @patch("application.rules.services.rego_interpreter.Interpreter") + def test_query_with_none_data(self, mock_interpreter_cls, 
mock_input_cls): + mock_input = MagicMock() + mock_input_cls.return_value = mock_input + + mock_rego_run = MagicMock() + mock_interpreter_cls.return_value = mock_rego_run + + expected_result = {"severity": "Low"} + mock_expression = MagicMock() + mock_expression.get.return_value = expected_result + mock_node = MagicMock() + mock_node.expressions = [mock_expression] + mock_output = MagicMock() + mock_output.results = [mock_node] + mock_rego_run.query_bundle.return_value = mock_output + + result = self.interpreter.query() + + self.assertEqual(result, expected_result) + mock_input_cls.assert_called_once_with(None) + + @patch("application.rules.services.rego_interpreter.Input") + @patch("application.rules.services.rego_interpreter.Interpreter") + def test_query_no_results(self, mock_interpreter_cls, mock_input_cls): + mock_rego_run = MagicMock() + mock_interpreter_cls.return_value = mock_rego_run + + mock_output = MagicMock() + mock_output.results = [] + mock_rego_run.query_bundle.return_value = mock_output + + with self.assertRaises(RegoException) as context: + self.interpreter.query({"title": "test"}) + + self.assertEqual(str(context.exception), "[ErrorDetail(string='Rego output has no results', code='invalid')]") + + @patch("application.rules.services.rego_interpreter.Input") + @patch("application.rules.services.rego_interpreter.Interpreter") + def test_query_no_results_none(self, mock_interpreter_cls, mock_input_cls): + mock_rego_run = MagicMock() + mock_interpreter_cls.return_value = mock_rego_run + + mock_output = MagicMock() + mock_output.results = None + mock_rego_run.query_bundle.return_value = mock_output + + with self.assertRaises(RegoException) as context: + self.interpreter.query({"title": "test"}) + + self.assertEqual(str(context.exception), "[ErrorDetail(string='Rego output has no results', code='invalid')]") + + @patch("application.rules.services.rego_interpreter.Input") + @patch("application.rules.services.rego_interpreter.Interpreter") + def test_query_no_expressions(self, mock_interpreter_cls, mock_input_cls): + mock_rego_run = MagicMock() + mock_interpreter_cls.return_value = mock_rego_run + + mock_node = MagicMock() + mock_node.expressions = [] + mock_output = MagicMock() + mock_output.results = [mock_node] + mock_rego_run.query_bundle.return_value = mock_output + + with self.assertRaises(RegoException) as context: + self.interpreter.query({"title": "test"}) + + self.assertEqual( + str(context.exception), "[ErrorDetail(string='Rego results have no expressions', code='invalid')]" + ) + + @patch("application.rules.services.rego_interpreter.Input") + @patch("application.rules.services.rego_interpreter.Interpreter") + def test_query_no_rule_element(self, mock_interpreter_cls, mock_input_cls): + mock_rego_run = MagicMock() + mock_interpreter_cls.return_value = mock_rego_run + + mock_expression = MagicMock() + mock_expression.get.return_value = None + mock_node = MagicMock() + mock_node.expressions = [mock_expression] + mock_output = MagicMock() + mock_output.results = [mock_node] + mock_rego_run.query_bundle.return_value = mock_output + + with self.assertRaises(RegoException) as context: + self.interpreter.query({"title": "test"}) + + self.assertEqual( + str(context.exception), "[ErrorDetail(string=\"Rego expressions have no 'rule' element\", code='invalid')]" + ) + + @patch("application.rules.services.rego_interpreter.Input") + @patch("application.rules.services.rego_interpreter.Interpreter") + def test_query_rego_error(self, mock_interpreter_cls, mock_input_cls): + from 
regopy.rego_shared import RegoError + + mock_rego_run = MagicMock() + mock_interpreter_cls.return_value = mock_rego_run + mock_rego_run.query_bundle.side_effect = RegoError("query failed") + + with self.assertRaises(RegoException) as context: + self.interpreter.query({"title": "test"}) + + self.assertIn("Error while querying rego module", str(context.exception)) + self.assertIn("query failed", str(context.exception)) diff --git a/backend/unittests/rules/services/test_rule_engine.py b/backend/unittests/rules/services/test_rule_engine.py index 508277d7e..70987b8a9 100644 --- a/backend/unittests/rules/services/test_rule_engine.py +++ b/backend/unittests/rules/services/test_rule_engine.py @@ -1,7 +1,7 @@ from unittest.mock import call, patch from application.core.models import Product -from application.rules.services.rule_engine import Rule_Engine +from application.rules.services.rule_engine import Rule_Engine, _check_regex from unittests.base_test_case import BaseTestCase @@ -62,22 +62,22 @@ def test_check_regex_no_pattern(self): product = Product() product.save() rule_engine = Rule_Engine(product) - self.assertTrue(rule_engine._check_regex(None, "value")) + self.assertTrue(_check_regex("", "value")) def test_check_regex_no_value(self): product = Product() product.save() rule_engine = Rule_Engine(product) - self.assertFalse(rule_engine._check_regex("pattern", None)) + self.assertFalse(_check_regex("pattern", "")) def test_check_regex_no_match(self): product = Product() product.save() rule_engine = Rule_Engine(product) - self.assertFalse(rule_engine._check_regex("pattern", "value")) + self.assertFalse(_check_regex("pattern", "value")) def test_check_regex_match(self): product = Product() product.save() rule_engine = Rule_Engine(product) - self.assertTrue(rule_engine._check_regex("v.+lue", "VALUE")) + self.assertTrue(_check_regex("v.+lue", "VALUE")) diff --git a/backend/unittests/rules/services/test_simulator.py b/backend/unittests/rules/services/test_simulator.py new file mode 100644 index 000000000..def18ad3e --- /dev/null +++ b/backend/unittests/rules/services/test_simulator.py @@ -0,0 +1,324 @@ +import unittest +from unittest.mock import MagicMock, patch + +from application.core.models import Observation, Product +from application.rules.models import Rule +from application.rules.services.simulator import MAX_OBSERVATIONS, simulate_rule +from application.rules.types import Rule_Type + + +class TestSimulateRule(unittest.TestCase): + + def setUp(self): + self.mock_product = MagicMock(spec=Product) + self.mock_product.name = "Test Product" + self.mock_product.is_product_group = False + self.mock_product.pk = 1 + + self.mock_observation = MagicMock(spec=Observation) + self.mock_observation.product = self.mock_product + self.mock_observation.title = "Test Observation" + + self.mock_rule = MagicMock(spec=Rule) + self.mock_rule.product = None + self.mock_rule.type = Rule_Type.RULE_TYPE_FIELDS + self.mock_rule.parser = None + self.mock_rule.scanner_prefix = None + + def _setup_queryset_mock(self, mock_obs_manager, observations): + """Configure the chained queryset mock to return the given observations.""" + mock_qs = MagicMock() + mock_qs.filter.return_value = mock_qs + mock_qs.order_by.return_value = mock_qs + mock_qs.select_related.return_value = mock_qs + mock_qs.__iter__ = lambda self_qs: iter(observations) + mock_obs_manager.filter.return_value = mock_qs + return mock_qs + + # --- Product routing tests --- + + @patch("application.rules.services.simulator.normalize_observation_fields") + 
@patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_product_single(self, mock_obs_manager, mock_rule_engine_cls, mock_normalize): + self._setup_queryset_mock(mock_obs_manager, [self.mock_observation]) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = True + self.mock_rule.product = self.mock_product + + count, results = simulate_rule(self.mock_rule) + + self.assertEqual(count, 1) + self.assertEqual(results, [self.mock_observation]) + mock_obs_manager.filter.assert_called_once_with(product=self.mock_product) + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_product_group(self, mock_obs_manager, mock_rule_engine_cls, mock_normalize): + mock_product_group = MagicMock(spec=Product) + mock_product_group.is_product_group = True + self._setup_queryset_mock(mock_obs_manager, [self.mock_observation]) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = True + self.mock_rule.product = mock_product_group + + count, results = simulate_rule(self.mock_rule) + + self.assertEqual(count, 1) + self.assertEqual(results, [self.mock_observation]) + mock_obs_manager.filter.assert_called_once_with(product__in=mock_product_group.products.all()) + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.get_products") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_no_product_general_rule(self, mock_obs_manager, mock_rule_engine_cls, mock_get_products, mock_normalize): + mock_get_products.return_value = [self.mock_product] + self._setup_queryset_mock(mock_obs_manager, [self.mock_observation]) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = True + + count, results = simulate_rule(self.mock_rule) + + self.assertEqual(count, 1) + self.assertEqual(results, [self.mock_observation]) + mock_obs_manager.filter.assert_called_once_with( + product__in=[self.mock_product], product__apply_general_rules=True + ) + mock_get_products.assert_called_once() + + # --- RULE_TYPE_FIELDS filtering tests --- + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.get_products") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_fields_type_with_parser_filter( + self, mock_obs_manager, mock_rule_engine_cls, mock_get_products, mock_normalize + ): + mock_get_products.return_value = [self.mock_product] + mock_qs = self._setup_queryset_mock(mock_obs_manager, [self.mock_observation]) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = True + self.mock_rule.parser = MagicMock() + + count, results = simulate_rule(self.mock_rule) + + self.assertEqual(count, 1) + mock_qs.filter.assert_any_call(parser=self.mock_rule.parser) + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.get_products") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_fields_type_with_scanner_prefix_filter( + self, mock_obs_manager, 
mock_rule_engine_cls, mock_get_products, mock_normalize + ): + mock_get_products.return_value = [self.mock_product] + mock_qs = self._setup_queryset_mock(mock_obs_manager, [self.mock_observation]) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = True + self.mock_rule.scanner_prefix = "Scanner/" + + count, results = simulate_rule(self.mock_rule) + + self.assertEqual(count, 1) + mock_qs.filter.assert_any_call(scanner__startswith="Scanner/") + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.get_products") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_fields_type_with_parser_and_scanner_prefix( + self, mock_obs_manager, mock_rule_engine_cls, mock_get_products, mock_normalize + ): + mock_get_products.return_value = [self.mock_product] + mock_qs = self._setup_queryset_mock(mock_obs_manager, [self.mock_observation]) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = True + self.mock_rule.parser = MagicMock() + self.mock_rule.scanner_prefix = "Scanner/" + + count, results = simulate_rule(self.mock_rule) + + self.assertEqual(count, 1) + mock_qs.filter.assert_any_call(parser=self.mock_rule.parser) + mock_qs.filter.assert_any_call(scanner__startswith="Scanner/") + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.get_products") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_rego_type_no_parser_or_scanner_filter( + self, mock_obs_manager, mock_rule_engine_cls, mock_get_products, mock_normalize + ): + mock_get_products.return_value = [self.mock_product] + mock_qs = self._setup_queryset_mock(mock_obs_manager, [self.mock_observation]) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = True + self.mock_rule.type = Rule_Type.RULE_TYPE_REGO + self.mock_rule.parser = MagicMock() + self.mock_rule.scanner_prefix = "Scanner/" + + count, results = simulate_rule(self.mock_rule) + + self.assertEqual(count, 1) + mock_qs.filter.assert_not_called() + + # --- Observation matching and counting tests --- + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_no_matches(self, mock_obs_manager, mock_rule_engine_cls, mock_normalize): + self._setup_queryset_mock(mock_obs_manager, [self.mock_observation]) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = False + self.mock_rule.product = self.mock_product + + count, results = simulate_rule(self.mock_rule) + + self.assertEqual(count, 0) + self.assertEqual(results, []) + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_empty_observations(self, mock_obs_manager, mock_rule_engine_cls, mock_normalize): + self._setup_queryset_mock(mock_obs_manager, []) + self.mock_rule.product = self.mock_product + + count, results = simulate_rule(self.mock_rule) + + self.assertEqual(count, 0) + self.assertEqual(results, []) + mock_rule_engine_cls.assert_not_called() + mock_normalize.assert_not_called() + + 
@patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_max_observations_caps_results(self, mock_obs_manager, mock_rule_engine_cls, mock_normalize): + observations = [] + for i in range(150): + obs = MagicMock(spec=Observation) + obs.product = self.mock_product + obs.title = f"Observation {i}" + observations.append(obs) + + self._setup_queryset_mock(mock_obs_manager, observations) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = True + self.mock_rule.product = self.mock_product + + count, results = simulate_rule(self.mock_rule) + + self.assertEqual(count, 150) + self.assertEqual(len(results), MAX_OBSERVATIONS) + for i in range(MAX_OBSERVATIONS): + self.assertEqual(results[i].title, f"Observation {i}") + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_multiple_observations_mixed_matches(self, mock_obs_manager, mock_rule_engine_cls, mock_normalize): + obs_match = MagicMock(spec=Observation) + obs_match.product = self.mock_product + obs_no_match = MagicMock(spec=Observation) + obs_no_match.product = self.mock_product + + self._setup_queryset_mock(mock_obs_manager, [obs_match, obs_no_match, obs_match]) + mock_engine = mock_rule_engine_cls.return_value + mock_engine.check_rule_for_observation.side_effect = [True, False, True] + self.mock_rule.product = self.mock_product + + count, results = simulate_rule(self.mock_rule) + + self.assertEqual(count, 2) + self.assertEqual(results, [obs_match, obs_match]) + + # --- Rule engine caching tests --- + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_rule_engine_cached_per_product(self, mock_obs_manager, mock_rule_engine_cls, mock_normalize): + product_a = MagicMock(spec=Product) + product_a.pk = 10 + product_b = MagicMock(spec=Product) + product_b.pk = 20 + + obs_a1 = MagicMock(spec=Observation) + obs_a1.product = product_a + obs_a2 = MagicMock(spec=Observation) + obs_a2.product = product_a + obs_b = MagicMock(spec=Observation) + obs_b.product = product_b + + self._setup_queryset_mock(mock_obs_manager, [obs_a1, obs_a2, obs_b]) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = True + self.mock_rule.product = self.mock_product + + simulate_rule(self.mock_rule) + + self.assertEqual(mock_rule_engine_cls.call_count, 2) + mock_rule_engine_cls.assert_any_call(product_a) + mock_rule_engine_cls.assert_any_call(product_b) + + # --- observation_before reset and normalize tests --- + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_normalize_called_for_each_observation(self, mock_obs_manager, mock_rule_engine_cls, mock_normalize): + obs1 = MagicMock(spec=Observation) + obs1.product = self.mock_product + obs2 = MagicMock(spec=Observation) + obs2.product = self.mock_product + + self._setup_queryset_mock(mock_obs_manager, [obs1, obs2]) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = False + self.mock_rule.product = 
self.mock_product + + simulate_rule(self.mock_rule) + + self.assertEqual(mock_normalize.call_count, 2) + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.copy") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_observation_before_fields_are_reset( + self, mock_obs_manager, mock_rule_engine_cls, mock_copy, mock_normalize + ): + obs_before = MagicMock(spec=Observation) + mock_copy.return_value = obs_before + + self._setup_queryset_mock(mock_obs_manager, [self.mock_observation]) + mock_rule_engine_cls.return_value.check_rule_for_observation.return_value = False + self.mock_rule.product = self.mock_product + + simulate_rule(self.mock_rule) + + self.assertEqual(obs_before.rule_status, "") + self.assertEqual(obs_before.rule_rego_status, "") + self.assertEqual(obs_before.rule_severity, "") + self.assertIsNone(obs_before.rule_priority) + self.assertIsNone(obs_before.rule_rego_priority) + self.assertEqual(obs_before.rule_vex_justification, "") + self.assertEqual(obs_before.rule_rego_vex_justification, "") + self.assertIsNone(obs_before.general_rule) + self.assertIsNone(obs_before.general_rule_rego) + self.assertIsNone(obs_before.product_rule) + self.assertIsNone(obs_before.product_rule_rego) + mock_normalize.assert_called_once_with(obs_before) + + # --- check_rule_for_observation call tests --- + + @patch("application.rules.services.simulator.normalize_observation_fields") + @patch("application.rules.services.simulator.Rule_Engine") + @patch("application.rules.services.simulator.Observation.objects") + def test_check_rule_called_with_simulation_true(self, mock_obs_manager, mock_rule_engine_cls, mock_normalize): + self._setup_queryset_mock(mock_obs_manager, [self.mock_observation]) + mock_engine = mock_rule_engine_cls.return_value + mock_engine.check_rule_for_observation.return_value = True + self.mock_rule.product = self.mock_product + + simulate_rule(self.mock_rule) + + args, kwargs = mock_engine.check_rule_for_observation.call_args + self.assertEqual(args[0], self.mock_rule) + self.assertEqual(args[1], self.mock_observation) + self.assertTrue(args[3]) diff --git a/backend/unittests/vex/api/files/csaf_given_vulnerability.json b/backend/unittests/vex/api/files/csaf_given_vulnerability.json index 3192f2e75..2ed5ec06b 100644 --- a/backend/unittests/vex/api/files/csaf_given_vulnerability.json +++ b/backend/unittests/vex/api/files/csaf_given_vulnerability.json @@ -18,7 +18,7 @@ "generator": { "engine": { "name": "SecObserve", - "version": "1.26.0" + "version": "1.48.0" } }, "id": "CSAF_2020_0001_0001", diff --git a/backend/unittests/vex/api/files/csaf_given_vulnerability_update.json b/backend/unittests/vex/api/files/csaf_given_vulnerability_update.json index bdf28789c..734542fea 100644 --- a/backend/unittests/vex/api/files/csaf_given_vulnerability_update.json +++ b/backend/unittests/vex/api/files/csaf_given_vulnerability_update.json @@ -18,7 +18,7 @@ "generator": { "engine": { "name": "SecObserve", - "version": "1.26.0" + "version": "1.48.0" } }, "id": "CSAF_2020_0001_0002", diff --git a/backend/unittests/vex/api/files/csaf_product_branches.json b/backend/unittests/vex/api/files/csaf_product_branches.json index 41f48684e..72516dcc1 100644 --- a/backend/unittests/vex/api/files/csaf_product_branches.json +++ b/backend/unittests/vex/api/files/csaf_product_branches.json @@ -18,7 +18,7 @@ "generator": { "engine": { "name": "SecObserve", - 
"version": "1.26.0" + "version": "1.48.0" } }, "id": "CSAF_2020_0001_0001", diff --git a/backend/unittests/vex/api/files/csaf_product_given_branch.json b/backend/unittests/vex/api/files/csaf_product_given_branch.json index 199505d36..9bc537c14 100644 --- a/backend/unittests/vex/api/files/csaf_product_given_branch.json +++ b/backend/unittests/vex/api/files/csaf_product_given_branch.json @@ -18,7 +18,7 @@ "generator": { "engine": { "name": "SecObserve", - "version": "1.26.0" + "version": "1.48.0" } }, "id": "CSAF_2020_0001_0001", diff --git a/backend/unittests/vex/api/files/csaf_product_no_branch.json b/backend/unittests/vex/api/files/csaf_product_no_branch.json index dd5d930e5..62fd4791d 100644 --- a/backend/unittests/vex/api/files/csaf_product_no_branch.json +++ b/backend/unittests/vex/api/files/csaf_product_no_branch.json @@ -18,7 +18,7 @@ "generator": { "engine": { "name": "SecObserve", - "version": "1.26.0" + "version": "1.48.0" } }, "id": "CSAF_2020_0001_0001", diff --git a/backend/unittests/vex/api/files/csaf_product_no_branch_update.json b/backend/unittests/vex/api/files/csaf_product_no_branch_update.json index 6d4794d6c..9a546ce72 100644 --- a/backend/unittests/vex/api/files/csaf_product_no_branch_update.json +++ b/backend/unittests/vex/api/files/csaf_product_no_branch_update.json @@ -18,7 +18,7 @@ "generator": { "engine": { "name": "SecObserve", - "version": "1.26.0" + "version": "1.48.0" } }, "id": "CSAF_2020_0001_0002", diff --git a/backend/unittests/vex/api/files/cyclonedx_given_vulnerability.json b/backend/unittests/vex/api/files/cyclonedx_given_vulnerability.json new file mode 100644 index 000000000..a311da301 --- /dev/null +++ b/backend/unittests/vex/api/files/cyclonedx_given_vulnerability.json @@ -0,0 +1,63 @@ +{ + "metadata": { + "authors": [ + { + "name": "Author" + } + ], + "properties": [ + { + "name": "prefix", + "value": "CycloneDX" + } + ], + "timestamp": "2020-01-01T04:30:00+00:00", + "tools": { + "components": [ + { + "name": "SecObserve / 1.48.0", + "type": "application" + } + ] + } + }, + "serialNumber": "urn:uuid:e8d7b87f-83ec-4e41-af84-25f2b1d2739d", + "version": 1, + "vulnerabilities": [ + { + "affects": [ + { + "ref": "urn:cdx:2/vex_comp_2:2.0.0" + } + ], + "analysis": { + "firstIssued": "2020-01-01T04:30:00+00:00", + "lastUpdated": "2020-01-01T04:30:00+00:00", + "state": "in_triage" + }, + "bom-ref": "6629b2c10f4c07ee383dfeaa72d40522ca364d082975d540ae2a24dbde09ef8b", + "description": "description 2", + "id": "CVE-vulnerability_2", + "recommendation": "Upgrade to release 2.1.0" + }, + { + "affects": [ + { + "ref": "urn:cdx:4/vex_comp_2:2.0.0" + } + ], + "analysis": { + "firstIssued": "2020-01-01T04:30:00+00:00", + "lastUpdated": "2020-01-01T04:30:00+00:00", + "state": "exploitable" + }, + "bom-ref": "c54e29f61cb0781125092b04e7778725c26dc697d7199195ec79289cffaaea2b", + "description": "description 2", + "id": "CVE-vulnerability_2", + "recommendation": "Upgrade to release 2.1.0" + } + ], + "$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.6" +} \ No newline at end of file diff --git a/backend/unittests/vex/api/files/cyclonedx_given_vulnerability_update.json b/backend/unittests/vex/api/files/cyclonedx_given_vulnerability_update.json new file mode 100644 index 000000000..c9ab2fb0b --- /dev/null +++ b/backend/unittests/vex/api/files/cyclonedx_given_vulnerability_update.json @@ -0,0 +1,48 @@ +{ + "metadata": { + "manufacturer": { + "name": "Manufacturer" + }, + "properties": [ + { + "name": "prefix", + 
"value": "CycloneDX" + } + ], + "timestamp": "2020-02-01T04:30:00+00:00", + "tools": { + "components": [ + { + "name": "SecObserve / 1.48.0", + "type": "application" + } + ] + } + }, + "serialNumber": "urn:uuid:e8d7b87f-83ec-4e41-af84-25f2b1d2739d", + "version": 2, + "vulnerabilities": [ + { + "affects": [ + { + "ref": "urn:cdx:2/vex_comp_2:2.0.0" + }, + { + "ref": "urn:cdx:4/vex_comp_2:2.0.0" + } + ], + "analysis": { + "firstIssued": "2020-01-01T04:30:00+00:00", + "lastUpdated": "2020-02-01T04:30:00+00:00", + "state": "exploitable" + }, + "bom-ref": "6629b2c10f4c07ee383dfeaa72d40522ca364d082975d540ae2a24dbde09ef8b", + "description": "description 2", + "id": "CVE-vulnerability_2", + "recommendation": "Upgrade to release 2.1.0" + } + ], + "$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.6" +} \ No newline at end of file diff --git a/backend/unittests/vex/api/files/cyclonedx_product_branches.json b/backend/unittests/vex/api/files/cyclonedx_product_branches.json new file mode 100644 index 000000000..f9e5da869 --- /dev/null +++ b/backend/unittests/vex/api/files/cyclonedx_product_branches.json @@ -0,0 +1,76 @@ +{ + "metadata": { + "manufacturer": { + "name": "Manufacturer" + }, + "properties": [ + { + "name": "prefix", + "value": "CycloneDX" + } + ], + "timestamp": "2020-01-01T04:30:00+00:00", + "tools": { + "components": [ + { + "name": "SecObserve / 1.48.0", + "type": "application" + } + ] + } + }, + "serialNumber": "urn:uuid:e8d7b87f-83ec-4e41-af84-25f2b1d2739d", + "version": 1, + "vulnerabilities": [ + { + "affects": [ + { + "ref": "urn:cdx:4/vex_comp_2:2.0.0" + } + ], + "analysis": { + "firstIssued": "2020-01-01T04:30:00+00:00", + "lastUpdated": "2020-01-01T04:30:00+00:00", + "state": "exploitable" + }, + "bom-ref": "c54e29f61cb0781125092b04e7778725c26dc697d7199195ec79289cffaaea2b", + "description": "description 2", + "id": "CVE-vulnerability_2", + "recommendation": "Upgrade to release 2.1.0" + }, + { + "affects": [ + { + "ref": "urn:cdx:6/vex_comp_4:4.0.0" + } + ], + "analysis": { + "firstIssued": "2020-01-01T04:30:00+00:00", + "lastUpdated": "2020-01-01T04:30:00+00:00", + "state": "resolved" + }, + "bom-ref": "5d37a73fa97dac5aca05c237f4097c2d0dc44d55c4ba395eceab0fadcb4cb940", + "description": "description 3", + "id": "vex_vulnerability_3" + }, + { + "affects": [ + { + "ref": "urn:cdx:7/vex_comp_5:5.0.0" + } + ], + "analysis": { + "detail": "Should be no problem", + "firstIssued": "2020-01-01T04:30:00+00:00", + "lastUpdated": "2020-01-01T04:30:00+00:00", + "state": "not_affected" + }, + "bom-ref": "714a955462225f0769affd3f3f59e9d409c6072b95c05509626fa0c72f4ad7dd", + "description": "description 4", + "id": "vex_vulnerability_4" + } + ], + "$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.6" +} \ No newline at end of file diff --git a/backend/unittests/vex/api/files/cyclonedx_product_given_branch.json b/backend/unittests/vex/api/files/cyclonedx_product_given_branch.json new file mode 100644 index 000000000..3611904b0 --- /dev/null +++ b/backend/unittests/vex/api/files/cyclonedx_product_given_branch.json @@ -0,0 +1,45 @@ +{ + "metadata": { + "manufacturer": { + "name": "Manufacturer" + }, + "properties": [ + { + "name": "prefix", + "value": "CycloneDX" + } + ], + "timestamp": "2020-01-01T04:30:00+00:00", + "tools": { + "components": [ + { + "name": "SecObserve / 1.48.0", + "type": "application" + } + ] + } + }, + "serialNumber": 
"urn:uuid:e8d7b87f-83ec-4e41-af84-25f2b1d2739d", + "version": 1, + "vulnerabilities": [ + { + "affects": [ + { + "ref": "urn:cdx:7/vex_comp_5:5.0.0" + } + ], + "analysis": { + "detail": "Should be no problem", + "firstIssued": "2020-01-01T04:30:00+00:00", + "lastUpdated": "2020-01-01T04:30:00+00:00", + "state": "not_affected" + }, + "bom-ref": "714a955462225f0769affd3f3f59e9d409c6072b95c05509626fa0c72f4ad7dd", + "description": "description 4", + "id": "vex_vulnerability_4" + } + ], + "$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.6" +} \ No newline at end of file diff --git a/backend/unittests/vex/api/files/cyclonedx_product_no_branch.json b/backend/unittests/vex/api/files/cyclonedx_product_no_branch.json new file mode 100644 index 000000000..24dd0f32e --- /dev/null +++ b/backend/unittests/vex/api/files/cyclonedx_product_no_branch.json @@ -0,0 +1,79 @@ +{ + "metadata": { + "authors": [ + { + "name": "Author" + } + ], + "properties": [ + { + "name": "prefix", + "value": "CycloneDX" + } + ], + "timestamp": "2020-01-01T04:30:00+00:00", + "tools": { + "components": [ + { + "name": "SecObserve / 1.48.0", + "type": "application" + } + ] + } + }, + "serialNumber": "urn:uuid:e8d7b87f-83ec-4e41-af84-25f2b1d2739d", + "version": 1, + "vulnerabilities": [ + { + "affects": [ + { + "ref": "urn:cdx:2/vex_comp_2:2.0.0" + } + ], + "analysis": { + "firstIssued": "2020-01-01T04:30:00+00:00", + "lastUpdated": "2020-01-01T04:30:00+00:00", + "state": "in_triage" + }, + "bom-ref": "6629b2c10f4c07ee383dfeaa72d40522ca364d082975d540ae2a24dbde09ef8b", + "description": "description 2", + "id": "CVE-vulnerability_2", + "recommendation": "Upgrade to release 2.1.0" + }, + { + "affects": [ + { + "ref": "urn:cdx:1/vex_comp_1:1.0.0" + } + ], + "analysis": { + "firstIssued": "2020-01-01T04:30:00+00:00", + "lastUpdated": "2020-01-01T04:30:00+00:00", + "state": "exploitable" + }, + "bom-ref": "914227c039f64674d20d3ccc87837b3a3f4677c69a608fbb8479662b188e0868", + "id": "GHSA-vulnerability_1", + "recommendation": "Upgrade to release 1.1.0" + }, + { + "affects": [ + { + "ref": "urn:cdx:3/vex_comp_3:3.0.0" + } + ], + "analysis": { + "detail": "Should be no problem", + "firstIssued": "2020-01-01T04:30:00+00:00", + "justification": "code_not_reachable", + "lastUpdated": "2020-01-01T04:30:00+00:00", + "state": "not_affected" + }, + "bom-ref": "49f4e05fb18b3629a44b087ed362270c5d385694507197d05e100555238d5971", + "description": "description 3", + "id": "vex_vulnerability_3" + } + ], + "$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.6" +} \ No newline at end of file diff --git a/backend/unittests/vex/api/files/cyclonedx_product_no_branch_update.json b/backend/unittests/vex/api/files/cyclonedx_product_no_branch_update.json new file mode 100644 index 000000000..834b5adc9 --- /dev/null +++ b/backend/unittests/vex/api/files/cyclonedx_product_no_branch_update.json @@ -0,0 +1,78 @@ +{ + "metadata": { + "manufacturer": { + "name": "New manufacturer" + }, + "properties": [ + { + "name": "prefix", + "value": "CycloneDX" + } + ], + "timestamp": "2020-02-01T04:30:00+00:00", + "tools": { + "components": [ + { + "name": "SecObserve / 1.48.0", + "type": "application" + } + ] + } + }, + "serialNumber": "urn:uuid:e8d7b87f-83ec-4e41-af84-25f2b1d2739d", + "version": 2, + "vulnerabilities": [ + { + "affects": [ + { + "ref": "urn:cdx:2/vex_comp_2:2.0.0" + } + ], + "analysis": { + "firstIssued": "2020-01-01T04:30:00+00:00", + 
"lastUpdated": "2020-02-01T04:30:00+00:00", + "state": "exploitable" + }, + "bom-ref": "6629b2c10f4c07ee383dfeaa72d40522ca364d082975d540ae2a24dbde09ef8b", + "description": "description 2", + "id": "CVE-vulnerability_2", + "recommendation": "Upgrade to release 2.1.0" + }, + { + "affects": [ + { + "ref": "urn:cdx:1/vex_comp_1:1.0.0" + } + ], + "analysis": { + "firstIssued": "2020-01-01T04:30:00+00:00", + "lastUpdated": "2020-02-01T04:30:00+00:00", + "state": "exploitable" + }, + "bom-ref": "914227c039f64674d20d3ccc87837b3a3f4677c69a608fbb8479662b188e0868", + "description": "new description", + "id": "GHSA-vulnerability_1", + "recommendation": "Upgrade to release 1.1.0" + }, + { + "affects": [ + { + "ref": "urn:cdx:3/vex_comp_3:3.0.0" + } + ], + "analysis": { + "detail": "Should be no problem", + "firstIssued": "2020-01-01T04:30:00+00:00", + "justification": "code_not_reachable", + "lastUpdated": "2020-02-01T04:30:00+00:00", + "state": "not_affected" + }, + "bom-ref": "49f4e05fb18b3629a44b087ed362270c5d385694507197d05e100555238d5971", + "description": "description 3", + "id": "vex_vulnerability_3" + } + ], + "$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.6" +} \ No newline at end of file diff --git a/backend/unittests/vex/api/files/openvex_given_vulnerability.json b/backend/unittests/vex/api/files/openvex_given_vulnerability.json index a6e246ad0..50d0b40b0 100644 --- a/backend/unittests/vex/api/files/openvex_given_vulnerability.json +++ b/backend/unittests/vex/api/files/openvex_given_vulnerability.json @@ -47,6 +47,6 @@ } ], "timestamp": "2020-01-01T04:30:00+00:00", - "tooling": "SecObserve / 1.26.0", + "tooling": "SecObserve / 1.48.0", "version": 1 } \ No newline at end of file diff --git a/backend/unittests/vex/api/files/openvex_given_vulnerability_update.json b/backend/unittests/vex/api/files/openvex_given_vulnerability_update.json index df4a59e8c..4aba54634 100644 --- a/backend/unittests/vex/api/files/openvex_given_vulnerability_update.json +++ b/backend/unittests/vex/api/files/openvex_given_vulnerability_update.json @@ -37,6 +37,6 @@ } ], "timestamp": "2020-01-01T04:30:00+00:00", - "tooling": "SecObserve / 1.26.0", + "tooling": "SecObserve / 1.48.0", "version": 2 } \ No newline at end of file diff --git a/backend/unittests/vex/api/files/openvex_product_branches.json b/backend/unittests/vex/api/files/openvex_product_branches.json index 66f31e918..3d6f6da0d 100644 --- a/backend/unittests/vex/api/files/openvex_product_branches.json +++ b/backend/unittests/vex/api/files/openvex_product_branches.json @@ -65,6 +65,6 @@ } ], "timestamp": "2020-01-01T04:30:00+00:00", - "tooling": "SecObserve / 1.26.0", + "tooling": "SecObserve / 1.48.0", "version": 1 } \ No newline at end of file diff --git a/backend/unittests/vex/api/files/openvex_product_given_branch.json b/backend/unittests/vex/api/files/openvex_product_given_branch.json index fb348490d..3f3c0f06c 100644 --- a/backend/unittests/vex/api/files/openvex_product_given_branch.json +++ b/backend/unittests/vex/api/files/openvex_product_given_branch.json @@ -23,6 +23,6 @@ } ], "timestamp": "2020-01-01T04:30:00+00:00", - "tooling": "SecObserve / 1.26.0", + "tooling": "SecObserve / 1.48.0", "version": 1 } \ No newline at end of file diff --git a/backend/unittests/vex/api/files/openvex_product_no_branch.json b/backend/unittests/vex/api/files/openvex_product_no_branch.json index 4fecd8d10..2f04f706b 100644 --- a/backend/unittests/vex/api/files/openvex_product_no_branch.json +++ 
b/backend/unittests/vex/api/files/openvex_product_no_branch.json @@ -63,6 +63,6 @@ } ], "timestamp": "2020-01-01T04:30:00+00:00", - "tooling": "SecObserve / 1.26.0", + "tooling": "SecObserve / 1.48.0", "version": 1 } \ No newline at end of file diff --git a/backend/unittests/vex/api/files/openvex_product_no_branch_update.json b/backend/unittests/vex/api/files/openvex_product_no_branch_update.json index 0bb818cc3..4a1234a32 100644 --- a/backend/unittests/vex/api/files/openvex_product_no_branch_update.json +++ b/backend/unittests/vex/api/files/openvex_product_no_branch_update.json @@ -64,6 +64,6 @@ } ], "timestamp": "2020-01-01T04:30:00+00:00", - "tooling": "SecObserve / 1.26.0", + "tooling": "SecObserve / 1.48.0", "version": 2 } \ No newline at end of file diff --git a/backend/unittests/vex/api/test_views_csaf.py b/backend/unittests/vex/api/test_views_csaf.py index 51f0e6cd9..7416233c0 100644 --- a/backend/unittests/vex/api/test_views_csaf.py +++ b/backend/unittests/vex/api/test_views_csaf.py @@ -31,9 +31,7 @@ def setUp(self): self.maxDiff = None @patch("django.utils.timezone.now") - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.vex.services.csaf_generator.user_has_permission_or_403") @patch("application.vex.services.csaf_generator.get_current_user") @patch("application.core.queries.observation.get_current_user") @@ -65,9 +63,7 @@ def test_csaf_document_product_no_branch( } api_client = APIClient() - response = api_client.post( - "/api/vex/csaf_document/create/", parameters, format="json" - ) + response = api_client.post("/api/vex/csaf_document/create/", parameters, format="json") self.assertEqual(200, response.status_code) self.assertEqual("application/json", response.headers["Content-Type"]) @@ -76,9 +72,7 @@ def test_csaf_document_product_no_branch( response.headers["Content-Disposition"], ) - with open( - path.dirname(__file__) + "/files/csaf_product_no_branch.json", "r" - ) as testfile: + with open(path.dirname(__file__) + "/files/csaf_product_no_branch.json", "r") as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) csaf = CSAF.objects.get(document_id_prefix="CSAF", document_base_id="2020_0001") @@ -96,9 +90,7 @@ def test_csaf_document_product_no_branch( csaf.publisher_category, ) self.assertEqual("https://vex.example.com", csaf.publisher_namespace) - self.assertEqual( - CSAF_Tracking_Status.CSAF_TRACKING_STATUS_FINAL, csaf.tracking_status - ) + self.assertEqual(CSAF_Tracking_Status.CSAF_TRACKING_STATUS_FINAL, csaf.tracking_status) self.assertEqual( CSAF_TLP_Label.CSAF_TLP_LABEL_WHITE, csaf.tlp_label, @@ -120,9 +112,7 @@ def test_csaf_document_product_no_branch( csaf_revisions = CSAF_Revision.objects.filter(csaf=csaf) self.assertEqual(1, len(csaf_revisions)) - self.assertEqual( - dateparse.parse_datetime("2020-01-01T04:30:00Z"), csaf_revisions[0].date - ) + self.assertEqual(dateparse.parse_datetime("2020-01-01T04:30:00Z"), csaf_revisions[0].date) self.assertEqual(1, csaf_revisions[0].version) self.assertEqual("Initial release", csaf_revisions[0].summary) @@ -137,9 +127,7 @@ def test_csaf_document_product_no_branch( } api_client = APIClient() - response = api_client.post( - "/api/vex/csaf_document/update/CSAF/2020_0001/", parameters, format="json" - ) + response = api_client.post("/api/vex/csaf_document/update/CSAF/2020_0001/", parameters, format="json") 
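These view tests all lean on the same golden-file technique: django.utils.timezone.now and the document UUID are pinned with unittest.mock so the generated JSON is reproducible, and the response body is then compared byte-for-byte against a fixture under files/. A minimal, self-contained sketch of that idea, using a hypothetical generate_document helper rather than SecObserve's real generator:

```python
import json
import uuid
from pathlib import Path
from unittest.mock import patch
from uuid import UUID


def generate_document(timestamp: str) -> dict:
    # Stand-in for the real generator: the serial number comes from uuid4(),
    # so tests must pin it to get byte-for-byte reproducible output.
    return {"serialNumber": f"urn:uuid:{uuid.uuid4()}", "timestamp": timestamp}


def test_against_golden_file(tmp_path: Path) -> None:
    # The golden fixture plays the role of the checked-in files/*.json documents.
    golden = tmp_path / "golden.json"
    golden.write_text(
        json.dumps(
            {
                "serialNumber": "urn:uuid:e8d7b87f-83ec-4e41-af84-25f2b1d2739d",
                "timestamp": "2020-01-01T04:30:00+00:00",
            }
        )
    )

    with patch("uuid.uuid4", return_value=UUID("e8d7b87f-83ec-4e41-af84-25f2b1d2739d")):
        document = generate_document("2020-01-01T04:30:00+00:00")

    assert json.loads(golden.read_text()) == document
```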
self.assertEqual(204, response.status_code) @@ -165,9 +153,7 @@ def test_csaf_document_product_no_branch( } api_client = APIClient() - response = api_client.post( - "/api/vex/csaf_document/update/CSAF/2020_0001/", parameters, format="json" - ) + response = api_client.post("/api/vex/csaf_document/update/CSAF/2020_0001/", parameters, format="json") self.assertEqual(200, response.status_code) self.assertEqual("application/json", response.headers["Content-Type"]) @@ -176,9 +162,7 @@ def test_csaf_document_product_no_branch( response.headers["Content-Disposition"], ) - with open( - path.dirname(__file__) + "/files/csaf_product_no_branch_update.json", "r" - ) as testfile: + with open(path.dirname(__file__) + "/files/csaf_product_no_branch_update.json", "r") as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) csaf = CSAF.objects.get(document_id_prefix="CSAF", document_base_id="2020_0001") @@ -196,9 +180,7 @@ def test_csaf_document_product_no_branch( csaf.publisher_category, ) self.assertEqual("https://vex.example.com", csaf.publisher_namespace) - self.assertEqual( - CSAF_Tracking_Status.CSAF_TRACKING_STATUS_DRAFT, csaf.tracking_status - ) + self.assertEqual(CSAF_Tracking_Status.CSAF_TRACKING_STATUS_DRAFT, csaf.tracking_status) self.assertEqual( CSAF_TLP_Label.CSAF_TLP_LABEL_AMBER, csaf.tlp_label, @@ -220,16 +202,12 @@ def test_csaf_document_product_no_branch( csaf_revisions = CSAF_Revision.objects.filter(csaf=csaf) self.assertEqual(2, len(csaf_revisions)) - self.assertEqual( - dateparse.parse_datetime("2020-02-01T04:30:00Z"), csaf_revisions[1].date - ) + self.assertEqual(dateparse.parse_datetime("2020-02-01T04:30:00Z"), csaf_revisions[1].date) self.assertEqual(2, csaf_revisions[1].version) self.assertEqual("Update", csaf_revisions[1].summary) @patch("django.utils.timezone.now") - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.vex.services.csaf_generator.user_has_permission_or_403") @patch("application.vex.services.csaf_generator.get_current_user") @patch("application.core.queries.observation.get_current_user") @@ -259,9 +237,7 @@ def test_csaf_document_product_branches( } api_client = APIClient() - response = api_client.post( - "/api/vex/csaf_document/create/", parameters, format="json" - ) + response = api_client.post("/api/vex/csaf_document/create/", parameters, format="json") self.assertEqual(200, response.status_code) self.assertEqual("application/json", response.headers["Content-Type"]) @@ -270,9 +246,7 @@ def test_csaf_document_product_branches( response.headers["Content-Disposition"], ) - with open( - path.dirname(__file__) + "/files/csaf_product_branches.json", "r" - ) as testfile: + with open(path.dirname(__file__) + "/files/csaf_product_branches.json", "r") as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) csaf = CSAF.objects.get(document_id_prefix="CSAF", document_base_id="2020_0001") @@ -290,9 +264,7 @@ def test_csaf_document_product_branches( csaf.publisher_category, ) self.assertEqual("https://vex.example.com", csaf.publisher_namespace) - self.assertEqual( - CSAF_Tracking_Status.CSAF_TRACKING_STATUS_FINAL, csaf.tracking_status - ) + self.assertEqual(CSAF_Tracking_Status.CSAF_TRACKING_STATUS_FINAL, csaf.tracking_status) self.assertEqual( CSAF_TLP_Label.CSAF_TLP_LABEL_WHITE, csaf.tlp_label, @@ -314,16 +286,12 @@ def 
test_csaf_document_product_branches( csaf_revisions = CSAF_Revision.objects.filter(csaf=csaf) self.assertEqual(1, len(csaf_revisions)) - self.assertEqual( - dateparse.parse_datetime("2020-01-01T04:30:00Z"), csaf_revisions[0].date - ) + self.assertEqual(dateparse.parse_datetime("2020-01-01T04:30:00Z"), csaf_revisions[0].date) self.assertEqual(1, csaf_revisions[0].version) self.assertEqual("Initial release", csaf_revisions[0].summary) @patch("django.utils.timezone.now") - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.vex.services.csaf_generator.user_has_permission_or_403") @patch("application.vex.services.csaf_generator.get_current_user") @patch("application.core.queries.observation.get_current_user") @@ -343,7 +311,7 @@ def test_csaf_document_product_given_branch( parameters = { "product": 2, - "branch_names": ["main"], + "branches": [2], "document_id_prefix": "CSAF", "title": "Title", "publisher_name": "Publisher", @@ -354,9 +322,7 @@ def test_csaf_document_product_given_branch( } api_client = APIClient() - response = api_client.post( - "/api/vex/csaf_document/create/", parameters, format="json" - ) + response = api_client.post("/api/vex/csaf_document/create/", parameters, format="json") self.assertEqual(200, response.status_code) self.assertEqual("application/json", response.headers["Content-Type"]) @@ -365,9 +331,7 @@ def test_csaf_document_product_given_branch( response.headers["Content-Disposition"], ) - with open( - path.dirname(__file__) + "/files/csaf_product_given_branch.json", "r" - ) as testfile: + with open(path.dirname(__file__) + "/files/csaf_product_given_branch.json", "r") as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) csaf = CSAF.objects.get(document_id_prefix="CSAF", document_base_id="2020_0001") @@ -385,9 +349,7 @@ def test_csaf_document_product_given_branch( csaf.publisher_category, ) self.assertEqual("https://vex.example.com", csaf.publisher_namespace) - self.assertEqual( - CSAF_Tracking_Status.CSAF_TRACKING_STATUS_FINAL, csaf.tracking_status - ) + self.assertEqual(CSAF_Tracking_Status.CSAF_TRACKING_STATUS_FINAL, csaf.tracking_status) self.assertEqual( CSAF_TLP_Label.CSAF_TLP_LABEL_WHITE, csaf.tlp_label, @@ -410,16 +372,12 @@ def test_csaf_document_product_given_branch( csaf_revisions = CSAF_Revision.objects.filter(csaf=csaf) self.assertEqual(1, len(csaf_revisions)) - self.assertEqual( - dateparse.parse_datetime("2020-01-01T04:30:00Z"), csaf_revisions[0].date - ) + self.assertEqual(dateparse.parse_datetime("2020-01-01T04:30:00Z"), csaf_revisions[0].date) self.assertEqual(1, csaf_revisions[0].version) self.assertEqual("Initial release", csaf_revisions[0].summary) @patch("django.utils.timezone.now") - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.vex.services.csaf_generator.user_has_permission_or_403") @patch("application.vex.services.csaf_generator.get_current_user") @patch("application.core.queries.observation.get_current_user") @@ -449,9 +407,7 @@ def test_csaf_document_given_vulnerability( } api_client = APIClient() - response = api_client.post( - "/api/vex/csaf_document/create/", parameters, format="json" - ) + response = 
api_client.post("/api/vex/csaf_document/create/", parameters, format="json") self.assertEqual(200, response.status_code) self.assertEqual("application/json", response.headers["Content-Type"]) @@ -460,9 +416,7 @@ def test_csaf_document_given_vulnerability( response.headers["Content-Disposition"], ) - with open( - path.dirname(__file__) + "/files/csaf_given_vulnerability.json", "r" - ) as testfile: + with open(path.dirname(__file__) + "/files/csaf_given_vulnerability.json", "r") as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) csaf = CSAF.objects.get(document_id_prefix="CSAF", document_base_id="2020_0001") @@ -480,9 +434,7 @@ def test_csaf_document_given_vulnerability( csaf.publisher_category, ) self.assertEqual("https://vex.example.com", csaf.publisher_namespace) - self.assertEqual( - CSAF_Tracking_Status.CSAF_TRACKING_STATUS_FINAL, csaf.tracking_status - ) + self.assertEqual(CSAF_Tracking_Status.CSAF_TRACKING_STATUS_FINAL, csaf.tracking_status) self.assertEqual( CSAF_TLP_Label.CSAF_TLP_LABEL_WHITE, csaf.tlp_label, @@ -505,9 +457,7 @@ def test_csaf_document_given_vulnerability( csaf_revisions = CSAF_Revision.objects.filter(csaf=csaf) self.assertEqual(1, len(csaf_revisions)) - self.assertEqual( - dateparse.parse_datetime("2020-01-01T04:30:00Z"), csaf_revisions[0].date - ) + self.assertEqual(dateparse.parse_datetime("2020-01-01T04:30:00Z"), csaf_revisions[0].date) self.assertEqual(1, csaf_revisions[0].version) self.assertEqual("Initial release", csaf_revisions[0].summary) @@ -522,9 +472,7 @@ def test_csaf_document_given_vulnerability( } api_client = APIClient() - response = api_client.post( - "/api/vex/csaf_document/update/CSAF/2020_0001/", parameters, format="json" - ) + response = api_client.post("/api/vex/csaf_document/update/CSAF/2020_0001/", parameters, format="json") self.assertEqual(204, response.status_code) @@ -546,9 +494,7 @@ def test_csaf_document_given_vulnerability( } api_client = APIClient() - response = api_client.post( - "/api/vex/csaf_document/update/CSAF/2020_0001/", parameters, format="json" - ) + response = api_client.post("/api/vex/csaf_document/update/CSAF/2020_0001/", parameters, format="json") self.assertEqual(200, response.status_code) self.assertEqual("application/json", response.headers["Content-Type"]) @@ -557,9 +503,7 @@ def test_csaf_document_given_vulnerability( response.headers["Content-Disposition"], ) - with open( - path.dirname(__file__) + "/files/csaf_given_vulnerability_update.json", "r" - ) as testfile: + with open(path.dirname(__file__) + "/files/csaf_given_vulnerability_update.json", "r") as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) csaf = CSAF.objects.get(document_id_prefix="CSAF", document_base_id="2020_0001") @@ -577,9 +521,7 @@ def test_csaf_document_given_vulnerability( csaf.publisher_category, ) self.assertEqual("https://vex.example.com", csaf.publisher_namespace) - self.assertEqual( - CSAF_Tracking_Status.CSAF_TRACKING_STATUS_DRAFT, csaf.tracking_status - ) + self.assertEqual(CSAF_Tracking_Status.CSAF_TRACKING_STATUS_DRAFT, csaf.tracking_status) self.assertEqual( CSAF_TLP_Label.CSAF_TLP_LABEL_AMBER, csaf.tlp_label, @@ -602,8 +544,6 @@ def test_csaf_document_given_vulnerability( csaf_revisions = CSAF_Revision.objects.filter(csaf=csaf) self.assertEqual(2, len(csaf_revisions)) - self.assertEqual( - dateparse.parse_datetime("2020-02-01T04:30:00Z"), csaf_revisions[1].date - ) + self.assertEqual(dateparse.parse_datetime("2020-02-01T04:30:00Z"), csaf_revisions[1].date) 
self.assertEqual(2, csaf_revisions[1].version) self.assertEqual("Update", csaf_revisions[1].summary) diff --git a/backend/unittests/vex/api/test_views_cyclonedx.py b/backend/unittests/vex/api/test_views_cyclonedx.py new file mode 100644 index 000000000..801436d92 --- /dev/null +++ b/backend/unittests/vex/api/test_views_cyclonedx.py @@ -0,0 +1,464 @@ +from os import path +from unittest.mock import patch +from uuid import UUID + +from django.core.management import call_command +from django.test import TestCase +from django.utils import dateparse +from rest_framework.test import APIClient + +from application.access_control.models import User +from application.core.models import Observation, Product, Product_Member +from application.licenses.models import License_Component +from application.vex.models import CycloneDX, CycloneDX_Branch, CycloneDX_Vulnerability + + +class TestCycloneDX(TestCase): + def setUp(self): + Observation.objects.all().delete() + License_Component.objects.all().delete() + Product.objects.filter(is_product_group=False).delete() + Product.objects.all().delete() + Product_Member.objects.all().delete() + User.objects.all().delete() + + call_command("loaddata", "unittests/fixtures/vex_fixtures.json") + + for observation in Observation.objects.all(): + observation.origin_component_cyclonedx_bom_link = ( + f"urn:cdx:{observation.pk}/{observation.origin_component_name_version}" + ) + observation.save() + + self.maxDiff = None + + @patch("django.utils.timezone.now") + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + @patch("application.vex.services.cyclonedx_generator.user_has_permission_or_403") + @patch("application.vex.services.cyclonedx_generator.get_current_user") + @patch("application.core.queries.observation.get_current_user") + @patch("cyclonedx.model.bom.uuid4") + def test_cyclonedx_document_product_no_branch( + self, + mock_bom_uuid4, + mock_get_current_user_1, + mock_get_current_user_2, + mock_get_user_has_permission_or_403, + mock_authenticate, + mock_now, + ): + mock_bom_uuid4.return_value = UUID("e8d7b87f-83ec-4e41-af84-25f2b1d2739d") + mock_now.return_value = dateparse.parse_datetime("2020-01-01T04:30:00Z") + vex_user = User.objects.get(username="vex_user") + mock_authenticate.return_value = vex_user, None + mock_get_current_user_1.return_value = vex_user + mock_get_current_user_2.return_value = vex_user + + # --- create --- + + parameters = { + "product": 1, + "document_id_prefix": "CycloneDX", + "author": "Author", + "manufacturer": "", + } + + api_client = APIClient() + response = api_client.post("/api/vex/cyclonedx_document/create/", parameters, format="json") + + self.assertEqual(200, response.status_code) + self.assertEqual("application/json", response.headers["Content-Type"]) + self.assertEqual( + "attachment; filename=CycloneDX_e8d7b87f-83ec-4e41-af84-25f2b1d2739d_0001.json", + response.headers["Content-Disposition"], + ) + with open(path.dirname(__file__) + "/files/cyclonedx_product_no_branch.json", "r") as testfile: + self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) + + cyclonedx = CycloneDX.objects.get( + document_id_prefix="CycloneDX", document_base_id="e8d7b87f-83ec-4e41-af84-25f2b1d2739d" + ) + self.assertEqual(vex_user, cyclonedx.user) + self.assertEqual(Product.objects.get(id=1), cyclonedx.product) + self.assertEqual(1, cyclonedx.version) + self.assertEqual( + "47171cd4e4a21045adbf70db1766c3999f99a2256f34d3c2245c76e726de1dae", + cyclonedx.content_hash, + ) + 
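The content_hash and 204-versus-200 assertions exercise the idea that a VEX document only gets a new version when its content actually changes. A hedged sketch of how such a check could work, hashing a canonical JSON rendering of the document; VexRecord and maybe_bump are hypothetical names, not the actual SecObserve implementation:

```python
import hashlib
import json
from dataclasses import dataclass


@dataclass
class VexRecord:
    """Hypothetical stand-in for the stored document row (version + content hash)."""

    version: int
    content_hash: str


def canonical_hash(document: dict) -> str:
    # Sorted keys and compact separators give a stable rendering to hash.
    rendering = json.dumps(document, sort_keys=True, separators=(",", ":"))
    return hashlib.sha256(rendering.encode("utf-8")).hexdigest()


def maybe_bump(record: VexRecord, document: dict) -> bool:
    new_hash = canonical_hash(document)
    if new_hash == record.content_hash:
        return False  # unchanged content -> the endpoint can answer 204 No Content
    record.version += 1  # changed content -> a new document version is issued
    record.content_hash = new_hash
    return True


record = VexRecord(version=1, content_hash=canonical_hash({"state": "in_triage"}))
assert maybe_bump(record, {"state": "in_triage"}) is False   # no changes, no new version
assert maybe_bump(record, {"state": "exploitable"}) is True  # changed content
assert record.version == 2
```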
self.assertEqual("Author", cyclonedx.author) + self.assertEqual("", cyclonedx.manufacturer) + self.assertEqual( + dateparse.parse_datetime("2020-01-01T04:30:00Z"), + cyclonedx.first_issued, + ) + self.assertEqual( + dateparse.parse_datetime("2020-01-01T04:30:00Z"), + cyclonedx.last_updated, + ) + + cyclonedx_branches = CycloneDX_Branch.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(0, len(cyclonedx_branches)) + + cyclonedx_vulnerabilities = CycloneDX_Vulnerability.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(0, len(cyclonedx_vulnerabilities)) + + # --- update without changes --- + + parameters = { + "author": "New Author", + "manufacturer": "", + } + + api_client = APIClient() + response = api_client.post( + "/api/vex/cyclonedx_document/update/CycloneDX/e8d7b87f-83ec-4e41-af84-25f2b1d2739d/", + parameters, + format="json", + ) + + self.assertEqual(204, response.status_code) + + # --- update with changes --- + + mock_now.return_value = dateparse.parse_datetime("2020-02-01T04:30:00Z") + + observation_1 = Observation.objects.get(id=1) + observation_1.description = "new description" + observation_1.save() + + observation_2 = Observation.objects.get(id=2) + observation_2.current_status = "Open" + observation_2.assessment_status = "" + observation_2.save() + + parameters = { + "author": "", + "manufacturer": "New manufacturer", + } + + api_client = APIClient() + response = api_client.post( + "/api/vex/cyclonedx_document/update/CycloneDX/e8d7b87f-83ec-4e41-af84-25f2b1d2739d/", + parameters, + format="json", + ) + + self.assertEqual(200, response.status_code) + self.assertEqual("application/json", response.headers["Content-Type"]) + self.assertEqual( + "attachment; filename=CycloneDX_e8d7b87f-83ec-4e41-af84-25f2b1d2739d_0002.json", + response.headers["Content-Disposition"], + ) + with open(path.dirname(__file__) + "/files/cyclonedx_product_no_branch_update.json", "r") as testfile: + self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) + + cyclonedx = CycloneDX.objects.get( + document_id_prefix="CycloneDX", document_base_id="e8d7b87f-83ec-4e41-af84-25f2b1d2739d" + ) + self.assertEqual(vex_user, cyclonedx.user) + self.assertEqual(Product.objects.get(id=1), cyclonedx.product) + self.assertEqual(2, cyclonedx.version) + self.assertEqual( + "05f17369a5ced1a829b5b09b556ffb95bbe430ff950c1cdbf6f57740e31539e6", + cyclonedx.content_hash, + ) + self.assertEqual("", cyclonedx.author) + self.assertEqual("New manufacturer", cyclonedx.manufacturer) + self.assertEqual( + dateparse.parse_datetime("2020-01-01T04:30:00Z"), + cyclonedx.first_issued, + ) + self.assertEqual( + dateparse.parse_datetime("2020-02-01T04:30:00Z"), + cyclonedx.last_updated, + ) + + cyclonedx_branches = CycloneDX_Branch.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(0, len(cyclonedx_branches)) + + cyclonedx_vulnerabilities = CycloneDX_Vulnerability.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(0, len(cyclonedx_vulnerabilities)) + + @patch("django.utils.timezone.now") + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + @patch("application.vex.services.cyclonedx_generator.user_has_permission_or_403") + @patch("application.vex.services.cyclonedx_generator.get_current_user") + @patch("application.core.queries.observation.get_current_user") + @patch("cyclonedx.model.bom.uuid4") + def test_cyclonedx_document_product_branches( + self, + mock_uuid4, + mock_get_current_user_1, + mock_get_current_user_2, + 
mock_get_user_has_permission_or_403, + mock_authenticate, + mock_now, + ): + mock_uuid4.return_value = UUID("e8d7b87f-83ec-4e41-af84-25f2b1d2739d") + mock_now.return_value = dateparse.parse_datetime("2020-01-01T04:30:00Z") + vex_user = User.objects.get(username="vex_user") + mock_authenticate.return_value = vex_user, None + mock_get_current_user_1.return_value = vex_user + mock_get_current_user_2.return_value = vex_user + + parameters = { + "product": 2, + "document_id_prefix": "CycloneDX", + "author": "", + "manufacturer": "Manufacturer", + } + + api_client = APIClient() + response = api_client.post("/api/vex/cyclonedx_document/create/", parameters, format="json") + + self.assertEqual(200, response.status_code) + self.assertEqual("application/json", response.headers["Content-Type"]) + self.assertEqual( + "attachment; filename=CycloneDX_e8d7b87f-83ec-4e41-af84-25f2b1d2739d_0001.json", + response.headers["Content-Disposition"], + ) + with open(path.dirname(__file__) + "/files/cyclonedx_product_branches.json", "r") as testfile: + self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) + + cyclonedx = CycloneDX.objects.get( + document_id_prefix="CycloneDX", document_base_id="e8d7b87f-83ec-4e41-af84-25f2b1d2739d" + ) + self.assertEqual(vex_user, cyclonedx.user) + self.assertEqual(Product.objects.get(id=2), cyclonedx.product) + self.assertEqual(1, cyclonedx.version) + self.assertEqual( + "f6ace394bcafbd4c857f74a507185d13a37c2d191e006c132979d933263bc43c", + cyclonedx.content_hash, + ) + self.assertEqual("", cyclonedx.author) + self.assertEqual("Manufacturer", cyclonedx.manufacturer) + self.assertEqual( + dateparse.parse_datetime("2020-01-01T04:30:00Z"), + cyclonedx.first_issued, + ) + self.assertEqual( + dateparse.parse_datetime("2020-01-01T04:30:00Z"), + cyclonedx.last_updated, + ) + + cyclonedx_branches = CycloneDX_Branch.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(0, len(cyclonedx_branches)) + + cyclonedx_vulnerabilities = CycloneDX_Vulnerability.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(0, len(cyclonedx_vulnerabilities)) + + @patch("django.utils.timezone.now") + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + @patch("application.vex.services.cyclonedx_generator.user_has_permission_or_403") + @patch("application.vex.services.cyclonedx_generator.get_current_user") + @patch("application.core.queries.observation.get_current_user") + @patch("cyclonedx.model.bom.uuid4") + def test_cyclonedx_document_product_given_branch( + self, + mock_bom_uuid4, + mock_get_current_user_1, + mock_get_current_user_2, + mock_get_user_has_permission_or_403, + mock_authenticate, + mock_now, + ): + mock_bom_uuid4.return_value = UUID("e8d7b87f-83ec-4e41-af84-25f2b1d2739d") + mock_now.return_value = dateparse.parse_datetime("2020-01-01T04:30:00Z") + vex_user = User.objects.get(username="vex_user") + mock_authenticate.return_value = vex_user, None + mock_get_current_user_1.return_value = vex_user + mock_get_current_user_2.return_value = vex_user + + parameters = { + "product": 2, + "branches": [2], + "document_id_prefix": "CycloneDX", + "author": "", + "manufacturer": "Manufacturer", + } + + api_client = APIClient() + response = api_client.post("/api/vex/cyclonedx_document/create/", parameters, format="json") + + self.assertEqual(200, response.status_code) + self.assertEqual("application/json", response.headers["Content-Type"]) + self.assertEqual( + "attachment; 
filename=CycloneDX_e8d7b87f-83ec-4e41-af84-25f2b1d2739d_0001.json", + response.headers["Content-Disposition"], + ) + with open(path.dirname(__file__) + "/files/cyclonedx_product_given_branch.json", "r") as testfile: + self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) + + cyclonedx = CycloneDX.objects.get( + document_id_prefix="CycloneDX", document_base_id="e8d7b87f-83ec-4e41-af84-25f2b1d2739d" + ) + self.assertEqual(vex_user, cyclonedx.user) + self.assertEqual(Product.objects.get(id=2), cyclonedx.product) + self.assertEqual(1, cyclonedx.version) + self.assertEqual( + "44a6d3ef47b4b718c1fa3a5439b16832e794c4b6fc4c1e4af9fbe427a258b36b", + cyclonedx.content_hash, + ) + self.assertEqual("", cyclonedx.author) + self.assertEqual("Manufacturer", cyclonedx.manufacturer) + self.assertEqual( + dateparse.parse_datetime("2020-01-01T04:30:00Z"), + cyclonedx.first_issued, + ) + self.assertEqual( + dateparse.parse_datetime("2020-01-01T04:30:00Z"), + cyclonedx.last_updated, + ) + + cyclonedx_branches = CycloneDX_Branch.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(1, len(cyclonedx_branches)) + self.assertEqual(2, cyclonedx_branches[0].branch.pk) + + cyclonedx_vulnerabilities = CycloneDX_Vulnerability.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(0, len(cyclonedx_vulnerabilities)) + + @patch("django.utils.timezone.now") + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") + @patch("application.vex.services.cyclonedx_generator.user_has_permission_or_403") + @patch("application.vex.services.cyclonedx_generator.get_current_user") + @patch("application.core.queries.observation.get_current_user") + @patch("cyclonedx.model.bom.uuid4") + def test_cyclonedx_document_given_vulnerability( + self, + mock_bom_uuid4, + mock_get_current_user_1, + mock_get_current_user_2, + mock_get_user_has_permission_or_403, + mock_authenticate, + mock_now, + ): + mock_bom_uuid4.return_value = UUID("e8d7b87f-83ec-4e41-af84-25f2b1d2739d") + mock_now.return_value = dateparse.parse_datetime("2020-01-01T04:30:00Z") + vex_user = User.objects.get(username="vex_user") + mock_authenticate.return_value = vex_user, None + mock_get_current_user_1.return_value = vex_user + mock_get_current_user_2.return_value = vex_user + + parameters = { + "vulnerability_names": ["CVE-vulnerability_2"], + "document_id_prefix": "CycloneDX", + "author": "Author", + "manufacturer": "", + } + + api_client = APIClient() + response = api_client.post("/api/vex/cyclonedx_document/create/", parameters, format="json") + + self.assertEqual(200, response.status_code) + self.assertEqual("application/json", response.headers["Content-Type"]) + self.assertEqual( + "attachment; filename=CycloneDX_e8d7b87f-83ec-4e41-af84-25f2b1d2739d_0001.json", + response.headers["Content-Disposition"], + ) + with open(path.dirname(__file__) + "/files/cyclonedx_given_vulnerability.json", "r") as testfile: + self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) + + cyclonedx = CycloneDX.objects.get( + document_id_prefix="CycloneDX", document_base_id="e8d7b87f-83ec-4e41-af84-25f2b1d2739d" + ) + self.assertEqual(vex_user, cyclonedx.user) + self.assertEqual(None, cyclonedx.product) + self.assertEqual(1, cyclonedx.version) + self.assertEqual( + "7868880f131bfd05255c9a4bd471818f2e0025ad969e64b4c8f9a325e832c0f7", + cyclonedx.content_hash, + ) + self.assertEqual("Author", cyclonedx.author) + self.assertEqual("", cyclonedx.manufacturer) + self.assertEqual( + 
dateparse.parse_datetime("2020-01-01T04:30:00Z"), + cyclonedx.first_issued, + ) + self.assertEqual( + dateparse.parse_datetime("2020-01-01T04:30:00Z"), + cyclonedx.last_updated, + ) + + cyclonedx_branches = CycloneDX_Branch.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(0, len(cyclonedx_branches)) + + cyclonedx_vulnerabilities = CycloneDX_Vulnerability.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(1, len(cyclonedx_vulnerabilities)) + self.assertEqual("CVE-vulnerability_2", cyclonedx_vulnerabilities[0].name) + + # --- update without changes --- + + parameters = { + "author": "New Author", + "manufacturer": "", + } + + api_client = APIClient() + response = api_client.post( + "/api/vex/cyclonedx_document/update/CycloneDX/e8d7b87f-83ec-4e41-af84-25f2b1d2739d/", + parameters, + format="json", + ) + + self.assertEqual(204, response.status_code) + + # --- update with changes --- + + mock_now.return_value = dateparse.parse_datetime("2020-02-01T04:30:00Z") + + observation_2 = Observation.objects.get(id=2) + observation_2.current_status = "Open" + observation_2.assessment_status = "" + observation_2.save() + + parameters = { + "author": "", + "manufacturer": "Manufacturer", + } + + api_client = APIClient() + response = api_client.post( + "/api/vex/cyclonedx_document/update/CycloneDX/e8d7b87f-83ec-4e41-af84-25f2b1d2739d/", + parameters, + format="json", + ) + + self.assertEqual(200, response.status_code) + self.assertEqual("application/json", response.headers["Content-Type"]) + self.assertEqual( + "attachment; filename=CycloneDX_e8d7b87f-83ec-4e41-af84-25f2b1d2739d_0002.json", + response.headers["Content-Disposition"], + ) + with open( + path.dirname(__file__) + "/files/cyclonedx_given_vulnerability_update.json", + "r", + ) as testfile: + self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) + + cyclonedx = CycloneDX.objects.get( + document_id_prefix="CycloneDX", document_base_id="e8d7b87f-83ec-4e41-af84-25f2b1d2739d" + ) + self.assertEqual(vex_user, cyclonedx.user) + self.assertEqual(None, cyclonedx.product) + self.assertEqual(2, cyclonedx.version) + self.assertEqual( + "a00b1db24ebf636052927506116feaae0e538b7e3d3f6744a5ecab840e1897ed", + cyclonedx.content_hash, + ) + self.assertEqual("", cyclonedx.author) + self.assertEqual("Manufacturer", cyclonedx.manufacturer) + self.assertEqual( + dateparse.parse_datetime("2020-01-01T04:30:00Z"), + cyclonedx.first_issued, + ) + self.assertEqual( + dateparse.parse_datetime("2020-02-01T04:30:00Z"), + cyclonedx.last_updated, + ) + + cyclonedx_branches = CycloneDX_Branch.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(0, len(cyclonedx_branches)) + + cyclonedx_vulnerabilities = CycloneDX_Vulnerability.objects.filter(cyclonedx=cyclonedx) + self.assertEqual(1, len(cyclonedx_vulnerabilities)) + self.assertEqual("CVE-vulnerability_2", cyclonedx_vulnerabilities[0].name) diff --git a/backend/unittests/vex/api/test_views_openvex.py b/backend/unittests/vex/api/test_views_openvex.py index 84215415d..67fee8420 100644 --- a/backend/unittests/vex/api/test_views_openvex.py +++ b/backend/unittests/vex/api/test_views_openvex.py @@ -26,9 +26,7 @@ def setUp(self): self.maxDiff = None @patch("django.utils.timezone.now") - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.vex.services.openvex_generator.user_has_permission_or_403") 
@patch("application.vex.services.openvex_generator.get_current_user") @patch("application.core.queries.observation.get_current_user") @@ -57,9 +55,7 @@ def test_openvex_document_product_no_branch( } api_client = APIClient() - response = api_client.post( - "/api/vex/openvex_document/create/", parameters, format="json" - ) + response = api_client.post("/api/vex/openvex_document/create/", parameters, format="json") self.assertEqual(200, response.status_code) self.assertEqual("application/json", response.headers["Content-Type"]) @@ -67,14 +63,10 @@ def test_openvex_document_product_no_branch( "attachment; filename=OpenVEX_2020_0001_0001.json", response.headers["Content-Disposition"], ) - with open( - path.dirname(__file__) + "/files/openvex_product_no_branch.json", "r" - ) as testfile: + with open(path.dirname(__file__) + "/files/openvex_product_no_branch.json", "r") as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) - openvex = OpenVEX.objects.get( - document_id_prefix="OpenVEX", document_base_id="2020_0001" - ) + openvex = OpenVEX.objects.get(document_id_prefix="OpenVEX", document_base_id="2020_0001") self.assertEqual(vex_user, openvex.user) self.assertEqual(Product.objects.get(id=1), openvex.product) self.assertEqual(1, openvex.version) @@ -147,14 +139,10 @@ def test_openvex_document_product_no_branch( "attachment; filename=OpenVEX_2020_0001_0002.json", response.headers["Content-Disposition"], ) - with open( - path.dirname(__file__) + "/files/openvex_product_no_branch_update.json", "r" - ) as testfile: + with open(path.dirname(__file__) + "/files/openvex_product_no_branch_update.json", "r") as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) - openvex = OpenVEX.objects.get( - document_id_prefix="OpenVEX", document_base_id="2020_0001" - ) + openvex = OpenVEX.objects.get(document_id_prefix="OpenVEX", document_base_id="2020_0001") self.assertEqual(vex_user, openvex.user) self.assertEqual(Product.objects.get(id=1), openvex.product) self.assertEqual(2, openvex.version) @@ -181,9 +169,7 @@ def test_openvex_document_product_no_branch( self.assertEqual(0, len(openvex_vulnerabilities)) @patch("django.utils.timezone.now") - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.vex.services.openvex_generator.user_has_permission_or_403") @patch("application.vex.services.openvex_generator.get_current_user") @patch("application.core.queries.observation.get_current_user") @@ -210,9 +196,7 @@ def test_openvex_document_product_branches( } api_client = APIClient() - response = api_client.post( - "/api/vex/openvex_document/create/", parameters, format="json" - ) + response = api_client.post("/api/vex/openvex_document/create/", parameters, format="json") self.assertEqual(200, response.status_code) self.assertEqual("application/json", response.headers["Content-Type"]) @@ -220,14 +204,10 @@ def test_openvex_document_product_branches( "attachment; filename=OpenVEX_2020_0001_0001.json", response.headers["Content-Disposition"], ) - with open( - path.dirname(__file__) + "/files/openvex_product_branches.json", "r" - ) as testfile: + with open(path.dirname(__file__) + "/files/openvex_product_branches.json", "r") as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) - openvex = OpenVEX.objects.get( - document_id_prefix="OpenVEX", 
document_base_id="2020_0001" - ) + openvex = OpenVEX.objects.get(document_id_prefix="OpenVEX", document_base_id="2020_0001") self.assertEqual(vex_user, openvex.user) self.assertEqual(Product.objects.get(id=2), openvex.product) self.assertEqual(1, openvex.version) @@ -254,9 +234,7 @@ def test_openvex_document_product_branches( self.assertEqual(0, len(openvex_vulnerabilities)) @patch("django.utils.timezone.now") - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.vex.services.openvex_generator.user_has_permission_or_403") @patch("application.vex.services.openvex_generator.get_current_user") @patch("application.core.queries.observation.get_current_user") @@ -284,9 +262,7 @@ def test_openvex_document_product_given_branch( } api_client = APIClient() - response = api_client.post( - "/api/vex/openvex_document/create/", parameters, format="json" - ) + response = api_client.post("/api/vex/openvex_document/create/", parameters, format="json") self.assertEqual(200, response.status_code) self.assertEqual("application/json", response.headers["Content-Type"]) @@ -294,14 +270,10 @@ def test_openvex_document_product_given_branch( "attachment; filename=OpenVEX_2020_0001_0001.json", response.headers["Content-Disposition"], ) - with open( - path.dirname(__file__) + "/files/openvex_product_given_branch.json", "r" - ) as testfile: + with open(path.dirname(__file__) + "/files/openvex_product_given_branch.json", "r") as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) - openvex = OpenVEX.objects.get( - document_id_prefix="OpenVEX", document_base_id="2020_0001" - ) + openvex = OpenVEX.objects.get(document_id_prefix="OpenVEX", document_base_id="2020_0001") self.assertEqual(vex_user, openvex.user) self.assertEqual(Product.objects.get(id=2), openvex.product) self.assertEqual(1, openvex.version) @@ -329,9 +301,7 @@ def test_openvex_document_product_given_branch( self.assertEqual(0, len(openvex_vulnerabilities)) @patch("django.utils.timezone.now") - @patch( - "application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate" - ) + @patch("application.access_control.services.api_token_authentication.APITokenAuthentication.authenticate") @patch("application.vex.services.openvex_generator.user_has_permission_or_403") @patch("application.vex.services.openvex_generator.get_current_user") @patch("application.core.queries.observation.get_current_user") @@ -358,9 +328,7 @@ def test_openvex_document_given_vulnerability( } api_client = APIClient() - response = api_client.post( - "/api/vex/openvex_document/create/", parameters, format="json" - ) + response = api_client.post("/api/vex/openvex_document/create/", parameters, format="json") self.assertEqual(200, response.status_code) self.assertEqual("application/json", response.headers["Content-Type"]) @@ -368,14 +336,10 @@ def test_openvex_document_given_vulnerability( "attachment; filename=OpenVEX_2020_0001_0001.json", response.headers["Content-Disposition"], ) - with open( - path.dirname(__file__) + "/files/openvex_given_vulnerability.json", "r" - ) as testfile: + with open(path.dirname(__file__) + "/files/openvex_given_vulnerability.json", "r") as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) - openvex = OpenVEX.objects.get( - document_id_prefix="OpenVEX", document_base_id="2020_0001" - 
) + openvex = OpenVEX.objects.get(document_id_prefix="OpenVEX", document_base_id="2020_0001") self.assertEqual(vex_user, openvex.user) self.assertEqual(None, openvex.product) self.assertEqual(1, openvex.version) @@ -451,9 +415,7 @@ def test_openvex_document_given_vulnerability( ) as testfile: self.assertEqual(testfile.read(), response._container[0].decode("utf-8")) - openvex = OpenVEX.objects.get( - document_id_prefix="OpenVEX", document_base_id="2020_0001" - ) + openvex = OpenVEX.objects.get(document_id_prefix="OpenVEX", document_base_id="2020_0001") self.assertEqual(vex_user, openvex.user) self.assertEqual(None, openvex.product) self.assertEqual(2, openvex.version) diff --git a/backend/unittests/vex/services/basetest_vex_import.py b/backend/unittests/vex/services/basetest_vex_import.py index 19a230697..6aeee3e27 100644 --- a/backend/unittests/vex/services/basetest_vex_import.py +++ b/backend/unittests/vex/services/basetest_vex_import.py @@ -2,41 +2,36 @@ from unittest import TestCase from application.core.models import Observation, Product -from application.core.types import Status -from application.import_observations.models import Parser +from application.core.types import Status, VEX_Justification from application.import_observations.services.import_observations import ( FileUploadParameters, file_upload_observations, ) +from application.licenses.models import License_Component from application.vex.models import VEX_Document, VEX_Statement -from application.vex.types import VEX_Document_Type, VEX_Justification, VEX_Status +from application.vex.types import VEX_Document_Type, VEX_Status class BaseTestVEXImport(TestCase): def load_vex_test(self, short: bool = False) -> None: - purl_vex_test = ( - "pkg:github/MaibornWolff/VEX_Test" - if short - else "pkg:github/MaibornWolff/VEX_Test@v1.7.0" - ) + purl_vex_test = "pkg:github/SecObserve/VEX_Test" if short else "pkg:github/SecObserve/VEX_Test@v1.7.0" product = Product.objects.create( purl=purl_vex_test, name="VEX_Test", description="VEX Test Product", ) - filename = ( - "/files/trivy_poetry_short.json" if short else "/files/trivy_poetry.json" - ) + filename = "/files/trivy_poetry_short.json" if short else "/files/trivy_poetry.json" with open(path.dirname(__file__) + filename) as testfile: file_upload_parameter = FileUploadParameters( product=product, branch=None, file=testfile, - service="", + service_name="", docker_image_name_tag="", endpoint_url="", kubernetes_cluster="", suppress_licenses=False, + sbom=False, ) file_upload_observations(file_upload_parameter) @@ -44,32 +39,25 @@ def tearDown(self): VEX_Statement.objects.all().delete() VEX_Document.objects.all().delete() Observation.objects.filter(product__name="VEX_Test").delete() + License_Component.objects.filter(product__name="VEX_Test").delete() Product.objects.filter(name="VEX_Test").delete() - def check_vex_document( - self, vex_document: VEX_Document, document_type: str, short: bool = False - ) -> None: + def check_vex_document(self, vex_document: VEX_Document, document_type: str, short: bool = False) -> None: self.assertEqual(document_type, vex_document.type) self.assertEqual("1", vex_document.version) - self.assertEqual("vendor", vex_document.role) + if document_type == VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF: - self.assertEqual( - "2024-07-14T11:12:19.671904+00:00", - vex_document.initial_release_date.isoformat(), - ) - self.assertEqual( - "2024-07-14T11:12:19.671919+00:00", - vex_document.current_release_date.isoformat(), - ) - else: - self.assertEqual( - 
"2024-07-14T11:17:57.668593+00:00", - vex_document.initial_release_date.isoformat(), - ) - self.assertEqual( - "2024-07-14T11:17:57.668609+00:00", - vex_document.current_release_date.isoformat(), - ) + self.assertEqual("vendor", vex_document.role) + self.assertEqual("2024-07-14T11:12:19.671904+00:00", vex_document.initial_release_date.isoformat()) + self.assertEqual("2024-07-14T11:12:19.671919+00:00", vex_document.current_release_date.isoformat()) + elif document_type == VEX_Document_Type.VEX_DOCUMENT_TYPE_OPENVEX: + self.assertEqual("vendor", vex_document.role) + self.assertEqual("2024-07-14T11:17:57.668593+00:00", vex_document.initial_release_date.isoformat()) + self.assertEqual("2024-07-14T11:17:57.668609+00:00", vex_document.current_release_date.isoformat()) + elif document_type == VEX_Document_Type.VEX_DOCUMENT_TYPE_CYCLONEDX: + self.assertEqual("", vex_document.role) + self.assertEqual("2024-07-14T11:17:57.668593+00:00", vex_document.initial_release_date.isoformat()) + self.assertEqual("2024-07-14T11:17:57.668593+00:00", vex_document.current_release_date.isoformat()) vex_statements = VEX_Statement.objects.filter(document=vex_document) self.assertEqual(13, len(vex_statements)) @@ -78,20 +66,21 @@ def check_vex_document( found_0727 = False found_4340 = False - purl_vex_test = ( - "pkg:github/MaibornWolff/VEX_Test" - if short - else "pkg:github/MaibornWolff/VEX_Test@v1.7.0" - ) - purl_cryptography = ( - "pkg:pypi/cryptography" if short else "pkg:pypi/cryptography@41.0.5" - ) + purl_vex_test = "pkg:github/SecObserve/VEX_Test" if short else "pkg:github/SecObserve/VEX_Test@v1.7.0" + purl_cryptography = "pkg:pypi/cryptography" if short else "pkg:pypi/cryptography@41.0.5" purl_sqlparse = "pkg:pypi/sqlparse" if short else "pkg:pypi/sqlparse@0.4.4" for vex_statement in vex_statements: - if ( - vex_statement.vulnerability_id == "CVE-2023-49083" - and vex_statement.component_purl == purl_cryptography + if vex_statement.vulnerability_id == "CVE-2023-49083" and ( + ( + vex_statement.component_purl + and vex_statement.component_purl == purl_cryptography + or ( + vex_statement.component_cyclonedx_bom_link + and vex_statement.component_cyclonedx_bom_link + == "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/cryptography@41.0.5" + ) + ) ): found_49083 = True self.assertTrue( @@ -99,29 +88,35 @@ def check_vex_document( "cryptography is a package designed to expose cryptographic primitives and recipes to Python developers." 
) ) - self.assertEqual( - VEX_Status.VEX_STATUS_NOT_AFFECTED, vex_statement.status - ) - self.assertEqual( - VEX_Justification.STATUS_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, + self.assertEqual(VEX_Status.VEX_STATUS_NOT_AFFECTED, vex_statement.status) + self.assertIn( vex_statement.justification, + [ + VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, + VEX_Justification.JUSTIFICATION_CYCLONEDX_REQUIRES_CONFIGURATION, + ], ) if document_type == VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF: self.assertEqual("", vex_statement.impact) + else: + self.assertEqual("Not affected for VEX test case", vex_statement.impact) + + self.assertEqual("", vex_statement.remediation) + if vex_statement.component_cyclonedx_bom_link: + self.assertEqual("", vex_statement.product_purl) else: self.assertEqual( - "Not affected for VEX test case", vex_statement.impact + purl_vex_test, + vex_statement.product_purl, ) - self.assertEqual("", vex_statement.remediation) - self.assertEqual( - purl_vex_test, - vex_statement.product_purl, + if vex_statement.vulnerability_id == "CVE-2024-0727" and ( + (vex_statement.component_purl and vex_statement.component_purl == purl_cryptography) + or ( + vex_statement.component_cyclonedx_bom_link + and vex_statement.component_cyclonedx_bom_link + == "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/cryptography@41.0.5" ) - - if ( - vex_statement.vulnerability_id == "CVE-2024-0727" - and vex_statement.component_purl == purl_cryptography ): found_0727 = True self.assertTrue( @@ -135,57 +130,53 @@ def check_vex_document( vex_statement.justification, ) self.assertEqual("", vex_statement.impact) - self.assertEqual( - "Upgrade cryptography to version 42.0.2", vex_statement.remediation - ) - self.assertEqual( - purl_vex_test, - vex_statement.product_purl, - ) + self.assertEqual("Upgrade cryptography to version 42.0.2", vex_statement.remediation) + if vex_statement.component_cyclonedx_bom_link: + self.assertEqual("", vex_statement.product_purl) + else: + self.assertEqual( + purl_vex_test, + vex_statement.product_purl, + ) - if ( - vex_statement.vulnerability_id == "CVE-2024-4340" - and vex_statement.component_purl == purl_sqlparse + if vex_statement.vulnerability_id == "CVE-2024-4340" and ( + (vex_statement.component_purl and vex_statement.component_purl == purl_sqlparse) + or ( + vex_statement.component_cyclonedx_bom_link + and vex_statement.component_cyclonedx_bom_link + == "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/sqlparse@0.4.4" + ) ): found_4340 = True self.assertTrue( - vex_statement.description.startswith( - "Passing a heavily nested list to sqlparse.parse()" - ) - ) - self.assertEqual( - VEX_Status.VEX_STATUS_UNDER_INVESTIGATION, vex_statement.status + vex_statement.description.startswith("Passing a heavily nested list to sqlparse.parse()") ) + self.assertEqual(VEX_Status.VEX_STATUS_UNDER_INVESTIGATION, vex_statement.status) self.assertEqual( "", vex_statement.justification, ) self.assertEqual("", vex_statement.impact) self.assertEqual("", vex_statement.remediation) - self.assertEqual( - purl_vex_test, - vex_statement.product_purl, - ) + if vex_statement.component_cyclonedx_bom_link: + self.assertEqual("", vex_statement.product_purl) + else: + self.assertEqual( + purl_vex_test, + vex_statement.product_purl, + ) self.assertTrue(found_49083) self.assertTrue(found_0727) self.assertTrue(found_4340) def check_product(self, short: bool = False) -> None: - purl_vex_test = ( - "pkg:github/MaibornWolff/VEX_Test" - if short - else 
"pkg:github/MaibornWolff/VEX_Test@v1.7.0" - ) - purl_cryptography = ( - "pkg:pypi/cryptography" if short else "pkg:pypi/cryptography@41.0.5" - ) + purl_vex_test = "pkg:github/SecObserve/VEX_Test" if short else "pkg:github/SecObserve/VEX_Test@v1.7.0" + purl_cryptography = "pkg:pypi/cryptography" if short else "pkg:pypi/cryptography@41.0.5" purl_sqlparse = "pkg:pypi/sqlparse" if short else "pkg:pypi/sqlparse@0.4.4" product = Product.objects.get(purl=purl_vex_test) - observations = Observation.objects.filter( - product=product, current_status=Status.STATUS_OPEN - ) + observations = Observation.objects.filter(product=product, current_status=Status.STATUS_OPEN) self.assertEqual(11, len(observations)) observation = Observation.objects.get( @@ -195,13 +186,19 @@ def check_product(self, short: bool = False) -> None: ) self.assertEqual(Status.STATUS_NOT_AFFECTED, observation.current_status) self.assertEqual(Status.STATUS_NOT_AFFECTED, observation.vex_status) - self.assertEqual( - VEX_Justification.STATUS_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, + self.assertIn( observation.current_vex_justification, + [ + VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, + VEX_Justification.JUSTIFICATION_CYCLONEDX_REQUIRES_CONFIGURATION, + ], ) - self.assertEqual( - VEX_Justification.STATUS_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, + self.assertIn( observation.vex_vex_justification, + [ + VEX_Justification.JUSTIFICATION_VULNERABLE_CODE_CANNOT_BE_CONTROLLED_BY_ADVERSARY, + VEX_Justification.JUSTIFICATION_CYCLONEDX_REQUIRES_CONFIGURATION, + ], ) observation = Observation.objects.get( diff --git a/backend/unittests/vex/services/files/cyclonedx_vex_integrated.json b/backend/unittests/vex/services/files/cyclonedx_vex_integrated.json new file mode 100644 index 000000000..e52f83532 --- /dev/null +++ b/backend/unittests/vex/services/files/cyclonedx_vex_integrated.json @@ -0,0 +1,270 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.6", + "serialNumber": "urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79", + "version": 1, + "metadata": { + "timestamp": "2024-07-14T11:17:57.668593+00:00", + "authors": [ + { + "name": "SecObserve" + } + ], + "component": { + "bom-ref": "product", + "type": "application", + "group": "SecObserve", + "name": "VEX_Test", + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" + } + }, + "components": [ + { + "bom-ref": "comp-cryptography", + "type": "library", + "name": "cryptography", + "version": "41.0.5", + "purl": "pkg:pypi/cryptography@41.0.5" + }, + { + "bom-ref": "comp-gunicorn", + "type": "library", + "name": "gunicorn", + "version": "21.2.0", + "purl": "pkg:pypi/gunicorn@21.2.0" + }, + { + "bom-ref": "comp-black", + "type": "library", + "name": "black", + "version": "23.11.0", + "purl": "pkg:pypi/black@23.11.0" + }, + { + "bom-ref": "comp-django", + "type": "library", + "name": "django", + "version": "4.2.8", + "purl": "pkg:pypi/django@4.2.8" + }, + { + "bom-ref": "comp-werkzeug", + "type": "library", + "name": "werkzeug", + "version": "3.0.1", + "purl": "pkg:pypi/werkzeug@3.0.1" + }, + { + "bom-ref": "comp-requests", + "type": "library", + "name": "requests", + "version": "2.31.0", + "purl": "pkg:pypi/requests@2.31.0" + }, + { + "bom-ref": "comp-idna", + "type": "library", + "name": "idna", + "version": "3.6", + "purl": "pkg:pypi/idna@3.6" + }, + { + "bom-ref": "comp-urllib3", + "type": "library", + "name": "urllib3", + "version": "2.1.0", + "purl": "pkg:pypi/urllib3@2.1.0" + }, + { + "bom-ref": "comp-sqlparse", + "type": "library", + 
"name": "sqlparse", + "version": "0.4.4", + "purl": "pkg:pypi/sqlparse@0.4.4" + } + ], + "vulnerabilities": [ + { + "id": "CVE-2023-49083", + "description": "cryptography is a package designed to expose cryptographic primitives and recipes to Python developers. Calling `load_pem_pkcs7_certificates` or `load_der_pkcs7_certificates` could lead to a NULL-pointer dereference and segfault. Exploitation of this vulnerability poses a serious risk of Denial of Service (DoS) for any application attempting to deserialize a PKCS7 blob/certificate. The consequences extend to potential disruptions in system availability and stability. This vulnerability has been patched in version 41.0.6.", + "analysis": { + "state": "not_affected", + "justification": "requires_configuration", + "response": [], + "detail": "Not affected for VEX test case" + }, + "affects": [ + { + "ref": "comp-cryptography" + } + ] + }, + { + "id": "CVE-2023-50782", + "description": "A flaw was found in the python-cryptography package. This issue may allow a remote attacker to decrypt captured messages in TLS servers that use RSA key exchanges, which may lead to exposure of confidential or sensitive data.", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade cryptography to version 42.0.0", + "affects": [ + { + "ref": "comp-cryptography" + } + ] + }, + { + "id": "CVE-2024-0727", + "description": "Issue summary: Processing a maliciously formatted PKCS12 file may lead OpenSSL to crash leading to a potential Denial of Service attack", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade cryptography to version 42.0.2", + "affects": [ + { + "ref": "comp-cryptography" + } + ] + }, + { + "id": "CVE-2024-1135", + "description": "Gunicorn fails to properly validate Transfer-Encoding headers, leading to HTTP Request Smuggling (HRS) vulnerabilities...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade gunicorn to version 22.0.0", + "affects": [ + { + "ref": "comp-gunicorn" + } + ] + }, + { + "id": "CVE-2024-21503", + "description": "Versions of the package black before 24.3.0 are vulnerable to Regular Expression Denial of Service (ReDoS) via the lines_with_leading_tabs_expanded function...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade black to version 24.3.0", + "affects": [ + { + "ref": "comp-black" + } + ] + }, + { + "id": "CVE-2024-24680", + "description": "An issue was discovered in Django 3.2 before 3.2.24, 4.2 before 4.2.10, and Django 5.0 before 5.0.2...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade django to version 3.2.24, 4.2.10, 5.0.2", + "affects": [ + { + "ref": "comp-django" + } + ] + }, + { + "id": "CVE-2024-26130", + "description": "cryptography is a package designed to expose cryptographic primitives and recipes to Python developers...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade cryptography to version 42.0.4", + "affects": [ + { + "ref": "comp-cryptography" + } + ] + }, + { + "id": "CVE-2024-27351", + "description": "In Django 3.2 before 3.2.25, 4.2 before 4.2.11, and 5.0 before 5.0.3...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade django to version 3.2.25, 4.2.11, 5.0.3", + "affects": [ + { + "ref": "comp-django" + } + ] + }, + { + "id": "CVE-2024-34069", + "description": "Werkzeug is a comprehensive WSGI web 
application library. The debugger in affected versions of Werkzeug can allow an attacker to execute code...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade werkzeug to version 3.0.3", + "affects": [ + { + "ref": "comp-werkzeug" + } + ] + }, + { + "id": "CVE-2024-35195", + "description": "Requests is a HTTP library. Prior to 2.32.0, when making requests through a Requests Session, if the first request is made with verify=False...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade requests to version 2.32.0", + "affects": [ + { + "ref": "comp-requests" + } + ] + }, + { + "id": "CVE-2024-3651", + "description": "A flaw was found in the python-idna library...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade idna to version 3.7", + "affects": [ + { + "ref": "comp-idna" + } + ] + }, + { + "id": "CVE-2024-37891", + "description": "urllib3 is a user-friendly HTTP client library for Python...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade urllib3 to version 1.26.19, 2.2.2", + "affects": [ + { + "ref": "comp-urllib3" + } + ] + }, + { + "id": "CVE-2024-4340", + "description": "Passing a heavily nested list to sqlparse.parse() leads to a Denial of Service due to RecursionError.", + "analysis": { + "state": "in_triage", + "response": [] + }, + "affects": [ + { + "ref": "comp-sqlparse" + } + ] + } + ] +} \ No newline at end of file diff --git a/backend/unittests/vex/services/files/cyclonedx_vex_test dedicated.json b/backend/unittests/vex/services/files/cyclonedx_vex_test dedicated.json new file mode 100644 index 000000000..5ae8d9189 --- /dev/null +++ b/backend/unittests/vex/services/files/cyclonedx_vex_test dedicated.json @@ -0,0 +1,198 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.6", + "serialNumber": "urn:uuid:fa9f9148-2935-422a-b058-20afa8cafa82", + "version": 1, + "metadata": { + "timestamp": "2024-07-14T11:17:57.668593+00:00", + "authors": [ + { + "name": "SecObserve" + } + ] + }, + "vulnerabilities": [ + { + "id": "CVE-2023-49083", + "description": "cryptography is a package designed to expose cryptographic primitives and recipes to Python developers. Calling `load_pem_pkcs7_certificates` or `load_der_pkcs7_certificates` could lead to a NULL-pointer dereference and segfault. Exploitation of this vulnerability poses a serious risk of Denial of Service (DoS) for any application attempting to deserialize a PKCS7 blob/certificate. The consequences extend to potential disruptions in system availability and stability. This vulnerability has been patched in version 41.0.6.", + "analysis": { + "state": "not_affected", + "justification": "requires_configuration", + "response": [], + "detail": "Not affected for VEX test case" + }, + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/cryptography@41.0.5" + } + ] + }, + { + "id": "CVE-2023-50782", + "description": "A flaw was found in the python-cryptography package. 
This issue may allow a remote attacker to decrypt captured messages in TLS servers that use RSA key exchanges, which may lead to exposure of confidential or sensitive data.", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade cryptography to version 42.0.0", + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/cryptography@41.0.5" + } + ] + }, + { + "id": "CVE-2024-0727", + "description": "Issue summary: Processing a maliciously formatted PKCS12 file may lead OpenSSL to crash leading to a potential Denial of Service attack", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade cryptography to version 42.0.2", + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/cryptography@41.0.5" + } + ] + }, + { + "id": "CVE-2024-1135", + "description": "Gunicorn fails to properly validate Transfer-Encoding headers, leading to HTTP Request Smuggling (HRS) vulnerabilities...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade gunicorn to version 22.0.0", + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/gunicorn@21.2.0" + } + ] + }, + { + "id": "CVE-2024-21503", + "description": "Versions of the package black before 24.3.0 are vulnerable to Regular Expression Denial of Service (ReDoS) via the lines_with_leading_tabs_expanded function...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade black to version 24.3.0", + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/black@23.11.0" + } + ] + }, + { + "id": "CVE-2024-24680", + "description": "An issue was discovered in Django 3.2 before 3.2.24, 4.2 before 4.2.10, and Django 5.0 before 5.0.2...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade django to version 3.2.24, 4.2.10, 5.0.2", + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/django@4.2.8" + } + ] + }, + { + "id": "CVE-2024-26130", + "description": "cryptography is a package designed to expose cryptographic primitives and recipes to Python developers...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade cryptography to version 42.0.4", + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/cryptography@41.0.5" + } + ] + }, + { + "id": "CVE-2024-27351", + "description": "In Django 3.2 before 3.2.25, 4.2 before 4.2.11, and 5.0 before 5.0.3...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade django to version 3.2.25, 4.2.11, 5.0.3", + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/django@4.2.8" + } + ] + }, + { + "id": "CVE-2024-34069", + "description": "Werkzeug is a comprehensive WSGI web application library. The debugger in affected versions of Werkzeug can allow an attacker to execute code...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade werkzeug to version 3.0.3", + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/werkzeug@3.0.1" + } + ] + }, + { + "id": "CVE-2024-35195", + "description": "Requests is a HTTP library. 
Prior to 2.32.0, when making requests through a Requests Session, if the first request is made with verify=False...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade requests to version 2.32.0", + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/requests@2.31.0" + } + ] + }, + { + "id": "CVE-2024-3651", + "description": "A flaw was found in the python-idna library...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade idna to version 3.7", + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/idna@3.6" + } + ] + }, + { + "id": "CVE-2024-37891", + "description": "urllib3 is a user-friendly HTTP client library for Python...", + "analysis": { + "state": "exploitable", + "response": [] + }, + "recommendation": "Upgrade urllib3 to version 1.26.19, 2.2.2", + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/urllib3@2.1.0" + } + ] + }, + { + "id": "CVE-2024-4340", + "description": "Passing a heavily nested list to sqlparse.parse() leads to a Denial of Service due to RecursionError.", + "analysis": { + "state": "in_triage", + "response": [] + }, + "affects": [ + { + "ref": "urn:cdx:aebccdfe-fab4-4210-acce-bac771d4842d/1#pkg:pypi/sqlparse@0.4.4" + } + ] + } + ] +} \ No newline at end of file diff --git a/backend/unittests/vex/services/files/so_csaf_2020_0001_0001.json b/backend/unittests/vex/services/files/so_csaf_2020_0001_0001.json index 88d78b555..ab024d163 100644 --- a/backend/unittests/vex/services/files/so_csaf_2020_0001_0001.json +++ b/backend/unittests/vex/services/files/so_csaf_2020_0001_0001.json @@ -18,7 +18,7 @@ "generator": { "engine": { "name": "SecObserve", - "version": "1.26.0" + "version": "1.48.0" } }, "id": "so_2020_0001_0001", @@ -43,9 +43,9 @@ "name": "VEX Test", "product": { "name": "VEX Test", - "product_id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "product_id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "product_identification_helper": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" } } } @@ -167,7 +167,7 @@ "product_id": "cryptography:41.0.5@VEX Test" }, "product_reference": "pkg:pypi/cryptography@41.0.5", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, { "category": "default_component_of", @@ -176,7 +176,7 @@ "product_id": "gunicorn:21.2.0@VEX Test" }, "product_reference": "pkg:pypi/gunicorn@21.2.0", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, { "category": "default_component_of", @@ -185,7 +185,7 @@ "product_id": "black:23.11.0@VEX Test" }, "product_reference": "pkg:pypi/black@23.11.0", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, { "category": "default_component_of", @@ -194,7 +194,7 @@ "product_id": "django:4.2.8@VEX Test" }, "product_reference": "pkg:pypi/django@4.2.8", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, { "category": "default_component_of", @@ -203,7 +203,7 @@ "product_id": "werkzeug:3.0.1@VEX Test" }, "product_reference": "pkg:pypi/werkzeug@3.0.1", - "relates_to_product_reference": 
"pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, { "category": "default_component_of", @@ -212,7 +212,7 @@ "product_id": "requests:2.31.0@VEX Test" }, "product_reference": "pkg:pypi/requests@2.31.0", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, { "category": "default_component_of", @@ -221,7 +221,7 @@ "product_id": "idna:3.6@VEX Test" }, "product_reference": "pkg:pypi/idna@3.6", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, { "category": "default_component_of", @@ -230,7 +230,7 @@ "product_id": "urllib3:2.1.0@VEX Test" }, "product_reference": "pkg:pypi/urllib3@2.1.0", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, { "category": "default_component_of", @@ -239,7 +239,7 @@ "product_id": "sqlparse:0.4.4@VEX Test" }, "product_reference": "pkg:pypi/sqlparse@0.4.4", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test@v1.7.0" } ] }, diff --git a/backend/unittests/vex/services/files/so_csaf_2020_0001_0001_short.json b/backend/unittests/vex/services/files/so_csaf_2020_0001_0001_short.json index 813ca3f27..b46a448b6 100644 --- a/backend/unittests/vex/services/files/so_csaf_2020_0001_0001_short.json +++ b/backend/unittests/vex/services/files/so_csaf_2020_0001_0001_short.json @@ -18,7 +18,7 @@ "generator": { "engine": { "name": "SecObserve", - "version": "1.26.0" + "version": "1.48.0" } }, "id": "so_2020_0001_0001", @@ -43,9 +43,9 @@ "name": "VEX Test", "product": { "name": "VEX Test", - "product_id": "pkg:github/MaibornWolff/VEX_Test", + "product_id": "pkg:github/SecObserve/VEX_Test", "product_identification_helper": { - "purl": "pkg:github/MaibornWolff/VEX_Test" + "purl": "pkg:github/SecObserve/VEX_Test" } } } @@ -167,7 +167,7 @@ "product_id": "cryptography:41.0.5@VEX Test" }, "product_reference": "pkg:pypi/cryptography", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test" }, { "category": "default_component_of", @@ -176,7 +176,7 @@ "product_id": "gunicorn:21.2.0@VEX Test" }, "product_reference": "pkg:pypi/gunicorn", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test" }, { "category": "default_component_of", @@ -185,7 +185,7 @@ "product_id": "black:23.11.0@VEX Test" }, "product_reference": "pkg:pypi/black", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test" }, { "category": "default_component_of", @@ -194,7 +194,7 @@ "product_id": "django:4.2.8@VEX Test" }, "product_reference": "pkg:pypi/django", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test" }, { "category": "default_component_of", @@ -203,7 +203,7 @@ "product_id": "werkzeug:3.0.1@VEX Test" }, "product_reference": "pkg:pypi/werkzeug", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test" }, { "category": "default_component_of", @@ -212,7 +212,7 @@ "product_id": 
"requests:2.31.0@VEX Test" }, "product_reference": "pkg:pypi/requests", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test" }, { "category": "default_component_of", @@ -221,7 +221,7 @@ "product_id": "idna:3.6@VEX Test" }, "product_reference": "pkg:pypi/idna", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test" }, { "category": "default_component_of", @@ -230,7 +230,7 @@ "product_id": "urllib3:2.1.0@VEX Test" }, "product_reference": "pkg:pypi/urllib3", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test" }, { "category": "default_component_of", @@ -239,7 +239,7 @@ "product_id": "sqlparse:0.4.4@VEX Test" }, "product_reference": "pkg:pypi/sqlparse", - "relates_to_product_reference": "pkg:github/MaibornWolff/VEX_Test" + "relates_to_product_reference": "pkg:github/SecObserve/VEX_Test" } ] }, diff --git a/backend/unittests/vex/services/files/so_openvex_2020_0001_0001.json b/backend/unittests/vex/services/files/so_openvex_2020_0001_0001.json index f2e627132..bd3def834 100644 --- a/backend/unittests/vex/services/files/so_openvex_2020_0001_0001.json +++ b/backend/unittests/vex/services/files/so_openvex_2020_0001_0001.json @@ -10,9 +10,9 @@ "justification": "vulnerable_code_cannot_be_controlled_by_adversary", "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -32,9 +32,9 @@ "action_statement": "Upgrade cryptography to version 42.0.0", "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -54,9 +54,9 @@ "action_statement": "Upgrade cryptography to version 42.0.2", "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -76,9 +76,9 @@ "action_statement": "Upgrade gunicorn to version 22.0.0", "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -98,9 +98,9 @@ "action_statement": "Upgrade black to version 24.3.0", "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -120,9 +120,9 @@ "action_statement": "Upgrade django to version 3.2.24, 4.2.10, 5.0.2", "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -142,9 +142,9 @@ "action_statement": "Upgrade cryptography to version 42.0.4", "products": [ { - "@id": 
"pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -164,9 +164,9 @@ "action_statement": "Upgrade django to version 3.2.25, 4.2.11, 5.0.3", "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -186,9 +186,9 @@ "action_statement": "Upgrade werkzeug to version 3.0.3", "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -208,9 +208,9 @@ "action_statement": "Upgrade requests to version 2.32.0", "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -230,9 +230,9 @@ "action_statement": "Upgrade idna to version 3.7", "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -252,9 +252,9 @@ "action_statement": "Upgrade urllib3 to version 1.26.19, 2.2.2", "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -273,9 +273,9 @@ { "products": [ { - "@id": "pkg:github/MaibornWolff/VEX_Test@v1.7.0", + "@id": "pkg:github/SecObserve/VEX_Test@v1.7.0", "identifiers": { - "purl": "pkg:github/MaibornWolff/VEX_Test@v1.7.0" + "purl": "pkg:github/SecObserve/VEX_Test@v1.7.0" }, "subcomponents": [ { @@ -294,6 +294,6 @@ } ], "timestamp": "2024-07-14T11:17:57.668593+00:00", - "tooling": "SecObserve / 1.26.0", + "tooling": "SecObserve / 1.48.0", "version": 1 } \ No newline at end of file diff --git a/backend/unittests/vex/services/test_vex_import_csaf.py b/backend/unittests/vex/services/test_vex_import_csaf.py index a189bbeda..fcbea3e74 100644 --- a/backend/unittests/vex/services/test_vex_import_csaf.py +++ b/backend/unittests/vex/services/test_vex_import_csaf.py @@ -11,71 +11,55 @@ class TestVEXImportCSAF(BaseTestVEXImport): def test_import_long_long(self): self.load_vex_test() - with open( - path.dirname(__file__) + "/files/so_csaf_2020_0001_0001.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/so_csaf_2020_0001_0001.json") as testfile: import_vex(testfile) vex_document = VEX_Document.objects.get( document_id="https://csaf.example.com/so_2020_0001_0001", author="SecObserve", ) - self.check_vex_document( - vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF - ) + self.check_vex_document(vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF) self.check_product() def test_import_long_short(self): self.load_vex_test() - with open( - path.dirname(__file__) + "/files/so_csaf_2020_0001_0001_short.json" - ) as testfile: + with open(path.dirname(__file__) + 
"/files/so_csaf_2020_0001_0001_short.json") as testfile: import_vex(testfile) vex_document = VEX_Document.objects.get( document_id="https://csaf.example.com/so_2020_0001_0001", author="SecObserve", ) - self.check_vex_document( - vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF, short=True - ) + self.check_vex_document(vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF, short=True) self.check_product() def test_import_short_long(self): self.load_vex_test(short=True) - with open( - path.dirname(__file__) + "/files/so_csaf_2020_0001_0001.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/so_csaf_2020_0001_0001.json") as testfile: import_vex(testfile) vex_document = VEX_Document.objects.get( document_id="https://csaf.example.com/so_2020_0001_0001", author="SecObserve", ) - self.check_vex_document( - vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF - ) + self.check_vex_document(vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF) self.check_product(short=True) def test_import_short_short(self): self.load_vex_test(short=True) - with open( - path.dirname(__file__) + "/files/so_csaf_2020_0001_0001_short.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/so_csaf_2020_0001_0001_short.json") as testfile: import_vex(testfile) vex_document = VEX_Document.objects.get( document_id="https://csaf.example.com/so_2020_0001_0001", author="SecObserve", ) - self.check_vex_document( - vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF, short=True - ) + self.check_vex_document(vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_CSAF, short=True) self.check_product(short=True) diff --git a/backend/unittests/vex/services/test_vex_import_cyclonedx.py b/backend/unittests/vex/services/test_vex_import_cyclonedx.py new file mode 100644 index 000000000..28e08f3b0 --- /dev/null +++ b/backend/unittests/vex/services/test_vex_import_cyclonedx.py @@ -0,0 +1,35 @@ +from os import path + +from application.vex.models import VEX_Document, VEX_Statement +from application.vex.services.vex_import import import_vex +from application.vex.types import VEX_Document_Type +from unittests.vex.services.basetest_vex_import import BaseTestVEXImport + + +class TestVEXImportCycloneDX(BaseTestVEXImport): + def test_import_integrated(self): + self.load_vex_test() + + with open(path.dirname(__file__) + "/files/cyclonedx_vex_integrated.json") as testfile: + import_vex(testfile) + + vex_document = VEX_Document.objects.get( + document_id="urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79", + author="SecObserve", + ) + self.check_vex_document(vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_CYCLONEDX) + + self.check_product() + + def test_import_dedicated(self): + self.load_vex_test() + + with open(path.dirname(__file__) + "/files/cyclonedx_vex_test dedicated.json") as testfile: + import_vex(testfile) + + vex_document = VEX_Document.objects.get( + document_id="urn:uuid:fa9f9148-2935-422a-b058-20afa8cafa82", + author="SecObserve", + ) + self.check_vex_document(vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_CYCLONEDX) + self.check_product() diff --git a/backend/unittests/vex/services/test_vex_import_openvex.py b/backend/unittests/vex/services/test_vex_import_openvex.py index 79f30fb46..20e6f3e4b 100644 --- a/backend/unittests/vex/services/test_vex_import_openvex.py +++ b/backend/unittests/vex/services/test_vex_import_openvex.py @@ -10,17 +10,13 @@ class TestVEXImportOpenVEX(BaseTestVEXImport): def test_import(self): self.load_vex_test() - with open( - path.dirname(__file__) + 
"/files/so_openvex_2020_0001_0001.json" - ) as testfile: + with open(path.dirname(__file__) + "/files/so_openvex_2020_0001_0001.json") as testfile: import_vex(testfile) vex_document = VEX_Document.objects.get( document_id="https://openvex.example.com/so_openvex_2020_0001", author="SecObserve", ) - self.check_vex_document( - vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_OPENVEX - ) + self.check_vex_document(vex_document, VEX_Document_Type.VEX_DOCUMENT_TYPE_OPENVEX) self.check_product() diff --git a/bin/dev.sh b/bin/dev.sh index ff75efb87..3b044a881 100755 --- a/bin/dev.sh +++ b/bin/dev.sh @@ -1,3 +1,6 @@ #!/bin/sh +cd ./frontend +npm install --no-audit --no-fund && +cd .. docker compose -f docker-compose-dev.yml up --build diff --git a/bin/mkdocs.sh b/bin/mkdocs.sh new file mode 100755 index 000000000..f4b8f2b45 --- /dev/null +++ b/bin/mkdocs.sh @@ -0,0 +1,4 @@ +#!/bin/sh + +export PYTHONPATH="docs/mkdocs_plugins" +mkdocs serve \ No newline at end of file diff --git a/charts/secobserve/.helmignore b/charts/secobserve/.helmignore new file mode 100644 index 000000000..898df4886 --- /dev/null +++ b/charts/secobserve/.helmignore @@ -0,0 +1,24 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ + diff --git a/charts/secobserve/Chart.lock b/charts/secobserve/Chart.lock new file mode 100644 index 000000000..fb6dd503d --- /dev/null +++ b/charts/secobserve/Chart.lock @@ -0,0 +1,6 @@ +dependencies: +- name: postgresql + repository: oci://registry-1.docker.io/bitnamicharts + version: 16.7.27 +digest: sha256:e91754406fd7f90e16d5de9bca9562215ba7e04b96ceb0c82ad488a08e4f5856 +generated: "2025-09-23T11:30:35.326208+05:30" diff --git a/charts/secobserve/Chart.yaml b/charts/secobserve/Chart.yaml new file mode 100644 index 000000000..2a3802421 --- /dev/null +++ b/charts/secobserve/Chart.yaml @@ -0,0 +1,33 @@ +apiVersion: v2 +name: secobserve +description: | + A Helm chart to deploy SecObserve, an open-source vulnerability and license management system + designed for software development teams and cloud-native environments. + + SecObserve helps teams identify, manage, and remediate security vulnerabilities and license compliance issues + across their software projects, enhancing visibility and improving DevSecOps workflows. + +home: https://github.com/SecObserve/SecObserve +sources: + - https://github.com/SecObserve/SecObserve + +maintainers: + - name: SecObserve community + +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 1.0.18 + +# This is the version number of the application being deployed. This version number should be +# incremented each time you make changes to the application. Versions are not expected to +# follow Semantic Versioning. They should reflect the version the application is using. 
+appVersion: 1.48.0 + +dependencies: +- condition: postgresql.enabled + name: postgresql + repository: oci://registry-1.docker.io/bitnamicharts + version: 16.x.x diff --git a/charts/secobserve/README.md b/charts/secobserve/README.md new file mode 100644 index 000000000..47a6a870f --- /dev/null +++ b/charts/secobserve/README.md @@ -0,0 +1,1089 @@ +# secobserve + +## Installing the chart + +The chart can be installed as from the OCI repository using `helm install secobserve --version 1.48.0 oci://ghcr.io/SecObserve/charts/secobserve`. + +![Version: 1.0.18](https://img.shields.io/badge/Version-1.0.18-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 1.48.0](https://img.shields.io/badge/AppVersion-1.48.0-informational?style=flat-square) + +A Helm chart to deploy SecObserve, an open-source vulnerability and license management system +designed for software development teams and cloud-native environments. + +SecObserve helps teams identify, manage, and remediate security vulnerabilities and license compliance issues +across their software projects, enhancing visibility and improving DevSecOps workflows. + +**Homepage:** + +## Maintainers + +| Name | Email | Url | +| ---- | ------ | --- | +| SecObserve community | | | + +## Source Code + +* + +## Requirements + +| Repository | Name | Version | +|------------|------|---------| +| oci://registry-1.docker.io/bitnamicharts | postgresql | 16.x.x | + +## Values + +### Pod + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| affinity | object | `{}` | Sets the affinity for the secobserve pod For more information on affinity, see https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#affinity-and-anti-affinity | +| nodeSelector | object | `{}` | Node labels to select for secobserve pod assignment | +| replicaCount | int | `1` | number of replicas to deploy | +| tolerations | object | `{}` | Toleration labels for pod assignment | + +### Backend + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| backend.env[0] | object | `{"name":"ADMIN_USER","value":"admin"}` | admin user name | +| backend.env[10] | object | `{"name":"CORS_ALLOWED_ORIGINS","value":"https://secobserve.dev"}` | CORS allowed origins | +| backend.env[11] | object | `{"name":"DJANGO_SECRET_KEY","valueFrom":{"secretKeyRef":{"key":"django_secret_key","name":"secobserve-secrets"}}}` | django secret key | +| backend.env[11].valueFrom.secretKeyRef | object | `{"key":"django_secret_key","name":"secobserve-secrets"}` | secret name containing the django secret key | +| backend.env[12] | object | `{"name":"FIELD_ENCRYPTION_KEY","valueFrom":{"secretKeyRef":{"key":"field_encryption_key","name":"secobserve-secrets"}}}` | encryption key for fields | +| backend.env[12].valueFrom.secretKeyRef | object | `{"key":"field_encryption_key","name":"secobserve-secrets"}` | secret name containig the field encryption key | +| backend.env[13] | object | `{"name":"OIDC_AUTHORITY","value":"https://oidc.secobserve.dev"}` | admin OIDC authority | +| backend.env[14] | object | `{"name":"OIDC_CLIENT_ID","value":"secobserve"}` | OIDC client id | +| backend.env[15] | object | `{"name":"OIDC_USERNAME","value":"preferred_username"}` | OIDC user name | +| backend.env[16] | object | `{"name":"OIDC_FIRST_NAME","value":"given_name"}` | OIDC first name | +| backend.env[17] | object | `{"name":"OIDC_LAST_NAME","value":"family_name"}` | OIDC last name | +| 
backend.env[18] | object | `{"name":"OIDC_FULL_NAME","value":"preferred_username"}` | OIDC full name | +| backend.env[19] | object | `{"name":"OIDC_EMAIL","value":"email"}` | OIDC email address | +| backend.env[1] | object | `{"name":"ADMIN_PASSWORD","valueFrom":{"secretKeyRef":{"key":"password","name":"secobserve-secrets"}}}` | admin password | +| backend.env[20] | object | `{"name":"OIDC_GROUPS","value":"groups"}` | OIDC groups | +| backend.env[2] | object | `{"name":"ADMIN_EMAIL","value":"admin@admin.com"}` | admin email address | +| backend.env[3] | object | `{"name":"DATABASE_ENGINE","value":"django.db.backends.postgresql"}` | database engine | +| backend.env[4] | object | `{"name":"DATABASE_HOST","value":"secobserve-postgresql"}` | database host/service | +| backend.env[5] | object | `{"name":"DATABASE_PORT","value":"5432"}` | database port | +| backend.env[6] | object | `{"name":"DATABASE_DB","value":"secobserve"}` | database name | +| backend.env[7] | object | `{"name":"DATABASE_USER","value":"secobserve"}` | database user | +| backend.env[8] | object | `{"name":"DATABASE_PASSWORD","valueFrom":{"secretKeyRef":{"key":"password","name":"secobserve-postgresql"}}}` | database password | +| backend.env[8].valueFrom.secretKeyRef | object | `{"key":"password","name":"secobserve-postgresql"}` | reference to secret containing db credentials | +| backend.env[9] | object | `{"name":"ALLOWED_HOSTS","value":"secobserve.dev"}` | allowed hosts | +| backend.image | object | `{"pullPolicy":"IfNotPresent","registry":"ghcr.io","repository":"secobserve/secobserve-backend","tag":null}` | image registry | +| backend.image.pullPolicy | string | `"IfNotPresent"` | image pull policy | +| backend.image.repository | string | `"secobserve/secobserve-backend"` | image repository | +| backend.image.tag | string | `nil` | image tag (uses appVersion value of Chart.yaml if not specified) | +| backend.resources | object | `{"limits":{"cpu":"1000m","memory":"1500Mi"},"requests":{"cpu":"1000m","memory":"1500Mi"}}` | resource requirements and limits | +| backend.securityContext | object | `{"allowPrivilegeEscalation":false,"enabled":true,"runAsGroup":1001,"runAsNonRoot":true,"runAsUser":1001}` | security context to use for backend pod | +| backend.service.port | int | `5000` | service port | + +### dbchecker + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| dbchecker.enabled | bool | `true` | enable dbchecker init container | +| dbchecker.hostname | string | `"secobserve-postgresql"` | enable dbchecker init container | +| dbchecker.image.pullPolicy | string | `"IfNotPresent"` | Image pull policy for the dbchecker image | +| dbchecker.image.repository | string | `"busybox"` | Docker image used to check Database readiness at startup | +| dbchecker.image.tag | string | `"latest"` | Image tag for the dbchecker image | +| dbchecker.port | int | `5432` | enable dbchecker init container | +| dbchecker.resources | object | `{"limits":{"cpu":"20m","memory":"32Mi"},"requests":{"cpu":"20m","memory":"32Mi"}}` | Resource requests and limits for the dbchecker container | +| dbchecker.securityContext | object | `{"allowPrivilegeEscalation":false,"runAsGroup":1001,"runAsNonRoot":true,"runAsUser":1001}` | SecurityContext for the dbchecker container | + +### Frontend + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| frontend.env[0] | object | `{"name":"API_BASE_URL","value":"https://secobserve.dev/api"}` | Base URL for API | +| frontend.env[1] | object | 
`{"name":"OIDC_ENABLED","value":"false"}` | enable OIDC authentication | +| frontend.env[2] | object | `{"name":"OIDC_AUTHORITY","value":"https://oidc.secobserve.dev"}` | oidc metadata endpoint | +| frontend.env[3] | object | `{"name":"OIDC_CLIENT_ID","value":"secobserve"}` | OIDC client ID | +| frontend.env[4] | object | `{"name":"OIDC_REDIRECT_URI","value":"https://secobserve.dev/"}` | OIDC client redirect URL | +| frontend.env[5] | object | `{"name":"OIDC_POST_LOGOUT_REDIRECT_URI","value":"https://secobserve.dev/"}` | URI to redirect to after logout | +| frontend.env[6] | object | `{"name":"OIDC_PROMPT","value":null}` | OIDC prompt | +| frontend.image.pullPolicy | string | `"IfNotPresent"` | image pull policy | +| frontend.image.registry | string | `"ghcr.io"` | image registry | +| frontend.image.repository | string | `"secobserve/secobserve-frontend"` | image repository | +| frontend.image.tag | string | `nil` | image tag (uses appVersion value of Chart.yaml if not specified) | +| frontend.resources | object | `{"limits":{"cpu":"500m","memory":"1000Mi"},"requests":{"cpu":"500m","memory":"1000Mi"}}` | resource requirements and limits | +| frontend.securityContext | object | `{"allowPrivilegeEscalation":false,"enabled":true,"runAsGroup":1001,"runAsNonRoot":true,"runAsUser":1001}` | securityContext to use for frontend container | +| frontend.service.port | int | `3000` | service port | + +### Ingress + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| ingress.annotations | object | `{"kubernetes.io/ingress.class":"nginx","nginx.ingress.kubernetes.io/proxy-read-timeout":"600","nginx.ingress.kubernetes.io/proxy-send-timeout":"600","nginx.ingress.kubernetes.io/ssl-redirect":"true"}` | annotations to add to ingress | +| ingress.enabled | bool | `true` | If true, a Kubernetes Ingress resource will be created to the http port of the secobserve Service | +| ingress.hostname | string | `"secobserve.dev"` | hostname of ingress | +| ingress.ingressClassName | string | `"nginx"` | Example configuration for using an Amazon Load Balancer controller ingressClassName: alb annotations: alb.ingress.kubernetes.io/listen-ports: '[{"HTTPS": 443}]' alb.ingress.kubernetes.io/ssl-policy: 'ELBSecurityPolicy-TLS13-1-2-FIPS-2023-04' alb.ingress.kubernetes.io/healthcheck-path: / | + +### Postgresql + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| postgresql.architecture | string | `"standalone"` | PostgreSQL architecture (`standalone` or `replication`) | +| postgresql.auth | object | `{"database":"secobserve","existingSecret":"","password":"","postgresPassword":"","secretKeys":{"userPasswordKey":"password"},"username":"secobserve"}` | enable postgresql subchart | +| postgresql.auth.database | string | `"secobserve"` | Name for a custom database to create | +| postgresql.auth.existingSecret | string | `""` | Name of existing secret to use for PostgreSQL credentials | +| postgresql.auth.password | string | `""` | Password for the custom user to create | +| postgresql.auth.postgresPassword | string | `""` | Password for the "postgres" admin user. Ignored if `auth.existingSecret` with key `postgres-password` is provided | +| postgresql.auth.secretKeys.userPasswordKey | string | `"password"` | Name of key in existing secret to use for PostgreSQL credentials. Only used when `auth.existingSecret` is set. 
| +| postgresql.auth.username | string | `"secobserve"` | Name for a custom user to create | +| postgresql.enabled | bool | `true` | Switch to enable or disable the PostgreSQL helm chart | +| postgresql.image | object | `{"repository":"bitnamilegacy/postgresql"}` | enable postgresql subchart | +| postgresql.metrics | object | `{"image":{"repository":"bitnamilegacy/postgres-exporter"}}` | enable postgresql subchart | +| postgresql.volumePermissions | object | `{"image":{"repository":"bitnamilegacy/os-shell"}}` | enable postgresql subchart | + +### Service + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| service | object | `{"type":"ClusterIP"}` | defines the secobserve http service | +| service.type | string | `"ClusterIP"` | Service type of service | + +## Values + +

Pod

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
KeyTypeDefaultDescription
affinityobject
+{}
+
+
Sets the affinity for the secobserve pod For more information on affinity, see https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#affinity-and-anti-affinity
nodeSelectorobject
+{}
+
+
Node labels to select for secobserve pod assignment
replicaCountint
+1
+
+
number of replicas to deploy
tolerationsobject
+{}
+
+
Toleration labels for pod assignment
+
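As a hedged illustration of the Pod-level keys documented above (the key paths and defaults are taken from this table; the node label is an assumption, not part of the chart), a small scheduling override could look like this:

```yaml
# values-scheduling.yaml -- illustrative override file, not shipped with the chart
replicaCount: 2                  # run two SecObserve pods instead of one

# Example node label; replace with a label that actually exists in your cluster.
nodeSelector:
  kubernetes.io/arch: amd64
```

It could then be applied with something like `helm upgrade --install secobserve oci://ghcr.io/SecObserve/charts/secobserve -f values-scheduling.yaml`.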

Backend

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
KeyTypeDefaultDescription
backend.env[0]object
+{
+  "name": "ADMIN_USER",
+  "value": "admin"
+}
+
+
admin user name
backend.env[10]object
+{
+  "name": "CORS_ALLOWED_ORIGINS",
+  "value": "https://secobserve.dev"
+}
+
+
CORS allowed origins
backend.env[11]object
+{
+  "name": "DJANGO_SECRET_KEY",
+  "valueFrom": {
+    "secretKeyRef": {
+      "key": "django_secret_key",
+      "name": "secobserve-secrets"
+    }
+  }
+}
+
+
django secret key
backend.env[11].valueFrom.secretKeyRefobject
+{
+  "key": "django_secret_key",
+  "name": "secobserve-secrets"
+}
+
+
secret name containing the django secret key
backend.env[12]object
+{
+  "name": "FIELD_ENCRYPTION_KEY",
+  "valueFrom": {
+    "secretKeyRef": {
+      "key": "field_encryption_key",
+      "name": "secobserve-secrets"
+    }
+  }
+}
+
+
encryption key for fields
backend.env[12].valueFrom.secretKeyRefobject
+{
+  "key": "field_encryption_key",
+  "name": "secobserve-secrets"
+}
+
+
secret name containing the field encryption key
backend.env[13]object
+{
+  "name": "OIDC_AUTHORITY",
+  "value": "https://oidc.secobserve.dev"
+}
+
+
OIDC authority
backend.env[14]object
+{
+  "name": "OIDC_CLIENT_ID",
+  "value": "secobserve"
+}
+
+
OIDC client id
backend.env[15]object
+{
+  "name": "OIDC_USERNAME",
+  "value": "preferred_username"
+}
+
+
OIDC user name
backend.env[16]object
+{
+  "name": "OIDC_FIRST_NAME",
+  "value": "given_name"
+}
+
+
OIDC first name
backend.env[17]object
+{
+  "name": "OIDC_LAST_NAME",
+  "value": "family_name"
+}
+
+
OIDC last name
backend.env[18]object
+{
+  "name": "OIDC_FULL_NAME",
+  "value": "preferred_username"
+}
+
+
OIDC full name
backend.env[19]object
+{
+  "name": "OIDC_EMAIL",
+  "value": "email"
+}
+
+
OIDC email address
backend.env[1]object
+{
+  "name": "ADMIN_PASSWORD",
+  "valueFrom": {
+    "secretKeyRef": {
+      "key": "password",
+      "name": "secobserve-secrets"
+    }
+  }
+}
+
+
admin password
backend.env[20]object
+{
+  "name": "OIDC_GROUPS",
+  "value": "groups"
+}
+
+
OIDC groups
backend.env[2]object
+{
+  "name": "ADMIN_EMAIL",
+  "value": "admin@admin.com"
+}
+
+
admin email address
backend.env[3]object
+{
+  "name": "DATABASE_ENGINE",
+  "value": "django.db.backends.postgresql"
+}
+
+
database engine
backend.env[4]object
+{
+  "name": "DATABASE_HOST",
+  "value": "secobserve-postgresql"
+}
+
+
database host/service
backend.env[5]object
+{
+  "name": "DATABASE_PORT",
+  "value": "5432"
+}
+
+
database port
backend.env[6]object
+{
+  "name": "DATABASE_DB",
+  "value": "secobserve"
+}
+
+
database name
backend.env[7]object
+{
+  "name": "DATABASE_USER",
+  "value": "secobserve"
+}
+
+
database user
backend.env[8]object
+{
+  "name": "DATABASE_PASSWORD",
+  "valueFrom": {
+    "secretKeyRef": {
+      "key": "password",
+      "name": "secobserve-postgresql"
+    }
+  }
+}
+
+
database password
backend.env[8].valueFrom.secretKeyRefobject
+{
+  "key": "password",
+  "name": "secobserve-postgresql"
+}
+
+
reference to secret containing db credentials
backend.env[9]object
+{
+  "name": "ALLOWED_HOSTS",
+  "value": "secobserve.dev"
+}
+
+
allowed hosts
backend.imageobject
+{
+  "pullPolicy": "IfNotPresent",
+  "registry": "ghcr.io",
+  "repository": "secobserve/secobserve-backend",
+  "tag": null
+}
+
+
image settings (registry, repository, tag, pull policy)
backend.image.pullPolicystring
+"IfNotPresent"
+
+
image pull policy
backend.image.repositorystring
+"secobserve/secobserve-backend"
+
+
image repository
backend.image.tagstring
+null
+
+
image tag (uses appVersion value of Chart.yaml if not specified)
backend.resourcesobject
+{
+  "limits": {
+    "cpu": "1000m",
+    "memory": "1500Mi"
+  },
+  "requests": {
+    "cpu": "1000m",
+    "memory": "1500Mi"
+  }
+}
+
+
resource requirements and limits
backend.securityContextobject
+{
+  "allowPrivilegeEscalation": false,
+  "enabled": true,
+  "runAsGroup": 1001,
+  "runAsNonRoot": true,
+  "runAsUser": 1001
+}
+
+
security context to use for backend pod
backend.service.portint
+5000
+
+
service port
+
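To make the Backend keys above concrete, here is a minimal sketch of a values override that pins the image tag and keeps the Django secret key in a pre-created Kubernetes secret. The key paths mirror the table; the tag value and secret name are assumptions for illustration. Note that overriding `backend.env` replaces the default list entirely (Helm does not merge lists), so a real override would have to restate every required entry.

```yaml
# values-backend.yaml -- illustrative only; names and tag are assumptions
backend:
  image:
    registry: ghcr.io
    repository: secobserve/secobserve-backend
    tag: "1.48.0"                 # assumed tag; defaults to Chart.yaml's appVersion when unset
  env:
    - name: ALLOWED_HOSTS
      value: secobserve.example.com
    - name: DJANGO_SECRET_KEY
      valueFrom:
        secretKeyRef:
          name: secobserve-secrets   # assumed pre-created secret
          key: django_secret_key
```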

dbchecker

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
KeyTypeDefaultDescription
dbchecker.enabledbool
+true
+
+
enable dbchecker init container
dbchecker.hostnamestring
+"secobserve-postgresql"
+
+
hostname of the database the dbchecker init container waits for
dbchecker.image.pullPolicystring
+"IfNotPresent"
+
+
Image pull policy for the dbchecker image
dbchecker.image.repositorystring
+"busybox"
+
+
Docker image used to check Database readiness at startup
dbchecker.image.tagstring
+"latest"
+
+
Image tag for the dbchecker image
dbchecker.portint
+5432
+
+
port of the database the dbchecker init container waits for
dbchecker.resourcesobject
+{
+  "limits": {
+    "cpu": "20m",
+    "memory": "32Mi"
+  },
+  "requests": {
+    "cpu": "20m",
+    "memory": "32Mi"
+  }
+}
+
+
Resource requests and limits for the dbchecker container
dbchecker.securityContextobject
+{
+  "allowPrivilegeEscalation": false,
+  "runAsGroup": 1001,
+  "runAsNonRoot": true,
+  "runAsUser": 1001
+}
+
+
SecurityContext for the dbchecker container
+

Frontend

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
KeyTypeDefaultDescription
frontend.env[0]object
+{
+  "name": "API_BASE_URL",
+  "value": "https://secobserve.dev/api"
+}
+
+
Base URL for API
frontend.env[1]object
+{
+  "name": "OIDC_ENABLED",
+  "value": "false"
+}
+
+
enable OIDC authentication
frontend.env[2]object
+{
+  "name": "OIDC_AUTHORITY",
+  "value": "https://oidc.secobserve.dev"
+}
+
+
OIDC metadata endpoint
frontend.env[3]object
+{
+  "name": "OIDC_CLIENT_ID",
+  "value": "secobserve"
+}
+
+
OIDC client ID
frontend.env[4]object
+{
+  "name": "OIDC_REDIRECT_URI",
+  "value": "https://secobserve.dev/"
+}
+
+
OIDC client redirect URL
frontend.env[5]object
+{
+  "name": "OIDC_POST_LOGOUT_REDIRECT_URI",
+  "value": "https://secobserve.dev/"
+}
+
+
URI to redirect to after logout
frontend.env[6]object
+{
+  "name": "OIDC_PROMPT",
+  "value": null
+}
+
+
OIDC prompt
frontend.image.pullPolicystring
+"IfNotPresent"
+
+
image pull policy
frontend.image.registrystring
+"ghcr.io"
+
+
image registry
frontend.image.repositorystring
+"secobserve/secobserve-frontend"
+
+
image repository
frontend.image.tagstring
+null
+
+
image tag (uses appVersion value of Chart.yaml if not specified)
frontend.resourcesobject
+{
+  "limits": {
+    "cpu": "500m",
+    "memory": "1000Mi"
+  },
+  "requests": {
+    "cpu": "500m",
+    "memory": "1000Mi"
+  }
+}
+
+
resource requirements and limits
frontend.securityContextobject
+{
+  "allowPrivilegeEscalation": false,
+  "enabled": true,
+  "runAsGroup": 1001,
+  "runAsNonRoot": true,
+  "runAsUser": 1001
+}
+
+
securityContext to use for frontend container
frontend.service.portint
+3000
+
+
service port
+
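Since Helm replaces list values as a whole, switching the frontend to OIDC means supplying the complete `frontend.env` list again; a sketch with placeholder URLs and client ID might look like this:

```yaml
# sketch: frontend environment with OIDC enabled (all URLs are placeholders)
frontend:
  env:
    - { name: API_BASE_URL, value: https://secobserve.example.com/api }
    - { name: OIDC_ENABLED, value: "true" }
    - { name: OIDC_AUTHORITY, value: https://idp.example.com/realms/secobserve }
    - { name: OIDC_CLIENT_ID, value: secobserve }
    - { name: OIDC_REDIRECT_URI, value: https://secobserve.example.com/ }
    - { name: OIDC_POST_LOGOUT_REDIRECT_URI, value: https://secobserve.example.com/ }
    - { name: OIDC_PROMPT, value: null }
```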

Ingress

+| Key | Type | Default | Description |
+|-----|------|---------|-------------|
+| ingress.annotations | object | `{"kubernetes.io/ingress.class":"nginx","nginx.ingress.kubernetes.io/proxy-read-timeout":"600","nginx.ingress.kubernetes.io/proxy-send-timeout":"600","nginx.ingress.kubernetes.io/ssl-redirect":"true"}` | annotations to add to the ingress |
+| ingress.enabled | bool | `true` | If true, a Kubernetes Ingress resource is created for the HTTP ports of the SecObserve Service |
+| ingress.hostname | string | `"secobserve.dev"` | hostname of the ingress |
+| ingress.ingressClassName | string | `"nginx"` | ingress class name; for the Amazon Load Balancer controller use `alb` together with the ALB annotations shown in the example below |
+
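The ALB configuration referenced in the `ingress.ingressClassName` description expands to a values override roughly like this, assuming the AWS Load Balancer Controller is installed in the cluster:

```yaml
# sketch: ingress via the AWS Load Balancer Controller instead of NGINX
ingress:
  enabled: true
  hostname: secobserve.example.com   # placeholder hostname
  ingressClassName: alb
  annotations:
    alb.ingress.kubernetes.io/listen-ports: '[{"HTTPS": 443}]'
    alb.ingress.kubernetes.io/ssl-policy: 'ELBSecurityPolicy-TLS13-1-2-FIPS-2023-04'
    alb.ingress.kubernetes.io/healthcheck-path: /
```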

Postgresql

+| Key | Type | Default | Description |
+|-----|------|---------|-------------|
+| postgresql.architecture | string | `"standalone"` | PostgreSQL architecture (`standalone` or `replication`) |
+| postgresql.auth | object | `{"database":"secobserve","existingSecret":"","password":"","postgresPassword":"","secretKeys":{"userPasswordKey":"password"},"username":"secobserve"}` | authentication settings for the PostgreSQL subchart |
+| postgresql.auth.database | string | `"secobserve"` | Name for a custom database to create |
+| postgresql.auth.existingSecret | string | `""` | Name of existing secret to use for PostgreSQL credentials |
+| postgresql.auth.password | string | `""` | Password for the custom user to create |
+| postgresql.auth.postgresPassword | string | `""` | Password for the "postgres" admin user. Ignored if `auth.existingSecret` with key `postgres-password` is provided |
+| postgresql.auth.secretKeys.userPasswordKey | string | `"password"` | Name of key in existing secret to use for PostgreSQL credentials. Only used when `auth.existingSecret` is set. |
+| postgresql.auth.username | string | `"secobserve"` | Name for a custom user to create |
+| postgresql.enabled | bool | `true` | Switch to enable or disable the PostgreSQL helm chart |
+| postgresql.image | object | `{"repository":"bitnamilegacy/postgresql"}` | image used by the PostgreSQL subchart |
+| postgresql.metrics | object | `{"image":{"repository":"bitnamilegacy/postgres-exporter"}}` | metrics exporter image of the PostgreSQL subchart |
+| postgresql.volumePermissions | object | `{"image":{"repository":"bitnamilegacy/os-shell"}}` | volume permissions image of the PostgreSQL subchart |
+
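The `existingSecret`/`secretKeys.userPasswordKey` pair is intended to keep the database password out of values files; a sketch, assuming a pre-created Secret named `secobserve-db` with a `password` key, could look like this:

```yaml
# sketch: take the PostgreSQL password from a pre-created Secret
postgresql:
  enabled: true
  auth:
    username: secobserve
    database: secobserve
    existingSecret: secobserve-db   # assumed name of the pre-created Secret
    secretKeys:
      userPasswordKey: password     # key inside that Secret holding the password
```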

Service

+| Key | Type | Default | Description |
+|-----|------|---------|-------------|
+| service | object | `{"type":"ClusterIP"}` | defines the SecObserve HTTP service |
+| service.type | string | `"ClusterIP"` | type of the Kubernetes Service |
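For the ALB setup sketched in the Ingress section, the commented hint in `values.yaml` suggests switching the service to NodePort; as a minimal override:

```yaml
# sketch: expose the pods via NodePort, e.g. as ALB target group backends
service:
  type: NodePort
```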
+ +---------------------------------------------- +Autogenerated from chart metadata using [helm-docs v1.14.2](https://github.com/norwoodj/helm-docs/releases/v1.14.2) diff --git a/charts/secobserve/README.md.gotmpl b/charts/secobserve/README.md.gotmpl new file mode 100644 index 000000000..cbbfbb377 --- /dev/null +++ b/charts/secobserve/README.md.gotmpl @@ -0,0 +1,25 @@ +{{ template "chart.header" . }} + +## Installing the chart + +The chart can be installed as from the OCI repository using `helm install {{ template "chart.name" . }} --version {{ template "chart.appVersion" . }} oci://ghcr.io/SecObserve/charts/{{ template "chart.name" . }}`. + +{{ template "chart.deprecationWarning" . }} + +{{ template "chart.badgesSection" . }} + +{{ template "chart.description" . }} + +{{ template "chart.homepageLine" . }} + +{{ template "chart.maintainersSection" . }} + +{{ template "chart.sourcesSection" . }} + +{{ template "chart.requirementsSection" . }} + +{{ template "chart.valuesSection" . }} + +{{ template "chart.valuesSectionHtml" . }} + +{{ template "helm-docs.versionFooter" . }} diff --git a/charts/secobserve/templates/_helpers.tpl b/charts/secobserve/templates/_helpers.tpl new file mode 100644 index 000000000..b7aae41f3 --- /dev/null +++ b/charts/secobserve/templates/_helpers.tpl @@ -0,0 +1,39 @@ +{{/* +Expand the name of the chart. +*/}} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "secobserve.fullname" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "secobserve.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "secobserve.labels" -}} +helm.sh/chart: {{ include "secobserve.chart" . }} +{{ include "secobserve.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "secobserve.selectorLabels" -}} +com.secobserve.tenant: {{ include "secobserve.fullname" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} \ No newline at end of file diff --git a/charts/secobserve/templates/ingress.yaml b/charts/secobserve/templates/ingress.yaml new file mode 100644 index 000000000..618a18022 --- /dev/null +++ b/charts/secobserve/templates/ingress.yaml @@ -0,0 +1,50 @@ +{{- if or .Values.ingress.enabled }} +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: {{ include "secobserve.fullname" . }}-ing + namespace: "{{ .Release.Namespace }}" + labels: + {{- include "secobserve.labels" . | nindent 4 }} + annotations: + {{- with .Values.ingress.annotations }} + {{ toYaml . | nindent 4 }} + {{- end }} +spec: + ingressClassName: {{ .Values.ingress.ingressClassName }} + rules: + - host: {{ .Values.ingress.hostname }} + http: + paths: + - path: /api/ + pathType: Prefix + backend: + service: + name: {{ include "secobserve.fullname" . }}-svc + port: + name: backend-http + - path: /django-static/ + pathType: Prefix + backend: + service: + name: {{ include "secobserve.fullname" . 
}}-svc + port: + name: backend-http + - path: / + pathType: Prefix + backend: + service: + name: {{ include "secobserve.fullname" . }}-svc + port: + name: frontend-http + {{- if .Values.ingress.tls }} + tls: + - hosts: + - {{ .Values.ingress.hostname }} + secretName: {{ .Values.ingress.tls.secretName }} + {{- end }} +{{- if .Values.ingress.paths }} +{{- tpl (toYaml .Values.ingress.paths | indent 8) . }} +{{- end }} +{{- end }} diff --git a/charts/secobserve/templates/service.yaml b/charts/secobserve/templates/service.yaml new file mode 100644 index 000000000..5536b1889 --- /dev/null +++ b/charts/secobserve/templates/service.yaml @@ -0,0 +1,24 @@ +apiVersion: v1 +kind: Service +metadata: + name: "{{ include "secobserve.fullname" . }}-svc" + namespace: "{{ .Release.Namespace }}" + labels: + {{- include "secobserve.labels" . | nindent 4 }} + {{- if .Values.service.annotations }} + annotations: + {{ toYaml .Values.service.annotations | indent 4 }} + {{- end }} +spec: + type: {{ .Values.service.type | default "ClusterIP" }} + ports: + - name: frontend-http + port: {{ .Values.frontend.service.port }} + targetPort: frontend-http + protocol: TCP + - name: backend-http + port: {{ .Values.backend.service.port }} + targetPort: backend-http + protocol: TCP + selector: + {{- include "secobserve.selectorLabels" . | nindent 4 }} diff --git a/charts/secobserve/templates/statefulset.yaml b/charts/secobserve/templates/statefulset.yaml new file mode 100644 index 000000000..c845132ca --- /dev/null +++ b/charts/secobserve/templates/statefulset.yaml @@ -0,0 +1,147 @@ +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "secobserve.fullname" . }}-sts + namespace: "{{ .Release.Namespace }}" + labels: + {{- include "secobserve.labels" . | nindent 4 }} +spec: + serviceName: {{ include "secobserve.fullname" . }}-svc + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: + {{- include "secobserve.selectorLabels" . | nindent 6 }} + updateStrategy: + type: RollingUpdate + template: + metadata: + annotations: + {{- with .Values.podAnnotations }} + {{ toYaml . | nindent 8 }} + {{- end }} + labels: + {{- with .Values.labels }} + {{ toYaml . | nindent 8 }} + {{- end }} + {{- include "secobserve.selectorLabels" . | nindent 8 }} + spec: + {{- if or .Values.dbchecker.enabled .Values.extraInitContainers }} + initContainers: + {{- if and .Values.dbchecker.enabled }} + - name: dbchecker + image: "{{ .Values.dbchecker.image.repository }}{{- if (.Values.dbchecker.image.digest) -}}@{{ .Values.dbchecker.image.digest }}{{- else -}}:{{ .Values.dbchecker.image.tag }} {{- end }}" + imagePullPolicy: {{ .Values.dbchecker.image.pullPolicy }} + securityContext: + {{- toYaml .Values.dbchecker.securityContext | nindent 12 }} + command: + - sh + - -c + - | + echo 'Waiting for Database to become ready...' + + until printf "." && nc -z -w 2 {{ required ".Values.dbchecker.hostname is required if dbchecker is enabled!" .Values.dbchecker.hostname }} {{ required ".Values.database.port is required if dbchecker is enabled!" .Values.dbchecker.port }}; do + sleep 2; + done; + + echo 'Database OK ✓' + resources: + {{- toYaml .Values.dbchecker.resources | nindent 12 }} + {{- end }} + {{- with .Values.extraInitContainers }} + {{- tpl . $ | nindent 8 }} + {{- end }} + {{- end }} + {{- with .Values.nodeSelector }} + nodeSelector: + {{ toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.affinity }} + affinity: + {{ toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{ toYaml . 
| nindent 8 }} + {{- end }} + {{- if .Values.securityContext }} + securityContext: + {{ toYaml .Values.securityContext | nindent 8 }} + {{- end }} + containers: + - name: frontend + image: "{{ .Values.frontend.image.registry }}/{{ .Values.frontend.image.repository }}:{{ .Values.frontend.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.frontend.image.pullPolicy }} + {{- with .Values.frontend.env }} + env: + {{- toYaml . | nindent 12 }} + {{- end }} + ports: + - name: frontend-http + containerPort: 3000 + livenessProbe: + httpGet: + path: / + port: 3000 + initialDelaySeconds: 300 + timeoutSeconds: 5 + failureThreshold: 12 + readinessProbe: + httpGet: + path: / + port: 3000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + failureThreshold: 12 + resources: + {{- toYaml .Values.frontend.resources | nindent 12 }} + volumeMounts: + {{- if .Values.frontend.volumeMounts }} + {{ toYaml .Values.frontend.volumeMounts | indent 12 }} + {{- end }} + {{- if .Values.frontend.securityContext.enabled }} + securityContext: + {{ toYaml .Values.frontend.securityContext | nindent 12 }} + {{- end }} + + - name: backend + image: "{{ .Values.backend.image.registry }}/{{ .Values.backend.image.repository }}:{{ .Values.backend.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.backend.image.pullPolicy }} + {{- with .Values.backend.env }} + env: + {{- toYaml . | nindent 12 }} + {{- end }} + ports: + - name: backend-http + containerPort: 5000 + livenessProbe: + httpGet: + path: /api/status/health/ + port: 5000 + initialDelaySeconds: 300 + timeoutSeconds: 5 + failureThreshold: 12 + readinessProbe: + httpGet: + path: /api/status/health/ + port: 5000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + failureThreshold: 12 + resources: + {{- toYaml .Values.backend.resources | nindent 12 }} + volumeMounts: + {{- if .Values.backend.volumeMounts }} + {{ toYaml .Values.backend.volumeMounts | indent 12 }} + {{- end }} + {{- if .Values.backend.securityContext.enabled }} + securityContext: + {{ toYaml .Values.backend.securityContext | nindent 12 }} + {{- end }} + + volumes: + {{- if .Values.frontend.volumes }} + {{ toYaml .Values.frontend.volumes | indent 8 }} + {{- end }} + {{- if .Values.backend.volumes }} + {{ toYaml .Values.backend.volumes | indent 8 }} + {{- end }} diff --git a/charts/secobserve/tests/__snapshot__/ingress_test.yaml.snap b/charts/secobserve/tests/__snapshot__/ingress_test.yaml.snap new file mode 100644 index 000000000..1b6e5e409 --- /dev/null +++ b/charts/secobserve/tests/__snapshot__/ingress_test.yaml.snap @@ -0,0 +1,44 @@ +check manifest with custom host: + 1: | + apiVersion: networking.k8s.io/v1 + kind: Ingress + metadata: + annotations: + alb.ingress.kubernetes.io/healthcheck-path: / + alb.ingress.kubernetes.io/listen-ports: '[{"HTTPS": 443}]' + alb.ingress.kubernetes.io/ssl-policy: ELBSecurityPolicy-TLS13-1-2-FIPS-2023-04 + labels: + app.kubernetes.io/instance: test-release + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/version: 1.30.1 + com.secobserve.tenant: secobserve + helm.sh/chart: secobserve-1.0.0 + name: secobserve-ing + namespace: test-namespace + spec: + ingressClassName: alb + rules: + - host: test.example.com + http: + paths: + - backend: + service: + name: secobserve-svc + port: + name: backend-http + path: /api/ + pathType: Prefix + - backend: + service: + name: secobserve-svc + port: + name: backend-http + path: /django-static/ + pathType: Prefix + - backend: + service: + name: secobserve-svc + port: + name: frontend-http + path: / + 
pathType: Prefix diff --git a/charts/secobserve/tests/__snapshot__/statefulset_test.yaml.snap b/charts/secobserve/tests/__snapshot__/statefulset_test.yaml.snap new file mode 100644 index 000000000..2a56c59aa --- /dev/null +++ b/charts/secobserve/tests/__snapshot__/statefulset_test.yaml.snap @@ -0,0 +1,895 @@ +should exclude initContainer dbchecker if disabled: + 1: | + apiVersion: apps/v1 + kind: StatefulSet + metadata: + labels: + app.kubernetes.io/instance: RELEASE-NAME + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/version: 1.30.1 + com.secobserve.tenant: secobserve + helm.sh/chart: secobserve-1.0.0 + name: secobserve-sts + namespace: NAMESPACE + spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/instance: RELEASE-NAME + com.secobserve.tenant: secobserve + serviceName: secobserve-svc + template: + metadata: + annotations: null + labels: + app.kubernetes.io/instance: RELEASE-NAME + com.secobserve.tenant: secobserve + spec: + containers: + - env: + - name: API_BASE_URL + value: https://secobserve.dev/api + - name: OIDC_ENABLED + value: "true" + - name: OIDC_AUTHORITY + value: https://oidc.secobserve.dev + - name: OIDC_CLIENT_ID + value: secobserve + - name: OIDC_REDIRECT_URI + value: https://secobserve.dev/ + - name: OIDC_POST_LOGOUT_REDIRECT_URI + value: https://secobserve.dev/ + image: ghcr.io/secobserve/secobserve-frontend:1.48.0 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 3000 + initialDelaySeconds: 300 + timeoutSeconds: 5 + name: frontend + ports: + - containerPort: 3000 + name: frontend-http + readinessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 3000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + resources: + limits: + cpu: 500m + memory: 1000Mi + requests: + cpu: 500m + memory: 1000Mi + volumeMounts: null + - env: + - name: ADMIN_USER + value: admin + - name: ADMIN_PASSWORD + valueFrom: + secretKeyRef: + key: password + name: secobserve-secrets + - name: ADMIN_EMAIL + value: admin@admin.com + - name: DATABASE_ENGINE + value: django.db.backends.postgresql + - name: DATABASE_HOST + value: release-name-postgresql + - name: DATABASE_PORT + value: "5432" + - name: DATABASE_DB + value: secobserve + - name: DATABASE_USER + value: secobserve + - name: DATABASE_PASSWORD + valueFrom: + secretKeyRef: + key: password + name: release-name-postgresql + - name: ALLOWED_HOSTS + value: secobserve.dev + - name: CORS_ALLOWED_ORIGINS + value: https://secobserve.dev + - name: DJANGO_SECRET_KEY + valueFrom: + secretKeyRef: + key: django_secret_key + name: secobserve-secrets + - name: FIELD_ENCRYPTION_KEY + valueFrom: + secretKeyRef: + key: field_encryption_key + name: secobserve-secrets + - name: OIDC_AUTHORITY + value: https://oidc.secobserve.dev + - name: OIDC_CLIENT_ID + value: secobserve + - name: OIDC_USERNAME + value: preferred_username + - name: OIDC_FIRST_NAME + value: given_name + - name: OIDC_LAST_NAME + value: family_name + - name: OIDC_FULL_NAME + value: preferred_username + - name: OIDC_EMAIL + value: email + - name: OIDC_GROUPS + value: groups + image: ghcr.io/secobserve/secobserve-backend:1.48.0 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 5000 + initialDelaySeconds: 300 + timeoutSeconds: 5 + name: backend + ports: + - containerPort: 5000 + name: backend-http + readinessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 5000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + resources: + limits: + cpu: 1000m + memory: 1500Mi + 
requests: + cpu: 1000m + memory: 1500Mi + volumeMounts: null + volumes: null + updateStrategy: + type: RollingUpdate +should include affinity if specified: + 1: | + apiVersion: apps/v1 + kind: StatefulSet + metadata: + labels: + app.kubernetes.io/instance: RELEASE-NAME + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/version: 1.30.1 + com.secobserve.tenant: secobserve + helm.sh/chart: secobserve-1.0.0 + name: secobserve-sts + namespace: NAMESPACE + spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/instance: RELEASE-NAME + com.secobserve.tenant: secobserve + serviceName: secobserve-svc + template: + metadata: + annotations: null + labels: + app.kubernetes.io/instance: RELEASE-NAME + com.secobserve.tenant: secobserve + spec: + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: kubernetes.io/e2e-az-name + operator: In + values: + - e2e-az1 + - e2e-az2 + containers: + - env: + - name: API_BASE_URL + value: https://secobserve.dev/api + - name: OIDC_ENABLED + value: "true" + - name: OIDC_AUTHORITY + value: https://oidc.secobserve.dev + - name: OIDC_CLIENT_ID + value: secobserve + - name: OIDC_REDIRECT_URI + value: https://secobserve.dev/ + - name: OIDC_POST_LOGOUT_REDIRECT_URI + value: https://secobserve.dev/ + image: ghcr.io/secobserve/secobserve-frontend:1.48.0 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 3000 + initialDelaySeconds: 300 + timeoutSeconds: 5 + name: frontend + ports: + - containerPort: 3000 + name: frontend-http + readinessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 3000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + resources: + limits: + cpu: 500m + memory: 1000Mi + requests: + cpu: 500m + memory: 1000Mi + volumeMounts: null + - env: + - name: ADMIN_USER + value: admin + - name: ADMIN_PASSWORD + valueFrom: + secretKeyRef: + key: password + name: secobserve-secrets + - name: ADMIN_EMAIL + value: admin@admin.com + - name: DATABASE_ENGINE + value: django.db.backends.postgresql + - name: DATABASE_HOST + value: release-name-postgresql + - name: DATABASE_PORT + value: "5432" + - name: DATABASE_DB + value: secobserve + - name: DATABASE_USER + value: secobserve + - name: DATABASE_PASSWORD + valueFrom: + secretKeyRef: + key: password + name: release-name-postgresql + - name: ALLOWED_HOSTS + value: secobserve.dev + - name: CORS_ALLOWED_ORIGINS + value: https://secobserve.dev + - name: DJANGO_SECRET_KEY + valueFrom: + secretKeyRef: + key: django_secret_key + name: secobserve-secrets + - name: FIELD_ENCRYPTION_KEY + valueFrom: + secretKeyRef: + key: field_encryption_key + name: secobserve-secrets + - name: OIDC_AUTHORITY + value: https://oidc.secobserve.dev + - name: OIDC_CLIENT_ID + value: secobserve + - name: OIDC_USERNAME + value: preferred_username + - name: OIDC_FIRST_NAME + value: given_name + - name: OIDC_LAST_NAME + value: family_name + - name: OIDC_FULL_NAME + value: preferred_username + - name: OIDC_EMAIL + value: email + - name: OIDC_GROUPS + value: groups + image: ghcr.io/secobserve/secobserve-backend:1.48.0 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 5000 + initialDelaySeconds: 300 + timeoutSeconds: 5 + name: backend + ports: + - containerPort: 5000 + name: backend-http + readinessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 5000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + resources: + limits: + cpu: 1000m + memory: 1500Mi + 
requests: + cpu: 1000m + memory: 1500Mi + volumeMounts: null + initContainers: + - command: + - sh + - -c + - | + echo 'Waiting for Database to become ready...' + + until printf "." && nc -z -w 2 release-name-postgresql 5432; do + sleep 2; + done; + + echo 'Database OK ✓' + image: busybox:latest + imagePullPolicy: IfNotPresent + name: dbchecker + resources: + limits: + cpu: 20m + memory: 32Mi + requests: + cpu: 20m + memory: 32Mi + securityContext: + allowPrivilegeEscalation: false + runAsGroup: 1000 + runAsNonRoot: true + runAsUser: 1000 + volumes: null + updateStrategy: + type: RollingUpdate +should include initContainer dbchecker if enabled: + 1: | + apiVersion: apps/v1 + kind: StatefulSet + metadata: + labels: + app.kubernetes.io/instance: RELEASE-NAME + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/version: 1.30.1 + com.secobserve.tenant: secobserve + helm.sh/chart: secobserve-1.0.0 + name: secobserve-sts + namespace: NAMESPACE + spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/instance: RELEASE-NAME + com.secobserve.tenant: secobserve + serviceName: secobserve-svc + template: + metadata: + annotations: null + labels: + app.kubernetes.io/instance: RELEASE-NAME + com.secobserve.tenant: secobserve + spec: + containers: + - env: + - name: API_BASE_URL + value: https://secobserve.dev/api + - name: OIDC_ENABLED + value: "true" + - name: OIDC_AUTHORITY + value: https://oidc.secobserve.dev + - name: OIDC_CLIENT_ID + value: secobserve + - name: OIDC_REDIRECT_URI + value: https://secobserve.dev/ + - name: OIDC_POST_LOGOUT_REDIRECT_URI + value: https://secobserve.dev/ + image: ghcr.io/secobserve/secobserve-frontend:1.48.0 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 3000 + initialDelaySeconds: 300 + timeoutSeconds: 5 + name: frontend + ports: + - containerPort: 3000 + name: frontend-http + readinessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 3000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + resources: + limits: + cpu: 500m + memory: 1000Mi + requests: + cpu: 500m + memory: 1000Mi + volumeMounts: null + - env: + - name: ADMIN_USER + value: admin + - name: ADMIN_PASSWORD + valueFrom: + secretKeyRef: + key: password + name: secobserve-secrets + - name: ADMIN_EMAIL + value: admin@admin.com + - name: DATABASE_ENGINE + value: django.db.backends.postgresql + - name: DATABASE_HOST + value: release-name-postgresql + - name: DATABASE_PORT + value: "5432" + - name: DATABASE_DB + value: secobserve + - name: DATABASE_USER + value: secobserve + - name: DATABASE_PASSWORD + valueFrom: + secretKeyRef: + key: password + name: release-name-postgresql + - name: ALLOWED_HOSTS + value: secobserve.dev + - name: CORS_ALLOWED_ORIGINS + value: https://secobserve.dev + - name: DJANGO_SECRET_KEY + valueFrom: + secretKeyRef: + key: django_secret_key + name: secobserve-secrets + - name: FIELD_ENCRYPTION_KEY + valueFrom: + secretKeyRef: + key: field_encryption_key + name: secobserve-secrets + - name: OIDC_AUTHORITY + value: https://oidc.secobserve.dev + - name: OIDC_CLIENT_ID + value: secobserve + - name: OIDC_USERNAME + value: preferred_username + - name: OIDC_FIRST_NAME + value: given_name + - name: OIDC_LAST_NAME + value: family_name + - name: OIDC_FULL_NAME + value: preferred_username + - name: OIDC_EMAIL + value: email + - name: OIDC_GROUPS + value: groups + image: ghcr.io/secobserve/secobserve-backend:1.48.0 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 5000 + 
initialDelaySeconds: 300 + timeoutSeconds: 5 + name: backend + ports: + - containerPort: 5000 + name: backend-http + readinessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 5000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + resources: + limits: + cpu: 1000m + memory: 1500Mi + requests: + cpu: 1000m + memory: 1500Mi + volumeMounts: null + initContainers: + - command: + - sh + - -c + - | + echo 'Waiting for Database to become ready...' + + until printf "." && nc -z -w 2 localhost 5432; do + sleep 2; + done; + + echo 'Database OK ✓' + image: busybox:latest + imagePullPolicy: IfNotPresent + name: dbchecker + resources: + limits: + cpu: 20m + memory: 32Mi + requests: + cpu: 20m + memory: 32Mi + securityContext: + allowPrivilegeEscalation: false + runAsGroup: 1000 + runAsNonRoot: true + runAsUser: 1000 + volumes: null + updateStrategy: + type: RollingUpdate +should render frontend and backend containers: + 1: | + apiVersion: apps/v1 + kind: StatefulSet + metadata: + labels: + app.kubernetes.io/instance: RELEASE-NAME + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/version: 1.30.1 + com.secobserve.tenant: secobserve + helm.sh/chart: secobserve-1.0.0 + name: secobserve-sts + namespace: NAMESPACE + spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/instance: RELEASE-NAME + com.secobserve.tenant: secobserve + serviceName: secobserve-svc + template: + metadata: + annotations: null + labels: + app.kubernetes.io/instance: RELEASE-NAME + com.secobserve.tenant: secobserve + spec: + containers: + - env: + - name: API_BASE_URL + value: https://secobserve.dev/api + - name: OIDC_ENABLED + value: "true" + - name: OIDC_AUTHORITY + value: https://oidc.secobserve.dev + - name: OIDC_CLIENT_ID + value: secobserve + - name: OIDC_REDIRECT_URI + value: https://secobserve.dev/ + - name: OIDC_POST_LOGOUT_REDIRECT_URI + value: https://secobserve.dev/ + image: docker.io/secobserve/frontend:v1.2.3 + imagePullPolicy: Always + livenessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 3000 + initialDelaySeconds: 300 + timeoutSeconds: 5 + name: frontend + ports: + - containerPort: 3000 + name: frontend-http + readinessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 3000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + resources: + limits: + cpu: 500m + memory: 1000Mi + requests: + cpu: 500m + memory: 1000Mi + volumeMounts: null + - env: + - name: ADMIN_USER + value: admin + - name: ADMIN_PASSWORD + valueFrom: + secretKeyRef: + key: password + name: secobserve-secrets + - name: ADMIN_EMAIL + value: admin@admin.com + - name: DATABASE_ENGINE + value: django.db.backends.postgresql + - name: DATABASE_HOST + value: release-name-postgresql + - name: DATABASE_PORT + value: "5432" + - name: DATABASE_DB + value: secobserve + - name: DATABASE_USER + value: secobserve + - name: DATABASE_PASSWORD + valueFrom: + secretKeyRef: + key: password + name: release-name-postgresql + - name: ALLOWED_HOSTS + value: secobserve.dev + - name: CORS_ALLOWED_ORIGINS + value: https://secobserve.dev + - name: DJANGO_SECRET_KEY + valueFrom: + secretKeyRef: + key: django_secret_key + name: secobserve-secrets + - name: FIELD_ENCRYPTION_KEY + valueFrom: + secretKeyRef: + key: field_encryption_key + name: secobserve-secrets + - name: OIDC_AUTHORITY + value: https://oidc.secobserve.dev + - name: OIDC_CLIENT_ID + value: secobserve + - name: OIDC_USERNAME + value: preferred_username + - name: OIDC_FIRST_NAME + value: given_name + - name: OIDC_LAST_NAME + value: family_name + - name: OIDC_FULL_NAME + value: 
preferred_username + - name: OIDC_EMAIL + value: email + - name: OIDC_GROUPS + value: groups + image: docker.io/secobserve/backend:v1.2.3 + imagePullPolicy: Always + livenessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 5000 + initialDelaySeconds: 300 + timeoutSeconds: 5 + name: backend + ports: + - containerPort: 5000 + name: backend-http + readinessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 5000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + resources: + limits: + cpu: 1000m + memory: 1500Mi + requests: + cpu: 1000m + memory: 1500Mi + volumeMounts: null + initContainers: + - command: + - sh + - -c + - | + echo 'Waiting for Database to become ready...' + + until printf "." && nc -z -w 2 release-name-postgresql 5432; do + sleep 2; + done; + + echo 'Database OK ✓' + image: busybox:latest + imagePullPolicy: IfNotPresent + name: dbchecker + resources: + limits: + cpu: 20m + memory: 32Mi + requests: + cpu: 20m + memory: 32Mi + securityContext: + allowPrivilegeEscalation: false + runAsGroup: 1000 + runAsNonRoot: true + runAsUser: 1000 + volumes: null + updateStrategy: + type: RollingUpdate +should render nodeSelector if provided: + 1: | + apiVersion: apps/v1 + kind: StatefulSet + metadata: + labels: + app.kubernetes.io/instance: RELEASE-NAME + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/version: 1.30.1 + com.secobserve.tenant: secobserve + helm.sh/chart: secobserve-1.0.0 + name: secobserve-sts + namespace: NAMESPACE + spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/instance: RELEASE-NAME + com.secobserve.tenant: secobserve + serviceName: secobserve-svc + template: + metadata: + annotations: null + labels: + app.kubernetes.io/instance: RELEASE-NAME + com.secobserve.tenant: secobserve + spec: + containers: + - env: + - name: API_BASE_URL + value: https://secobserve.dev/api + - name: OIDC_ENABLED + value: "true" + - name: OIDC_AUTHORITY + value: https://oidc.secobserve.dev + - name: OIDC_CLIENT_ID + value: secobserve + - name: OIDC_REDIRECT_URI + value: https://secobserve.dev/ + - name: OIDC_POST_LOGOUT_REDIRECT_URI + value: https://secobserve.dev/ + image: ghcr.io/secobserve/secobserve-frontend:1.48.0 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 3000 + initialDelaySeconds: 300 + timeoutSeconds: 5 + name: frontend + ports: + - containerPort: 3000 + name: frontend-http + readinessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 3000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + resources: + limits: + cpu: 500m + memory: 1000Mi + requests: + cpu: 500m + memory: 1000Mi + volumeMounts: null + - env: + - name: ADMIN_USER + value: admin + - name: ADMIN_PASSWORD + valueFrom: + secretKeyRef: + key: password + name: secobserve-secrets + - name: ADMIN_EMAIL + value: admin@admin.com + - name: DATABASE_ENGINE + value: django.db.backends.postgresql + - name: DATABASE_HOST + value: release-name-postgresql + - name: DATABASE_PORT + value: "5432" + - name: DATABASE_DB + value: secobserve + - name: DATABASE_USER + value: secobserve + - name: DATABASE_PASSWORD + valueFrom: + secretKeyRef: + key: password + name: release-name-postgresql + - name: ALLOWED_HOSTS + value: secobserve.dev + - name: CORS_ALLOWED_ORIGINS + value: https://secobserve.dev + - name: DJANGO_SECRET_KEY + valueFrom: + secretKeyRef: + key: django_secret_key + name: secobserve-secrets + - name: FIELD_ENCRYPTION_KEY + valueFrom: + secretKeyRef: + key: field_encryption_key + name: secobserve-secrets + - name: OIDC_AUTHORITY 
+ value: https://oidc.secobserve.dev + - name: OIDC_CLIENT_ID + value: secobserve + - name: OIDC_USERNAME + value: preferred_username + - name: OIDC_FIRST_NAME + value: given_name + - name: OIDC_LAST_NAME + value: family_name + - name: OIDC_FULL_NAME + value: preferred_username + - name: OIDC_EMAIL + value: email + - name: OIDC_GROUPS + value: groups + image: ghcr.io/secobserve/secobserve-backend:1.48.0 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 5000 + initialDelaySeconds: 300 + timeoutSeconds: 5 + name: backend + ports: + - containerPort: 5000 + name: backend-http + readinessProbe: + failureThreshold: 12 + httpGet: + path: / + port: 5000 + initialDelaySeconds: 20 + timeoutSeconds: 5 + resources: + limits: + cpu: 1000m + memory: 1500Mi + requests: + cpu: 1000m + memory: 1500Mi + volumeMounts: null + initContainers: + - command: + - sh + - -c + - | + echo 'Waiting for Database to become ready...' + + until printf "." && nc -z -w 2 release-name-postgresql 5432; do + sleep 2; + done; + + echo 'Database OK ✓' + image: busybox:latest + imagePullPolicy: IfNotPresent + name: dbchecker + resources: + limits: + cpu: 20m + memory: 32Mi + requests: + cpu: 20m + memory: 32Mi + securityContext: + allowPrivilegeEscalation: false + runAsGroup: 1000 + runAsNonRoot: true + runAsUser: 1000 + nodeSelector: + disktype: ssd + volumes: null + updateStrategy: + type: RollingUpdate diff --git a/charts/secobserve/tests/ingress_test.yaml b/charts/secobserve/tests/ingress_test.yaml new file mode 100644 index 000000000..73e927706 --- /dev/null +++ b/charts/secobserve/tests/ingress_test.yaml @@ -0,0 +1,16 @@ +suite: test ingress +templates: + - ingress.yaml +release: + name: test-release + namespace: test-namespace +tests: + - it: check manifest with custom host + set: + ingress: + enabled: true + ingressClassName: alb + hostname: test.example.com + asserts: + - matchSnapshot: {} + diff --git a/charts/secobserve/tests/statefulset_test.yaml b/charts/secobserve/tests/statefulset_test.yaml new file mode 100644 index 000000000..72d57d776 --- /dev/null +++ b/charts/secobserve/tests/statefulset_test.yaml @@ -0,0 +1,88 @@ +suite: Test secobserve StatefulSet + +templates: + - templates/statefulset.yaml + +tests: + - it: should set correct StatefulSet name + set: + nameOverride: secobserve + asserts: + - equal: + path: metadata.name + value: secobserve-sts + + - it: should set the correct serviceName + set: + nameOverride: secobserve + asserts: + - equal: + path: spec.serviceName + value: secobserve-svc + + - it: should set the correct number of replicas + set: + replicaCount: 2 + asserts: + - equal: + path: spec.replicas + value: 2 + + - it: should render nodeSelector if provided + set: + nodeSelector: + disktype: ssd + asserts: + - matchSnapshot: {} + + - it: should include affinity if specified + set: + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: kubernetes.io/e2e-az-name + operator: In + values: + - e2e-az1 + - e2e-az2 + asserts: + - matchSnapshot: {} + + - it: should render frontend and backend containers + set: + frontend: + image: + registry: docker.io + repository: secobserve/frontend + tag: v1.2.3 + pullPolicy: Always + backend: + image: + registry: docker.io + repository: secobserve/backend + tag: v1.2.3 + pullPolicy: Always + asserts: + - matchSnapshot: {} + + - it: should include initContainer dbchecker if enabled + set: + dbchecker: + enabled: true + hostname: "localhost" + 
port: 5432 + image: + repository: "busybox" + tag: "latest" + pullPolicy: "IfNotPresent" + asserts: + - matchSnapshot: {} + + - it: should exclude initContainer dbchecker if disabled + set: + dbchecker: + enabled: false + asserts: + - matchSnapshot: {} \ No newline at end of file diff --git a/charts/secobserve/values.yaml b/charts/secobserve/values.yaml new file mode 100644 index 000000000..e8e84f42c --- /dev/null +++ b/charts/secobserve/values.yaml @@ -0,0 +1,385 @@ +# -- number of replicas to deploy +# @section -- Pod +replicaCount: 1 + +# @section -- Frontend +frontend: + image: + # -- image registry + # @section -- Frontend + registry: ghcr.io + + # -- image repository + # @section -- Frontend + repository: secobserve/secobserve-frontend + + # -- image tag (uses appVersion value of Chart.yaml if not specified) + # @section -- Frontend + tag: + + # -- image pull policy + # @section -- Frontend + pullPolicy: IfNotPresent + + # -- securityContext to use for frontend container + # @section -- Frontend + securityContext: + enabled: true + allowPrivilegeEscalation: false + runAsUser: 1001 + runAsGroup: 1001 + runAsNonRoot: true + + env: + # -- Base URL for API + # @section -- Frontend + - { name: API_BASE_URL, value: https://secobserve.dev/api } + + # -- enable OIDC authentication + # @section -- Frontend + - { name: OIDC_ENABLED, value: "false" } + + # -- oidc metadata endpoint + # @section -- Frontend + - { name: OIDC_AUTHORITY, value: https://oidc.secobserve.dev } + + # -- OIDC client ID + # @section -- Frontend + - { name: OIDC_CLIENT_ID, value: secobserve } + + # -- OIDC client redirect URL + # @section -- Frontend + - { name: OIDC_REDIRECT_URI, value: https://secobserve.dev/ } + + # -- URI to redirect to after logout + # @section -- Frontend + - { name: OIDC_POST_LOGOUT_REDIRECT_URI, value: https://secobserve.dev/ } + + # -- OIDC prompt + # @section -- Frontend + - { name: OIDC_PROMPT, value: null } + + # -- resource requirements and limits + # @section -- Frontend + resources: + limits: + cpu: 500m + memory: 1000Mi + requests: + cpu: 500m + memory: 1000Mi + + service: + # -- service port + # @section -- Frontend + port: 3000 + +# @section -- Backend +backend: + # -- image registry + # @section -- Backend + image: + registry: ghcr.io + # -- image repository + # @section -- Backend + repository: secobserve/secobserve-backend + + # -- image tag (uses appVersion value of Chart.yaml if not specified) + # @section -- Backend + tag: + + # -- image pull policy + # @section -- Backend + pullPolicy: IfNotPresent + + # -- security context to use for backend pod + # @section -- Backend + securityContext: + enabled: true + allowPrivilegeEscalation: false + runAsUser: 1001 + runAsGroup: 1001 + runAsNonRoot: true + + # @section -- Backend + env: + # -- admin user name + # @section -- Backend + - { name: ADMIN_USER, value: admin } + + # -- admin password + # @section -- Backend + - { + name: ADMIN_PASSWORD, + valueFrom: + { secretKeyRef: { name: secobserve-secrets, key: password } }, + } + + # -- admin email address + # @section -- Backend + - { name: ADMIN_EMAIL, value: admin@admin.com } + + # -- database engine + # @section -- Backend + - { name: DATABASE_ENGINE, value: "django.db.backends.postgresql" } + + # -- database host/service + # @section -- Backend + - { name: DATABASE_HOST, value: secobserve-postgresql } + + # -- database port + # @section -- Backend + - { name: DATABASE_PORT, value: "5432" } + + # -- database name + # @section -- Backend + - { name: DATABASE_DB, value: secobserve } + + # 
-- database user + # @section -- Backend + - { name: DATABASE_USER, value: secobserve } + + # -- database password + # @section -- Backend + - { name: DATABASE_PASSWORD, valueFrom: { + # -- reference to secret containing db credentials + # @section -- Backend + secretKeyRef: { name: secobserve-postgresql, key: password }, + } } + + # -- allowed hosts + # @section -- Backend + - { name: ALLOWED_HOSTS, value: secobserve.dev } + + # -- CORS allowed origins + # @section -- Backend + - { name: CORS_ALLOWED_ORIGINS, value: https://secobserve.dev } + + # -- django secret key + # @section -- Backend + - { name: DJANGO_SECRET_KEY, valueFrom: { + # -- secret name containing the django secret key + # @section -- Backend + secretKeyRef: { name: secobserve-secrets, key: django_secret_key }, + } } + + # -- encryption key for fields + # @section -- Backend + - { name: FIELD_ENCRYPTION_KEY, valueFrom: { + # -- secret name containig the field encryption key + # @section -- Backend + secretKeyRef: + { name: secobserve-secrets, key: field_encryption_key }, + } } + + # -- admin OIDC authority + # @section -- Backend + - { name: OIDC_AUTHORITY, value: https://oidc.secobserve.dev } + + # -- OIDC client id + # @section -- Backend + - { name: OIDC_CLIENT_ID, value: secobserve } + + # -- OIDC user name + # @section -- Backend + - { name: OIDC_USERNAME, value: preferred_username } + + # -- OIDC first name + # @section -- Backend + - { name: OIDC_FIRST_NAME, value: given_name } + + # -- OIDC last name + # @section -- Backend + - { name: OIDC_LAST_NAME, value: family_name } + + # -- OIDC full name + # @section -- Backend + - { name: OIDC_FULL_NAME, value: preferred_username } + + # -- OIDC email address + # @section -- Backend + - { name: OIDC_EMAIL, value: email } + + # -- OIDC groups + # @section -- Backend + - { name: OIDC_GROUPS, value: groups } + + # -- resource requirements and limits + # @section -- Backend + resources: + limits: + cpu: 1000m + memory: 1500Mi + requests: + cpu: 1000m + memory: 1500Mi + + service: + # -- service port + # @section -- Backend + port: 5000 + +# -- defines the secobserve http service +# @section -- Service +service: + # -- Service type of service + # @section -- Service + type: ClusterIP + # If you want to use a NodePort service for ALB, uncomment the following line + #type: NodePort + +ingress: + # -- If true, a Kubernetes Ingress resource will be created to the http port of the secobserve Service + # @section -- Ingress + enabled: true + + # -- hostname of ingress + # @section -- Ingress + hostname: "secobserve.dev" + + # -- Example configuration for using an Amazon Load Balancer controller + # ingressClassName: alb + # annotations: + # alb.ingress.kubernetes.io/listen-ports: '[{"HTTPS": 443}]' + # alb.ingress.kubernetes.io/ssl-policy: 'ELBSecurityPolicy-TLS13-1-2-FIPS-2023-04' + # alb.ingress.kubernetes.io/healthcheck-path: / + # @section -- Ingress + ingressClassName: nginx + + # -- annotations to add to ingress + # @section -- Ingress + annotations: + # Use the NGINX ingress class + kubernetes.io/ingress.class: nginx + # Enable SSL redirect for HTTPS + nginx.ingress.kubernetes.io/ssl-redirect: "true" + # Configure a custom timeout for the proxy + nginx.ingress.kubernetes.io/proxy-read-timeout: "600" + nginx.ingress.kubernetes.io/proxy-send-timeout: "600" + # -- # TLS configuration for HTTPS + # @section -- Ingress + # tls: + # secretName: secobserve-tls + +# -- Node labels to select for secobserve pod assignment +# @section -- Pod +nodeSelector: {} + +# -- Toleration labels 
for pod assignment +# @section -- Pod +tolerations: {} + +# -- Sets the affinity for the secobserve pod +# For more information on affinity, see https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#affinity-and-anti-affinity +# @section -- Pod +affinity: {} + +## PostgreSQL chart configuration +## ref: https://github.com/bitnami/charts/blob/main/bitnami/postgresql/values.yaml +## @param postgresql.enabled Switch to enable or disable the PostgreSQL helm chart +## @param postgresql.auth.postgresPassword Password for the "postgres" admin user. Ignored if `auth.existingSecret` with key `postgres-password` is provided +## @param postgresql.auth.username Name for a custom user to create +## @param postgresql.auth.password Password for the custom user to create +## @param postgresql.auth.database Name for a custom database to create +## @param postgresql.auth.existingSecret Name of existing secret to use for PostgreSQL credentials +## @param postgresql.auth.secretKeys.userPasswordKey Name of key in existing secret to use for PostgreSQL credentials. Only used when `auth.existingSecret` is set. +## @param postgresql.architecture PostgreSQL architecture (`standalone` or `replication`) +## +# @section -- Postgresql +postgresql: + # -- Switch to enable or disable the PostgreSQL helm chart + # @section -- Postgresql + enabled: true + + # -- enable postgresql subchart + # @section -- Postgresql + image: + repository: bitnamilegacy/postgresql + + # -- enable postgresql subchart + # @section -- Postgresql + volumePermissions: + image: + repository: bitnamilegacy/os-shell + + # -- enable postgresql subchart + # @section -- Postgresql + metrics: + image: + repository: bitnamilegacy/postgres-exporter + + # -- enable postgresql subchart + # @section -- Postgresql + auth: + # -- Password for the "postgres" admin user. Ignored if `auth.existingSecret` with key `postgres-password` is provided + # @section -- Postgresql + postgresPassword: "" + + # -- Name for a custom user to create + # @section -- Postgresql + username: secobserve + + # -- Password for the custom user to create + # @section -- Postgresql + password: "" + + # -- Name for a custom database to create + # @section -- Postgresql + database: secobserve + + # -- Name of existing secret to use for PostgreSQL credentials + # @section -- Postgresql + existingSecret: "" + + secretKeys: + # -- Name of key in existing secret to use for PostgreSQL credentials. Only used when `auth.existingSecret` is set. 
+ # @section -- Postgresql + userPasswordKey: password + + # -- PostgreSQL architecture (`standalone` or `replication`) + # @section -- Postgresql + architecture: standalone + +dbchecker: + # -- enable dbchecker init container + # @section -- dbchecker + enabled: true + + # -- enable dbchecker init container + # @section -- dbchecker + hostname: secobserve-postgresql + + # -- enable dbchecker init container + # @section -- dbchecker + port: 5432 + + image: + # -- Docker image used to check Database readiness at startup + # @section -- dbchecker + repository: busybox + + # -- Image tag for the dbchecker image + # @section -- dbchecker + tag: latest + + # -- Image pull policy for the dbchecker image + # @section -- dbchecker + pullPolicy: IfNotPresent + + # -- SecurityContext for the dbchecker container + # @section -- dbchecker + securityContext: + allowPrivilegeEscalation: false + runAsUser: 1001 + runAsGroup: 1001 + runAsNonRoot: true + + # -- Resource requests and limits for the dbchecker container + # @section -- dbchecker + resources: + requests: + cpu: "20m" + memory: "32Mi" + limits: + cpu: "20m" + memory: "32Mi" diff --git a/docker-compose-dev-keycloak.yml b/docker-compose-dev-keycloak.yml index fce6a3e39..b736abd9c 100644 --- a/docker-compose-dev-keycloak.yml +++ b/docker-compose-dev-keycloak.yml @@ -21,6 +21,7 @@ services: OIDC_CLIENT_ID: ${SO_OIDC_CLIENT_ID:-secobserve} OIDC_REDIRECT_URI: ${SO_OIDC_REDIRECT_URI:-http://localhost:3000} OIDC_POST_LOGOUT_REDIRECT_URI: ${SO_OIDC_POST_LOGOUT_REDIRECT_URI:-http://localhost:3000} + OIDC_PROMPT: ${SO_OIDC_PROMPT:-} backend: build: @@ -56,6 +57,9 @@ services: OIDC_FULL_NAME: ${SO_OIDC_FULL_NAME:-} OIDC_EMAIL: ${SO_OIDC_EMAIL:-email} OIDC_GROUPS: ${SO_OIDC_GROUPS:-groups} + # --- Email --- + EMAIL_HOST: mailhog + EMAIL_PORT: 1025 # --- Development --- USE_DOCKER: yes volumes: @@ -63,10 +67,9 @@ services: - dev_huey_data:/var/lib/huey ports: - "8000:8000" - command: /start postgres: - image: postgres:15.10-alpine + image: postgres:15.16-alpine volumes: - dev_postgres_data:/var/lib/postgresql/data environment: @@ -84,14 +87,14 @@ services: - "8025:8025" keycloak: - image: keycloak/keycloak:26.0.8 + image: keycloak/keycloak:26.5.3 environment: - - KEYCLOAK_ADMIN=admin - - KEYCLOAK_ADMIN_PASSWORD=admin + - KC_BOOTSTRAP_ADMIN_USERNAME=admin + - KC_BOOTSTRAP_ADMIN_PASSWORD=admin - KC_HTTPS_ENABLED=false - KC_HOSTNAME_STRICT_HTTPS=false volumes: - - ./keycloak:/opt/keycloak/data/ + - ./keycloak:/opt/keycloak/data/import ports: - 8080:8080 - command: start-dev + command: start-dev --import-realm diff --git a/docker-compose-dev-mysql.yml b/docker-compose-dev-mysql.yml index 2571f298f..2299e9273 100644 --- a/docker-compose-dev-mysql.yml +++ b/docker-compose-dev-mysql.yml @@ -1,6 +1,7 @@ volumes: dev_huey_data: dev_mysql_data: + dev_node_modules: services: frontend: @@ -12,7 +13,6 @@ services: - "3000:3000" volumes: - ./frontend/:/app - - /app/node_modules command : sh -c "npm run start -- --host" backend: @@ -46,10 +46,9 @@ services: - dev_huey_data:/var/lib/huey ports: - "8000:8000" - command: /start mysql: - image: mysql:8.4.3 + image: mysql:8.4.8 environment: MYSQL_RANDOM_ROOT_PASSWORD: ${SO_MYSQL_RANDOM_ROOT_PASSWORD:-yes} MYSQL_DATABASE: ${SO_MYSQL_DATABASE:-secobserve} diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index febbba8f7..11e789edc 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -13,7 +13,6 @@ services: - "3000:3000" volumes: - ./frontend/:/app - - dev_node_modules:/app/node_modules command : sh -c 
"npm run start -- --host" backend: @@ -47,10 +46,9 @@ services: - dev_huey_data:/var/lib/huey ports: - "8000:8000" - command: /start - + postgres: - image: postgres:15.10-alpine + image: postgres:15.16-alpine volumes: - dev_postgres_data:/var/lib/postgresql/data environment: diff --git a/docker-compose-playwright.yml b/docker-compose-playwright.yml index b152aa168..0010de442 100644 --- a/docker-compose-playwright.yml +++ b/docker-compose-playwright.yml @@ -18,6 +18,8 @@ services: OIDC_CLIENT_ID: dummy OIDC_REDIRECT_URI: dummy OIDC_POST_LOGOUT_REDIRECT_URI: dummy + OIDC_PROMPT: null + networks: - secobserve @@ -35,6 +37,7 @@ services: # --- Gunicorn --- GUNICORN_WORKERS: 3 GUNICORN_THREADS: 10 + GUNICORN_LIMIT_REQUEST_FIELD_SIZE: 16380 # --- Database --- DATABASE_ENGINE: django.db.backends.sqlite3 # --- Security --- @@ -58,7 +61,7 @@ services: playwright: - image: mcr.microsoft.com/playwright:v1.49.1 + image: mcr.microsoft.com/playwright:v1.58.2 depends_on: - frontend environment: diff --git a/docker-compose-prod-mysql.yml b/docker-compose-prod-mysql.yml index a84df3bca..925e575de 100644 --- a/docker-compose-prod-mysql.yml +++ b/docker-compose-prod-mysql.yml @@ -10,7 +10,7 @@ networks: services: traefik: - image: "traefik:v3.3.2" + image: "traefik:v3.6.8" container_name: "prod_traefik" command: - "--log.level=INFO" @@ -35,7 +35,7 @@ services: - traefik frontend: - image: maibornwolff/secobserve-frontend:1.26.0 + image: ghcr.io/secobserve/secobserve-frontend:1.48.0 container_name: "prod_secobserve_frontend" labels: - "traefik.enable=true" @@ -50,11 +50,12 @@ services: OIDC_REDIRECT_URI: ${SO_OIDC_REDIRECT_URI:-http://secobserve.localhost} OIDC_POST_LOGOUT_REDIRECT_URI: ${SO_OIDC_POST_LOGOUT_REDIRECT_URI:-http://secobserve.localhost} OIDC_SCOPE: ${SO_OIDC_SCOPE:-openid profile email} + OIDC_PROMPT: ${SO_OIDC_PROMPT:-} networks: - traefik backend: - image: maibornwolff/secobserve-backend:1.26.0 + image: ghcr.io/secobserve/secobserve-backend:1.48.0 container_name: "prod_secobserve_backend" labels: - "traefik.enable=true" @@ -70,6 +71,7 @@ services: # --- Gunicorn --- GUNICORN_WORKERS: ${SO_GUNICORN_WORKERS:-3} GUNICORN_THREADS: ${SO_GUNICORN_THREADS:-10} + GUNICORN_LIMIT_REQUEST_FIELD_SIZE: ${SO_GUNICORN_LIMIT_REQUEST_FIELD_SIZE:-16380} # --- Database --- DATABASE_ENGINE: ${SO_DATABASE_ENGINE:-django.db.backends.mysql} DATABASE_HOST: ${SO_DATABASE_HOST:-mysql} @@ -96,7 +98,7 @@ services: - database mysql: - image: mysql:8.4.3 + image: mysql:8.4.8 container_name: "prod_mysql" environment: MYSQL_RANDOM_ROOT_PASSWORD: ${SO_MYSQL_RANDOM_ROOT_PASSWORD:-yes} diff --git a/docker-compose-prod-postgres.yml b/docker-compose-prod-postgres.yml index 822fbd13d..20987632d 100644 --- a/docker-compose-prod-postgres.yml +++ b/docker-compose-prod-postgres.yml @@ -10,7 +10,7 @@ networks: services: traefik: - image: "traefik:v3.3.2" + image: "traefik:v3.6.8" container_name: "prod_traefik" command: - "--log.level=INFO" @@ -35,7 +35,7 @@ services: - traefik frontend: - image: maibornwolff/secobserve-frontend:1.26.0 + image: ghcr.io/secobserve/secobserve-frontend:1.48.0 container_name: "prod_secobserve_frontend" labels: - "traefik.enable=true" @@ -50,11 +50,12 @@ services: OIDC_REDIRECT_URI: ${SO_OIDC_REDIRECT_URI:-http://secobserve.localhost} OIDC_POST_LOGOUT_REDIRECT_URI: ${SO_OIDC_POST_LOGOUT_REDIRECT_URI:-http://secobserve.localhost} OIDC_SCOPE: ${SO_OIDC_SCOPE:-openid profile email} + OIDC_PROMPT: ${SO_OIDC_PROMPT:-} networks: - traefik backend: - image: maibornwolff/secobserve-backend:1.26.0 + image: 
ghcr.io/secobserve/secobserve-backend:1.48.0 container_name: "prod_secobserve_backend" labels: - "traefik.enable=true" @@ -70,6 +71,7 @@ services: # --- Gunicorn --- GUNICORN_WORKERS: ${SO_GUNICORN_WORKERS:-3} GUNICORN_THREADS: ${SO_GUNICORN_THREADS:-10} + GUNICORN_LIMIT_REQUEST_FIELD_SIZE: ${SO_GUNICORN_LIMIT_REQUEST_FIELD_SIZE:-16380} # --- Database --- DATABASE_ENGINE: ${SO_DATABASE_ENGINE:-django.db.backends.postgresql} DATABASE_HOST: ${SO_DATABASE_HOST:-postgres} @@ -96,7 +98,7 @@ services: - database postgres: - image: postgres:15.10-alpine + image: postgres:15.16-alpine container_name: "prod_postgres" environment: POSTGRES_DB: ${SO_POSTGRES_DB:-secobserve} diff --git a/docker-compose-prod-test.yml b/docker-compose-prod-test.yml index 666787302..be978f850 100644 --- a/docker-compose-prod-test.yml +++ b/docker-compose-prod-test.yml @@ -17,6 +17,7 @@ services: OIDC_CLIENT_ID: ${SO_OIDC_CLIENT_ID:-dummy} OIDC_REDIRECT_URI: ${SO_OIDC_REDIRECT_URI:-http://localhost:3000} OIDC_POST_LOGOUT_REDIRECT_URI: ${SO_OIDC_POST_LOGOUT_REDIRECT_URI:-http://localhost:3000} + OIDC_PROMPT: ${SO_OIDC_PROMPT:-} ports: - "3000:3000" @@ -37,6 +38,7 @@ services: # --- Gunicorn --- GUNICORN_WORKERS: ${SO_GUNICORN_WORKERS:-3} GUNICORN_THREADS: ${SO_GUNICORN_THREADS:-10} + GUNICORN_LIMIT_REQUEST_FIELD_SIZE: ${SO_GUNICORN_LIMIT_REQUEST_FIELD_SIZE:-16380} # --- Database --- DATABASE_ENGINE: ${SO_DATABASE_ENGINE:-django.db.backends.postgresql} DATABASE_HOST: ${SO_DATABASE_HOST:-postgres} @@ -62,7 +64,7 @@ services: - "5000:5000" mysql: - image: mysql:8.4.3 + image: mysql:8.4.8 profiles: - mysql environment: @@ -76,7 +78,7 @@ services: - "3306:3306" postgres: - image: postgres:15.10-alpine + image: postgres:15.16-alpine environment: POSTGRES_DB: ${SO_POSTGRES_DB:-secobserve} POSTGRES_USER: ${SO_POSTGRES_USER:-secobserve} diff --git a/docker-compose-unittests.yml b/docker-compose-unittests.yml index 98761bc67..aec3bfc49 100644 --- a/docker-compose-unittests.yml +++ b/docker-compose-unittests.yml @@ -27,4 +27,5 @@ services: OIDC_FULL_NAME: name OIDC_EMAIL: email OIDC_GROUPS: groups - command: /start + volumes: + - ./backend:/home diff --git a/docker/backend/dev/django/Dockerfile b/docker/backend/dev/django/Dockerfile index 7c6c5baf1..87cf838c9 100644 --- a/docker/backend/dev/django/Dockerfile +++ b/docker/backend/dev/django/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12.8-alpine@sha256:54bec49592c8455de8d5983d984efff76b6417a6af9b5dcc8d0237bf6ad3bd20 AS python-base +FROM python:3.14-alpine@sha256:faee120f7885a06fcc9677922331391fa690d911c020abb9e8025ff3d908e510 AS python-base # Python build stage FROM python-base AS python-build-stage @@ -9,7 +9,7 @@ ARG BUILD_ENVIRONMENT=dev # kics-scan ignore-block # versions of dependencies from distribution are ok RUN apk add --no-cache --virtual .build-deps \ - ca-certificates gcc postgresql-dev linux-headers musl-dev libffi-dev mariadb-dev + ca-certificates gcc libffi-dev linux-headers mariadb-dev musl-dev postgresql-dev # install dependencies with poetry COPY ./backend/poetry_requirements.txt ./ @@ -23,7 +23,7 @@ ENV POETRY_NO_INTERACTION=1 \ WORKDIR / COPY backend/pyproject.toml backend/poetry.lock ./ -RUN poetry install --extras "dev" --no-root && rm -rf $POETRY_CACHE_DIR +RUN poetry install --extras "dev" --no-root && rm -rf "$POETRY_CACHE_DIR" # Python 'run' stage @@ -43,11 +43,6 @@ WORKDIR ${APP_HOME} ARG user=django ARG group=django -# Install binaries and libs for PostgreSQL -# kics-scan ignore-block -# versions of dependencies from distribution are ok -RUN apk add 
--no-cache --virtual postgresql-libs postgresql-client mariadb-dev mysql-client bash - # copy dependencies from build stage ENV VIRTUAL_ENV=/.venv \ PATH="/.venv/bin:$PATH" @@ -55,7 +50,6 @@ COPY --from=python-build-stage ${VIRTUAL_ENV} ${VIRTUAL_ENV} # copy startup scripts COPY ./docker/backend/dev/django/entrypoint /entrypoint -COPY ./docker/backend/dev/django/start /start COPY ./docker/backend/dev/django/wait-for-it.sh /wait-for-it.sh # copy application code to WORKDIR @@ -63,11 +57,14 @@ COPY ./backend ${APP_HOME} # use dev settings COPY ./backend/config/settings/dev.py ${APP_HOME}/config/settings/dist.py +# kics-scan ignore-block +# versions of dependencies from distribution are ok RUN addgroup -g 1001 -S ${group} \ && adduser -u 1001 -S ${user} -G ${group} \ + # Install binaries and libs for PostgreSQL and MySQL + && apk add --no-cache --virtual postgresql-libs postgresql-client mariadb-dev mysql-client bash \ # make startup scripts executable && chmod +x /entrypoint \ - && chmod +x /start \ && chmod +x /wait-for-it.sh \ # insert commit id for status endpoint && sed -i 's/placeholder/'"${COMMIT_ID}"'/g' ${APP_HOME}/application/commons/api/views.py \ diff --git a/docker/backend/dev/django/entrypoint b/docker/backend/dev/django/entrypoint index 95786c381..34ccabe20 100644 --- a/docker/backend/dev/django/entrypoint +++ b/docker/backend/dev/django/entrypoint @@ -13,4 +13,36 @@ if [ "${DATABASE_ENGINE}" != "django.db.backends.sqlite3" ]; then >&2 echo 'Database is available' fi -exec "$@" +python manage.py migrate + +echo "Admin user: ${ADMIN_USER}" +ADMIN_EXISTS=$(echo "SELECT * from access_control_user;" | python manage.py dbshell | grep "${ADMIN_USER}" || :) +if [ ! -z "$ADMIN_EXISTS" ] +then + echo "Initialization detected that the admin user ${ADMIN_USER} already exists in your database." +else + if [ -z "${ADMIN_PASSWORD}" ] + then + export ADMIN_PASSWORD="$(cat /dev/urandom | LC_ALL=C tr -dc a-zA-Z0-9 | head -c 22)" + echo "Admin password: ${ADMIN_PASSWORD}" + fi + + echo "Initialization detected no admin user ${ADMIN_USER}, creating it now in your database." + +cat < + + + + + + + + + + + diff --git a/docs/assets/images/secobserve_integrations.drawio b/docs/assets/images/secobserve_integrations.drawio index 82ac07519..06fe0b385 100644 --- a/docs/assets/images/secobserve_integrations.drawio +++ b/docs/assets/images/secobserve_integrations.drawio @@ -1,196 +1,275 @@ - + - + - - + + - - + + - - + + - - + + - - + + - - - - + + + + + + + + + + + + + + + + + + + - - + + - - - - + + + + - - - - + + + + - - + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - + + - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + - - - - + + + + + + + + + + + - - + + - - - - - - - + + + + + + + + + - - + + - - - - - - - + + + + + + + + + + + + + + + + + - - - + + + + + + + + + - - - + + + + + + + + + + + + + + + + + + + + diff --git a/docs/assets/images/secobserve_integrations.svg b/docs/assets/images/secobserve_integrations.svg index 30a66d530..f7cb204d0 100644 --- a/docs/assets/images/secobserve_integrations.svg +++ b/docs/assets/images/secobserve_integrations.svg @@ -1,3 +1,3 @@ -
[Rendered text labels of docs/assets/images/secobserve_integrations.svg — the SVG markup was lost in extraction. The updated integrations diagram adds Components, SBOM import (CycloneDX, SPDX), OSV (osv.dev) vulnerability scanning, ecosyste.ms, cvss-bt exploit information, CycloneDX VEX and LicenseLynx to the integrations already shown (issues, notifications, exports, code link, OpenID Connect authentication, CSAF/OpenVEX import/export, EPSS, SPDX licenses, ScanCode LicenseDB, FIRST.org CVSS calculator).]
\ No newline at end of file diff --git a/docs/assets/images/secobserve_process.drawio b/docs/assets/images/secobserve_process.drawio index 91a1834aa..3177837c9 100644 --- a/docs/assets/images/secobserve_process.drawio +++ b/docs/assets/images/secobserve_process.drawio @@ -1,54 +1,57 @@ - + - + - - + + - + - + - - + + - - + + - - + + - - + + - - - - + - + - + - + - + - + - + + + + + + + diff --git a/docs/assets/images/secobserve_process.svg b/docs/assets/images/secobserve_process.svg index 97df3073e..9bbff9c8f 100644 --- a/docs/assets/images/secobserve_process.svg +++ b/docs/assets/images/secobserve_process.svg @@ -1,3 +1,3 @@ -
[Rendered text labels of docs/assets/images/secobserve_process.svg — the SVG markup was lost in extraction. The updated process diagram renames the "SCA" scanner group to "SCA / SBOM" (adding CycloneDX and SPDX), marks SARIF as the report format of the SAST scanners, and changes the manual steps to "View observations and licenses" and "Assess observations and licenses".]
\ No newline at end of file diff --git a/docs/assets/images/secobserve_white.svg b/docs/assets/images/secobserve_white.svg index eb4d07d5d..36a3ad0a4 100644 --- a/docs/assets/images/secobserve_white.svg +++ b/docs/assets/images/secobserve_white.svg @@ -1,14 +1,14 @@ - + style="font-size:617.931px;line-height:386.207px;font-family:'Bitstream Vera Sans';letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke-width:15.4483px" + transform="translate(-56.370621,1.3427734e-5)"> + style="font-family:AllRoundGothicW01-Bold;-inkscape-font-specification:'AllRoundGothicW01-Bold, Normal';fill:#ffffff;fill-opacity:1" + id="path448" /> + style="font-family:AllRoundGothicW01-Bold;-inkscape-font-specification:'AllRoundGothicW01-Bold, Normal';fill:#ffffff;fill-opacity:1" + id="path450" /> + style="font-family:AllRoundGothicW01-Bold;-inkscape-font-specification:'AllRoundGothicW01-Bold, Normal';fill:#ffffff;fill-opacity:1" + id="path452" /> + style="font-family:AllRoundGothicW01-Bold;-inkscape-font-specification:'AllRoundGothicW01-Bold, Normal';fill:#ffffff;fill-opacity:1" + id="path454" /> + style="font-family:AllRoundGothicW01-Bold;-inkscape-font-specification:'AllRoundGothicW01-Bold, Normal';fill:#ffffff;fill-opacity:1" + id="path456" /> + style="font-family:AllRoundGothicW01-Bold;-inkscape-font-specification:'AllRoundGothicW01-Bold, Normal';fill:#ffffff;fill-opacity:1" + id="path458" /> + style="font-family:AllRoundGothicW01-Bold;-inkscape-font-specification:'AllRoundGothicW01-Bold, Normal';fill:#ffffff;fill-opacity:1" + id="path460" /> + style="font-family:AllRoundGothicW01-Bold;-inkscape-font-specification:'AllRoundGothicW01-Bold, Normal';fill:#ffffff;fill-opacity:1" + id="path462" /> + style="font-family:AllRoundGothicW01-Bold;-inkscape-font-specification:'AllRoundGothicW01-Bold, Normal';fill:#ffffff;fill-opacity:1" + id="path464" /> + style="font-family:AllRoundGothicW01-Bold;-inkscape-font-specification:'AllRoundGothicW01-Bold, Normal';fill:#ffffff;fill-opacity:1" + id="path466" /> + + + + + + + diff --git a/docs/assets/images/secobserve_white_icon.svg b/docs/assets/images/secobserve_white_icon.svg index e7b37a68f..31b946a79 100644 --- a/docs/assets/images/secobserve_white_icon.svg +++ b/docs/assets/images/secobserve_white_icon.svg @@ -1,10 +1,15 @@ - + + + + + + + diff --git a/docs/assets/images/settings_cron_housekeeping.png b/docs/assets/images/settings_cron_housekeeping.png new file mode 100644 index 000000000..1a22d409c Binary files /dev/null and b/docs/assets/images/settings_cron_housekeeping.png differ diff --git a/docs/development/docker_compose.md b/docs/development/docker_compose.md index 530f8d057..652f547c3 100644 --- a/docs/development/docker_compose.md +++ b/docs/development/docker_compose.md @@ -4,29 +4,29 @@ Docker Compose is a tool for defining and running multi-container Docker applica ## Development -* [`docker-compose-dev-keycloak.yml`](https://github.com/MaibornWolff/SecObserve/blob/dev/docker-compose-dev-keycloak.yml) +* [`docker-compose-dev-keycloak.yml`](https://github.com/SecObserve/SecObserve/blob/dev/docker-compose-dev-keycloak.yml) - Starts the PostgreSQL database, the SecObserve backend, Keycloak and Mailhog - The frontend is only started, when the parameter `--profile frontend` is given - - The file `keyloak/h2/keycloakdb.mv.db.dist` should be copied to `keyloak/h2/keycloakdb.mv.db` before starting the services, to work with predefined settings. 
There is an administrator configured (username: `admin`, password: `admin`) and a regular user for Secobserve (username: `keycloak_user`, password: `keycloak`). -* [`docker-compose-dev-mysql.yml`](https://github.com/MaibornWolff/SecObserve/blob/dev/docker-compose-dev-mysql.yml) + - A predefined realm calles `secobserve` is imported on start-up. There is an administrator configured (username: `admin`, password: `admin`) and a regular user for SecObserve (username: `keycloak_user`, password: `keycloak`). +* [`docker-compose-dev-mysql.yml`](https://github.com/SecObserve/SecObserve/blob/dev/docker-compose-dev-mysql.yml) - Starts the MySQL database, as well as SecObserve's backend and frontend - Backend and frontend are build automatically if necessary and are started in development mode with hot reloading -* [`docker-compose-dev.yml`](https://github.com/MaibornWolff/SecObserve/blob/dev/docker-compose-dev.yml) +* [`docker-compose-dev.yml`](https://github.com/SecObserve/SecObserve/blob/dev/docker-compose-dev.yml) - Starts the PostgreSQL database, as well as SecObserve's backend and frontend - Backend and frontend are build automatically if necessary and are started in development mode with hot reloading -* [`docker-compose-playwright.yml`](https://github.com/MaibornWolff/SecObserve/blob/dev/docker-compose-playwright.yml) +* [`docker-compose-playwright.yml`](https://github.com/SecObserve/SecObserve/blob/dev/docker-compose-playwright.yml) - Starts the end-to-end tests with Playwright -* [`docker-compose-prod-test.yml`](https://github.com/MaibornWolff/SecObserve/blob/dev/docker-compose-prod-test.yml) +* [`docker-compose-prod-test.yml`](https://github.com/SecObserve/SecObserve/blob/dev/docker-compose-prod-test.yml) - Starts the PostgreSQL database, as well as SecObserve's backend and frontend - Backend and frontend are build automatically if necessary with the production Dockerfiles -* [`docker-compose-unittests.yml`](https://github.com/MaibornWolff/SecObserve/blob/dev/docker-compose-unittests.yml) +* [`docker-compose-unittests.yml`](https://github.com/SecObserve/SecObserve/blob/dev/docker-compose-unittests.yml) - Starts the unit tests for the backend -* [`docker-compose.yml`](https://github.com/MaibornWolff/SecObserve/blob/dev/docker-compose.yml) +* [`docker-compose.yml`](https://github.com/SecObserve/SecObserve/blob/dev/docker-compose.yml) - This is a link to `docker-compose-dev.yml` and is used as a default for the `docker compose` command ## Production See the [installation](../getting_started/installation.md) guide how to use the productive Docker Compose files. -* [`docker-compose-prod-mysql.yml`](https://github.com/MaibornWolff/SecObserve/blob/main/docker-compose-prod-mysql.yml) -* [`docker-compose-prod-postgres.yml`](https://github.com/MaibornWolff/SecObserve/blob/main/docker-compose-prod-postgres.yml) +* [`docker-compose-prod-mysql.yml`](https://github.com/SecObserve/SecObserve/blob/main/docker-compose-prod-mysql.yml) +* [`docker-compose-prod-postgres.yml`](https://github.com/SecObserve/SecObserve/blob/main/docker-compose-prod-postgres.yml) diff --git a/docs/getting_started/about.md b/docs/getting_started/about.md index 541a60528..f4ad15124 100644 --- a/docs/getting_started/about.md +++ b/docs/getting_started/about.md @@ -17,18 +17,29 @@ The aim of SecObserve is to make vulnerability scanning and vulnerability manage All actions and templates run the scanner, upload the results into SecObserve and make the results of the scans available for download as artefacts in JSON format. 
- The sources of the GitHub actions and GitLab CI templates can be found in [https://github.com/MaibornWolff/secobserve_actions_templates](https://github.com/MaibornWolff/secobserve_actions_templates). + The sources of the GitHub actions and GitLab CI templates can be found in [https://github.com/SecObserve/secobserve_actions_templates](https://github.com/SecObserve/secobserve_actions_templates). ![How it works](../assets/images/secobserve_process.svg){ width="700" style="display: block; margin: 0 auto" } -## Procedure +## Workflows -A sequence diagram explains the basic procedure how to work with SecObserve: +There are 2 main workflows when working with SecObserve: + +1. **Consuming results of vulnerability scanners:** In this workflow, a pipeline runs one or more vulnerability scanners and uploads the results into SecObserve. The developers can then view and assess the observations and take action accordingly. + + This workflow is typically used in a CI/CD pipeline, but can also be used manually. + +2. **Consuming SBOMs for license and vulnerability management:** Here a Software Bill of Materials (SBOM) is ingested into SecObserve to provide insights into the components and licenses used in the product. It is a precondition for scanning the components for vulnerabilities with the OSV scanner. + + This workflow can also be used in a CI/CD pipeline or manually. + +#### Consuming results of vulnerability scanners ``` mermaid sequenceDiagram autonumber actor Developer + alt Pipeline flow Developer ->> Repository: Check in source activate Repository Repository ->> Pipeline: Start pipeline @@ -46,6 +57,12 @@ sequenceDiagram Pipeline ->> Pipeline: Stop or continue Pipeline -->> Developer: Feedback deactivate Pipeline + else Manual flow + Developer ->> SecObserve: Upload scan results + activate SecObserve + SecObserve ->> SecObserve: Apply rules + deactivate SecObserve + end Developer ->> SecObserve: View observations Developer ->> SecObserve: Assess observations Developer ->> Developer: Implement fixes @@ -54,7 +71,7 @@ sequenceDiagram 1. A developer implements a feature and checks in his code to the repository 2. The repository starts a pipeline for the change -3. The pipeline runs several of the supported vulnerability scanners. To make integration easy, SecObserve provides predefined templates for the most relevant scanners, see [GitHub actions and GitLab CI templates](../integrations/github_actions_and_templates.md). +3. The pipeline runs one or more of the supported vulnerability scanners. To make integration easy, SecObserve provides predefined GitHub actions and GitLab templates for the most relevant scanners, see [GitHub actions and GitLab CI templates](../integrations/github_actions_and_templates.md). 4. The scanners store their results in files, which are [uploaded into SecObserve](../usage/import_observations.md). 5. SecObserve applies [rules](../usage/rule_engine.md) to adjust severity and status of observations during the upload process. 6. The pipeline can call SecObserve to check the status of the [security gate](../usage/security_gates.md). @@ -63,7 +80,44 @@ sequenceDiagram * `0` if the security gate has **passed** or is **disabled** 8. The pipeline can stop or continue based on the exit code, depending on the configuration of the check step. Default is to stop the pipeline if the security gate has failed. 9. The developer can see the result of the pipeline. -10. The developer can now look at the observations in SecObserve, to see what has changed ... -11. ... 
and if necessary [assess observations](../usage/assess_observations.md) to change their status (eg. false positive or risk accepted) or severity. -12. If fixes are needed to close vulnerabilities, the developer will implement the fixes ... -13. ... and check them in to the repository. Now the cycle starts again. +10. Alternatively, the developer can upload the results manually into SecObserve ... +11. ... and SecObserve applies the [rules](../usage/rule_engine.md) to adjust severity and status of observations. +12. The developer can now look at the observations in SecObserve, to see what has changed ... +13. ... and if necessary [assess observations](../usage/assess_observations.md) to change their status (eg. false positive or risk accepted) or severity. +14. If fixes are needed to close vulnerabilities, the developer will implement the fixes ... +15. ... and check them in to the repository. Now the cycle starts again. + + +#### Consuming SBOMs for license and vulnerability management + +``` mermaid +sequenceDiagram + autonumber + actor Developer + alt Pipeline flow + Developer ->> Pipeline: Start pipeline + activate Pipeline + Pipeline ->> Pipeline: Generate SBOM + Pipeline ->> SecObserve: Upload SBOM + deactivate Pipeline + else Manual flow + Developer ->> SecObserve: Upload SBOM + end + Note right of Developer: License management + SecObserve ->> SecObserve: Apply license policy + Developer ->> SecObserve: View licenses + Developer ->> SecObserve: Adjust license policy + Note right of Developer: Vulnerability management + Developer ->> SecObserve: Run manual OSV scan + SecObserve ->> SecObserve: Run nightly OSV scan +``` + +1. A developer starts a pipeline ... +2. ... to generate a Software Bill of Materials (SBOM) for the project. +3. The pipeline [uploads the SBOM](../usage/upload_sbom.md) into SecObserve. +4. Alternatively, the developer can upload the SBOM manually into SecObserve. +5. SecObserve applies the [license policy](../usage/license_management.md#evaluation-of-licenses) to the SBOM to check for license compliance. +6. The developer can [view the licenses and their evaluation results](../usage/license_management.md#managing-licenses-in-products) in SecObserve ... +7. ... and [adjust the license policy](../usage/license_management.md#managing-license-policies) if necessary. +8. The developer can run a [manual OSV scan](../integrations/osv_scan.md#manual-scan) to check for vulnerabilities in the components of the SBOM. +9. Alternatively, SecObserve can run a [nightly OSV scan](../integrations/osv_scan.md#automatic-scan) to check for vulnerabilities in the components of the SBOM. diff --git a/docs/getting_started/acknowledgements.md b/docs/getting_started/acknowledgements.md new file mode 100644 index 000000000..493636fe9 --- /dev/null +++ b/docs/getting_started/acknowledgements.md @@ -0,0 +1,4 @@ +# Acknowledgements + +* A huge shoutout to all the fabulous Open Source libraries that made this project possible. Too many to mention them all here, but they are listed in the current [SBOM](https://github.com/SecObserve/SecObserve/blob/main/sbom/sbom_1.48.0.json). +* The binoculars logo has been derived from the National Park Service's [Birding/Wildlife Viewing icon](https://github.com/nationalparkservice/symbol-library/blob/gh-pages/src/standalone/birding-wildlife-viewing-black-30.svg), published under the [BSD-3-Clause license](https://github.com/nationalparkservice/symbol-library/blob/gh-pages/LICENSE.txt). 
diff --git a/docs/getting_started/anatomy_of_an_observation.md b/docs/getting_started/anatomy_of_an_observation.md index e10ab4f4c..d97365fb5 100644 --- a/docs/getting_started/anatomy_of_an_observation.md +++ b/docs/getting_started/anatomy_of_an_observation.md @@ -12,6 +12,10 @@ As for the severity, if a [rule](../usage/rule_engine.md) matches the observation or a user sets a different status in an [assessment](../usage/assess_observations.md), these changes will override the status set by the import. * **Title** and **Description** are short and long explanations what the observation is about. * Scanners might suggest a **Recommendation** *(not shown in the screenshot)*. +* If there is a recommendation for a component with a version number, the **Update impact score** is calculated: + * If there is a major update, the difference between the current major version and the recommended major version is multiplied by 100 + * Otherwise if there is a minor update, the difference between the current minor version and the recommended minor version is multiplied by 10 + * Otherwise if there is a patch update, the *update impact score* is the difference between the current patch version and the recommended minor version ## Vulnerability diff --git a/docs/getting_started/configuration.md b/docs/getting_started/configuration.md index 6d4e77130..aa0d371f5 100644 --- a/docs/getting_started/configuration.md +++ b/docs/getting_started/configuration.md @@ -24,6 +24,7 @@ A part of the configuration is done with environment variables, which need to be | `FIELD_ENCRYPTION_KEY` | mandatory | Key to encrypt fields like the JWT secret. See [Generating an Encryption Key](https://gitlab.com/lansharkconsulting/django/django-encrypted-model-fields#generating-an-encryption-key) how to generate the key. | | `GUNICORN_WORKERS` | optional | Number of worker processes for the Gunicorn web server, see [Gunicorn documentation](https://docs.gunicorn.org/en/stable/design.html#how-many-workers). Default is 3. | | `GUNICORN_THREADS` | optional | Number of worker threads for the Gunicorn web server, default is 10. | +| `GUNICORN_LIMIT_REQUEST_FIELD_SIZE` | optional | Limits the allowed size of an HTTP request header field, default is 16380. | | `OIDC_AUTHORITY` | mandatory | The authority is a URL that hosts the OpenID configuration well-known endpoint. | | `OIDC_CLIENT_ID` | mandatory | The client ID is the unique *Application (client) ID* assigned to your app by the OpenID Connect provider when the app was registered. | | `OIDC_USERNAME` | mandatory | The claim that contains the username to find or create the user. | @@ -45,6 +46,7 @@ A part of the configuration is done with environment variables, which need to be | `OIDC_REDIRECT_URI` | mandatory | The redirect URI is the URI the identity provider will send the security tokens back to. To be set with the URL of the frontend. | | `OIDC_POST_LOGOUT_REDIRECT_URI` | mandatory | The post logout redirect URI is the URI that will be called after logout. To be set with the URL of the frontend. | | `OIDC_SCOPE` | optional | OpenID Connect (OIDC) scopes are used by an application during authentication to authorize access to a user's details, like name or email. If the variable is not set, the standard scopes `openid profile email` will be used. | +| `OIDC_PROMPT` | optional | The prompt parameter allows to request specific interactions with the user during the authentication process, values can be `none`, `login`, `consent` and `select_account`. 
Default is not to set the prompt parameter. | All the `OIDC_*` environment variables are needed for technical reasons. If `OIDC_ENABLE` is set to `false`, the other `OIDC_*` environment variables can be set to `dummy` or something similar. diff --git a/docs/getting_started/features.md b/docs/getting_started/features.md index 15a260f48..698e119fe 100644 --- a/docs/getting_started/features.md +++ b/docs/getting_started/features.md @@ -19,7 +19,7 @@ | Feature | Supported | |---------|:------------:| -| [Import of license information from CycloneDX and SPDX SBOMs](../usage/license_management.md#managing-licenses-in-products) | :material-plus-circle-outline: | +| [Import of components with license information from CycloneDX and SPDX SBOMs](../usage/upload_sbom.md) | :material-plus-circle-outline: | | [Flexible license policies to evaluate the impact of different license conditions](../usage/license_management.md#managing-license-policies) | :material-plus-circle-outline: | | [Organize licenses with similar conditions in license groups](../usage/license_management.md#managing-license-groups) | :material-plus-circle-outline: | @@ -30,11 +30,13 @@ | [Import from many SAST, SCA, DAST, infrastructure and secrets scanners](../integrations/supported_scanners.md) | :material-plus-circle-outline: | | [GitLab CI integration of scanners with predefined templates](../integrations/github_actions_and_templates.md#examplary-pipeline-for-gitlab-ci-templates)
[GitHub integration of scanners with predefined actions](../integrations/github_actions_and_templates.md#examplary-workflow-for-github-actions) | :material-plus-circle-outline: | | [Data enrichment from Exploit Prediction Scoring System (EPSS)](../integrations/epss.md) | :material-plus-circle-outline: | +| [Data enrichment with exploit information](../integrations/exploit_information.md) | :material-plus-circle-outline: | | [Always up-to-date SPDX licenses](../integrations/license_data.md#spdx-licenses) | :material-plus-circle-outline: | | [License groups generated from ScanCode LicenseDB data](../integrations/license_data.md#scancode-licensedb) | :material-plus-circle-outline: | | [Direct link to source code](../integrations/source_code_repositories.md) | :material-plus-circle-outline: | | [Export vulnerabilities to issue trackers (Jira, GitLab, GitHub)](../integrations/issue_trackers.md) | :material-plus-circle-outline: | | [Import/export vulnerabilities from/to VEX documents (CSAF, OpenVEX)](../integrations/vex.md) | :material-plus-circle-outline: | +| [Vulnerability scanning from OSV database](../integrations/osv_scan.md) | :material-plus-circle-outline: | | [Export of data to Microsoft Excel and CSV](../integrations/observations_export.md) | :material-plus-circle-outline: | | [Export metrics to CodeCharta](../integrations/codecharta.md) | :material-plus-circle-outline: | | [Notifications to Microsoft Teams, Slack and email](../integrations/notifications.md) | :material-plus-circle-outline: | diff --git a/docs/getting_started/installation.md b/docs/getting_started/installation.md index 17fe370db..59d453f40 100644 --- a/docs/getting_started/installation.md +++ b/docs/getting_started/installation.md @@ -1,5 +1,10 @@ # Installation +!!! warning + + Both provided installation options serve as templates for productive use only. Even though they can run *out of the box*, they will need proper configuration for the requirements of the environment they will be installed in. This includes additional hardening and security measures. + + ## Docker Compose SecObserve provides 2 Docker Compose files as templates for productive use: `docker-compose-prod-mysql.yml` and `docker-compose-prod-postgres.yml`. Both start [Traefik](https://doc.traefik.io/traefik/v3.0/) as an edge router as well as the SecObserve frontend and backend plus a database (either MySQL or PostgreSQL). 
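For orientation, the two variables introduced elsewhere in this changeset surface in the production Compose templates through the usual `${SO_…:-default}` indirection. A minimal sketch of the relevant `environment` fragments (service definitions abbreviated, not a complete Compose file):

```yaml
services:
  frontend:
    environment:
      # new: request a specific OIDC interaction (none, login, consent, select_account); empty by default
      OIDC_PROMPT: ${SO_OIDC_PROMPT:-}
  backend:
    environment:
      GUNICORN_WORKERS: ${SO_GUNICORN_WORKERS:-3}
      GUNICORN_THREADS: ${SO_GUNICORN_THREADS:-10}
      # new: maximum allowed size of a single HTTP request header field
      GUNICORN_LIMIT_REQUEST_FIELD_SIZE: ${SO_GUNICORN_LIMIT_REQUEST_FIELD_SIZE:-16380}
```

Operators only need to export the matching `SO_*` variables (or place them in an `.env` file next to the Compose file) to override the defaults.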
@@ -11,105 +16,8 @@ Without any changes to the Docker Compose file, 3 URL's are available: * **Traefik**: [http://traefik.localhost](http://traefik.localhost) (dashboard) -``` yaml title="docker-compose-prod-postgres.yml" -volumes: - prod_postgres_data: - -networks: - traefik: - database: - -services: - - traefik: - image: "traefik:v3.0" - container_name: "traefik" - command: - - "--log.level=INFO" - - "--api.dashboard=true" - - "--providers.docker=true" - - "--providers.docker.exposedbydefault=false" - - "--entrypoints.web.address=:80" - labels: - - "traefik.enable=true" - # - "traefik.http.middlewares.traefik-ipallowlist.ipallowlist.sourcerange=172.18.0.1/24" - # - "traefik.http.routers.api.middlewares=traefik-ipallowlist@docker" - - "traefik.http.routers.api.entrypoints=web" - - "traefik.http.routers.api.rule=Host(`traefik.localhost`)" - - "traefik.http.routers.api.service=api@internal" - ports: - - "80:80" - volumes: - - "/var/run/docker.sock:/var/run/docker.sock:ro" - networks: - - default - - frontend: - image: maibornwolff/secobserve-frontend:1.26.0 - labels: - - "traefik.enable=true" - - "traefik.http.routers.frontend.rule=Host(`secobserve.localhost`)" - - "traefik.http.routers.frontend.entrypoints=web" - environment: - API_BASE_URL: ${SO_API_BASE_URL:-http://secobserve-backend.localhost/api} - OIDC_ENABLE: ${SO_OIDC_ENABLE:-false} - OIDC_AUTHORITY: ${SO_OIDC_AUTHORITY:-dummy} - OIDC_CLIENT_ID: ${SO_OIDC_CLIENT_ID:-dummy} - OIDC_REDIRECT_URI: ${SO_OIDC_REDIRECT_URI:-http://secobserve.localhost} - OIDC_POST_LOGOUT_REDIRECT_URI: ${SO_OIDC_POST_LOGOUT_REDIRECT_URI:-http://secobserve.localhost} - OIDC_SCOPE: ${SO_OIDC_SCOPE:-openid profile email} - networks: - - traefik - - backend: - image: maibornwolff/secobserve-backend:1.26.0 - labels: - - "traefik.enable=true" - - "traefik.http.routers.backend.rule=Host(`secobserve-backend.localhost`)" - - "traefik.http.routers.backend.entrypoints=web" - depends_on: - - postgres - environment: - # --- Admin user --- - ADMIN_USER: ${SO_ADMIN_USER:-admin} - ADMIN_PASSWORD: ${SO_ADMIN_PASSWORD:-admin} - ADMIN_EMAIL: ${SO_ADMIN_EMAIL:-admin@example.com} - # --- Database --- - DATABASE_ENGINE: ${SO_DATABASE_ENGINE:-django.db.backends.postgresql} - DATABASE_HOST: ${SO_DATABASE_HOST:-postgres} - DATABASE_PORT: ${SO_DATABASE_PORT:-5432} - DATABASE_DB: ${SO_DATABASE_DB:-secobserve} - DATABASE_USER: ${SO_DATABASE_USER:-secobserve} - DATABASE_PASSWORD: ${SO_DATABASE_PASSWORD:-secobserve} - # --- Security --- - ALLOWED_HOSTS: ${SO_ALLOWED_HOSTS:-secobserve-backend.localhost} - CORS_ALLOWED_ORIGINS: ${SO_CORS_ALLOWED_ORIGINS:-http://secobserve.localhost} - DJANGO_SECRET_KEY: ${SO_DJANGO_SECRET_KEY:-NxYPEF5lNGgk3yonndjSbwP77uNJxOvfKTjF5aVBqsHktNlf1wfJHHvJ8iifk32r} - FIELD_ENCRYPTION_KEY: ${SO_FIELD_ENCRYPTION_KEY:-DtlkqVb3wlaVdJK_BU-3mB4wwuuf8xx8YNInajiJ7GU=} - # --- OpenID Connect --- - OIDC_AUTHORITY: ${SO_OIDC_AUTHORITY:-} - OIDC_CLIENT_ID: ${SO_OIDC_CLIENT_ID:-} - OIDC_USERNAME: ${SO_OIDC_USERNAME:-} - OIDC_FIRST_NAME: ${SO_OIDC_FIRST_NAME:-} - OIDC_LAST_NAME: ${SO_OIDC_LAST_NAME:-} - OIDC_FULL_NAME: ${SO_OIDC_FULL_NAME:-} - OIDC_EMAIL: ${SO_OIDC_EMAIL:-} - OIDC_GROUPS: ${SO_OIDC_GROUPS:-} - command: /start - networks: - - traefik - - database - - postgres: - image: postgres:15.2-alpine - environment: - POSTGRES_DB: ${SO_POSTGRES_DB:-secobserve} - POSTGRES_USER: ${SO_POSTGRES_USER:-secobserve} - POSTGRES_PASSWORD: ${SO_POSTGRES_PASSWORD:-secobserve} - volumes: - - prod_postgres_data:/var/lib/postgresql/data - networks: - - database +```include 
{language=yaml title="docker-compose-prod-postgres.yml"} +docker-compose-prod-postgres.yml ``` #### Configuration for Traefik @@ -132,3 +40,35 @@ Some values should be changed for productive use, to avoid using the default val * The database structure is initialized with the first start of the backend container. * The URLs for frontend and backend are available after approximately 30 seconds, after the healthcheck of the containers has been running for the first time. + +## Kubernetes + +SecObserve provides a Helm chart as a template for productive use. The default values will work if the release name is `secobserve` and the frontend will be accessible with [https://secobserve.dev/](https://secobserve.dev/). + +#### Database + +The PostgreSQL database is provided by Bitnami's Helm chart. Bitnami doesn't provide updates for their free tier anymore, see [Upcoming changes to the Bitnami Catalog](https://github.com/bitnami/charts?tab=readme-ov-file#%EF%B8%8F-important-notice-upcoming-changes-to-the-bitnami-catalog) and the Docker image is pulled from the `bitnamilegacy` repository. + +This is ok to test the Kubernetes installation, but not suitable for production use. A productive environment has to use an update-to-date database, e.g. installed as an operator like [CloudNativePG](https://cloudnative-pg.io/) or a managed service of a cloud provider. + +If the provided database is used and the chart is installed with a release name different from `secobserve`, all occurrences of `secobserve-postgresql` in the chart have to be changed to `-postgresql`. + +#### Secrets + +Three values are read from a secret, which has to be set up manually before installing the chart: + +* `ADMIN_PASSWORD` +* `DJANGO_SECRET_KEY` +* `FIELD_ENCRYPTION_KEY` + +The command to setup the secret can look like this: + +``` +kubectl create secret generic secobserve-secrets \ + --namespace ... \ + --from-literal=password='...' \ + --from-literal=django_secret_key='...' \ + --from-literal=field_encryption_key='...' +``` + +See [Configuration](configuration.md#backend) for more information how to set these values. diff --git a/docs/getting_started/upgrading.md b/docs/getting_started/upgrading.md index fc324bf1f..2ed7a6c6f 100644 --- a/docs/getting_started/upgrading.md +++ b/docs/getting_started/upgrading.md @@ -10,6 +10,62 @@ * There will be specific upgrade instructions if necessary, e.g. when there are new configuration parameters. +## Release 1.47.0 + +**Breaking changes** + +* The OIDC attribute `prompt` is now configurable and it is not set as default. To return to the former behaviour, you have to set the environment variable `OIDC_PROMPT=select_account` for the frontend in your installation. For further details search for `prompt` in the [OpenID specification](https://openid.net/specs/openid-connect-core-1_0.html). + + +## Release 1.46.0 + +**Breaking changes** + +* The results of API list endpoints for `products/`, `license_components/`, `licences/`, `license_groups/` and `license_policies/` contained some attributes that were used to streamline the user interface for single instances but are not necessary for list views and needed a lot of interactions with the database. These attributes have been removed from the result sets for performance optimization. + + +## Release 1.42.0 + +!!! warning + The database migration in version 1.42.0 did not work on some installations with MySQL databases and aborted with an error message. MySQL users should skip this version and update directly to 1.43.0. 
+ + +**Breaking changes** + +!!! info + The location of the Docker images has been changed with release 1.42.0, they are now stored in a GitHub container registry: + + * **ghcr.io/secobserve/secobserve-backend** + * **ghcr.io/secobserve/secobserve-frontend** + + Please adjust your pull statements accordingly. + + +## Release 1.40.0 + +**Breaking changes** + +* The field `[origin_]component_purl_namespace` has been removed from the APIs for `observations`, `license_components` and `components`. Users of the API shall parse the `[origin_]component_purl` if they need any of its attributes. + +## Release 1.38.0 + +**Noteable change** + +* Microsoft is rotating the root certificate for the flexible Azure Database for MySQL see [https://learn.microsoft.com/en-us/azure/mysql/flexible-server/concepts-root-certificate-rotation](https://learn.microsoft.com/en-us/azure/mysql/flexible-server/concepts-root-certificate-rotation). This release contains the new certificates. + +## Release 1.37.0 + +**Breaking changes** + +* The API for `license_components` has been changed, due to the rename of the existing license fields to `imported_declared_license_...` in [https://github.com/SecObserve/SecObserve/pull/3229](https://github.com/SecObserve/SecObserve/pull/3229). + +## Release 1.30.0 + +**Noteable change** + +* If multiple licenses have been found for a component, they are now evaluated like an `AND` expression. If for example one license is `Allowed` and the other one is `Forbidden`, the component is evaluated as `Forbidden`. An explicit rule in a License Policy is not necessary anymore. This new behaviour comes into effect with the next import of components. +* There is now an explicit menu in the UI and an API endpoint to import SBOMs to get all components with their licenses and dependencies, see [Upload SBOMs](../usage/upload_sbom.md). + ## Release 1.26.0 **Breaking changes** diff --git a/docs/integrations/api_import.md b/docs/integrations/api_import.md index cd8b04530..066e14970 100644 --- a/docs/integrations/api_import.md +++ b/docs/integrations/api_import.md @@ -37,12 +37,16 @@ API imports can be configured to run automatically at a specific time. There is #### General setting -In the `Automatic API import` section of the [Settings](../getting_started/configuration.md#admininistration-in-secobserve) the automatic import can be enabled for the whole SecObserve instance and the hour (in UTC time) and minute can be set, when the automatic API imports will run. A restart of the SecObserve instance is required to apply the changes. +In the `Features` section of the [Settings](../getting_started/configuration.md#admininistration-in-secobserve) the automatic import can be enabled for the whole SecObserve instance. -![Settings automatic API import](../assets/images/screenshot_settings_automatic_api_import.png){ width="60%" style="display: block; margin: 0 auto" } +![Settings automatic API import](../assets/images/screenshot_settings_automatic_api_import.png){ width="80%" style="display: block; margin: 0 auto" } + +The hour (in UTC time) and minute, when the automatic API imports and the [OSV scanning](./osv_scan.md#automatic-scan) will run, can be set in the `Background tasks` section. A restart of the SecObserve instance is required to apply the changes. 
+ +![Settings automatic API import](../assets/images/screenshot_settings_background_api_osv.png){ width="70%" style="display: block; margin: 0 auto" } #### Setting per API configuration Only API configurations that have `Automatic import enabled` turned on will be imported automatically. When the automatic import is enabled for an API configuration, the values for `Branch / Version`, `Service`, `Docker image name:tag`, `Endpoint URL`and `Kubernetes cluster` can be set to be used for the import. -![Automatic API import enabled](../assets/images/screenshot_automatic_api_import_enabled.png){ width="60%" style="display: block; margin: 0 auto" } +![Automatic API import enabled](../assets/images/screenshot_automatic_api_import_enabled.png){ width="80%" style="display: block; margin: 0 auto" } diff --git a/docs/integrations/epss.md b/docs/integrations/epss.md index 561a49b27..a3e94f2ce 100644 --- a/docs/integrations/epss.md +++ b/docs/integrations/epss.md @@ -6,12 +6,6 @@ SecObserve imports the EPSS data and updates all observations with a CVE value w ## Configuration -Per default the task to import the EPSS data and update the observations is scheduled to run every night at 03:00 UTC time. This default can be changed by administrators via the [Settings](../getting_started/configuration.md#admininistration-in-secobserve). The expressions for `BACKGROUND_EPSS_IMPORT_CRONTAB_MINUTE` and `BACKGROUND_EPSS_IMPORT_CRONTAB_HOUR` have to be valid values according to [https://huey.readthedocs.io/en/latest/api.html#crontab](https://huey.readthedocs.io/en/latest/api.html#crontab): +Per default the task to import the EPSS data and update the observations is scheduled to run every night at 03:00 UTC time. This default can be changed by administrators via the **Background tasks** section in the [Settings](../getting_started/configuration.md#admininistration-in-secobserve). Hours are always in UTC time. -* `*` = every distinct value (every minute, every hour) -* `*/n` = run every `n` times, i.e. hours=’*/4’ == 0, 4, 8, 12, 16, 20 -* `n` = run every `n` (minutes 0 - 60, hours 0 - 24) -* `m-n` = run every time m..n -* `m,n` = run on m and n - -Hours are always in UTC time. +![EPSS import cron](../assets/images/screenshot_settings_cron_epss.png){ width="80%" style="display: block; margin: 0 auto" } diff --git a/docs/integrations/exploit_information.md b/docs/integrations/exploit_information.md new file mode 100644 index 000000000..ae45ab531 --- /dev/null +++ b/docs/integrations/exploit_information.md @@ -0,0 +1,22 @@ +# Exploit information + +Several databases and tools collect information about exploits of known vulnerabilities. This information is important to prioritize the remediation of vulnerabilities. The project [cvss-bt](https://github.com/t0sche/cvss-bt) collects information about exploits from various sources which can be imported automatically into SecObserve. The exploit information is linked to the corresponding observations via the CVE Id. 
+ +Exploit information from these sources is made available: + +* [CISA Known Exploited Vulnerabilities Catalog](https://www.cisa.gov/known-exploited-vulnerabilities-catalog) +* [Exploit-DB](https://www.exploit-db.com) +* [Metasploit](https://www.metasploit.com) +* [Nuclei](https://docs.projectdiscovery.io/tools/nuclei/overview) +* [PoC GitHub](https://github.com/nomi-sec/PoC-in-GitHub) +* [VulnCheck KEV](https://vulncheck.com) + +Observations can be filtered by the presence of exploit information and the links to exploit information are displayed in the observation details. + +## Configuration + +In the `Features` section of the [Settings](../getting_started/configuration.md#admininistration-in-secobserve) the import of exploit information can be enabled or disabled for the whole SecObserve instance. Additionally, the maximum age of CVEs can be set. Data for CVEs older than the specified number of days will not be imported. + +![Settings exploit information import](../assets/images/screenshot_settings_feature_exploit_enrichment.png){ width="80%" style="display: block; margin: 0 auto" } + +The import of exploit information runs together with the EPSS import, see [EPSS import / Configuration](./epss.md#configuration). diff --git a/docs/integrations/github_actions_and_templates.md b/docs/integrations/github_actions_and_templates.md index f449d1900..0837aa20c 100644 --- a/docs/integrations/github_actions_and_templates.md +++ b/docs/integrations/github_actions_and_templates.md @@ -4,7 +4,7 @@ Integrating vulnerability scanners in a CI/CD pipeline can be cumbersome. Every All actions and templates run the scanner, import the results into SecObserve and make the report available as an artifact. - The actions and the templates are stored in the repository [https://github.com/MaibornWolff/secobserve_actions_templates](https://github.com/MaibornWolff/secobserve_actions_templates). + The actions and the templates are stored in the repository [https://github.com/SecObserve/secobserve_actions_templates](https://github.com/SecObserve/secobserve_actions_templates). ## Variables @@ -62,10 +62,11 @@ Most of the actions and templates use the same set of variables: | [DrHeader](https://github.com/Santandersecurityresearch/DrHeader) | `actions/DAST/drheader` | `templates/DAST/drheader.yml` | [MIT](https://github.com/Santandersecurityresearch/DrHeader/blob/master/LICENSE) | | [ZAP](https://github.com/zaproxy/zaproxy) | `actions/DAST/zap` | `templates/DAST/zap.yml` | [Apache 2.0](https://github.com/zaproxy/zaproxy/blob/main/LICENSE) | -| Task | GitHub Action | GitLab CI Template | -|---------------------------------------|---------------------------|---------------------------------| +| Task | GitHub Action | GitLab CI Template | +|--------------------------------------|---------------------------|---------------------------------| | Import existing file into SecObserve | `actions/importer` | `templates/importer.yml` | | Check security gate of a product (`exit code 1` if security gate **Failed**, `exit code 0` if security gate **Passed** or **Disabled**) | `actions/check_security_gate` | `templates/check_security_gate.yml` | +| Upload SBOM into SecObserve | `actions/upload_sbom` | `templates/upload_sbom.yml` | All GitHub actions and GitLab CI templates use a pre-built Docker image that contains all scanners and the SecObserve importer. 
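For the new `Upload SBOM into SecObserve` task, a workflow step can be sketched by analogy with the scanner actions shown below — assuming `actions/upload_sbom` accepts the common `so_*` variables; the input carrying the SBOM file path is an assumption and not taken from this changeset:

```yaml
jobs:
  upload_sbom:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Upload SBOM into SecObserve
        uses: SecObserve/secobserve_actions_templates/actions/upload_sbom@main
        with:
          # hypothetical input name, chosen by analogy with the scanner actions
          target: 'sbom/sbom_cyclonedx.json'
          so_api_base_url: ${{ vars.SO_API_BASE_URL }}
          so_api_token: ${{ secrets.SO_API_TOKEN }}
          so_product_name: ${{ vars.SO_PRODUCT_NAME }}
```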
@@ -87,7 +88,7 @@ jobs: uses: actions/checkout@v3 - name: Run Bandit - uses: MaibornWolff/secobserve_actions_templates/actions/SAST/bandit@main + uses: SecObserve/secobserve_actions_templates/actions/SAST/bandit@main with: target: 'backend' report_name: 'backend_bandit.sarif' @@ -96,7 +97,7 @@ jobs: so_product_name: ${{ vars.SO_PRODUCT_NAME }} - name: Run Semgrep - uses: MaibornWolff/secobserve_actions_templates/actions/SAST/semgrep@main + uses: SecObserve/secobserve_actions_templates/actions/SAST/semgrep@main with: target: 'backend' report_name: 'backend_semgrep.json' @@ -106,7 +107,7 @@ jobs: so_product_name: ${{ vars.SO_PRODUCT_NAME }} - name: Run KICS - uses: MaibornWolff/secobserve_actions_templates/actions/SAST/kics@main + uses: SecObserve/secobserve_actions_templates/actions/SAST/kics@main with: target: '.' report_name: 'backend_kics.sarif' @@ -115,7 +116,7 @@ jobs: so_product_name: ${{ vars.SO_PRODUCT_NAME }} - name: Run Checkov - uses: MaibornWolff/secobserve_actions_templates/actions/SAST/checkov@main + uses: SecObserve/secobserve_actions_templates/actions/SAST/checkov@main with: target: '.' report_name: 'backend_checkov.sarif' @@ -124,25 +125,25 @@ jobs: so_product_name: ${{ vars.SO_PRODUCT_NAME }} - name: Run Trivy image - uses: MaibornWolff/secobserve_actions_templates/actions/SCA/trivy_image@main + uses: SecObserve/secobserve_actions_templates/actions/SCA/trivy_image@main with: - target: 'maibornwolff/secobserve-backend:latest' + target: 'ghcr.io/secobserve/secobserve-backend:latest' report_name: 'backend_trivy_image.json' so_api_base_url: ${{ vars.SO_API_BASE_URL }} so_api_token: ${{ secrets.SO_API_TOKEN }} so_product_name: ${{ vars.SO_PRODUCT_NAME }} - name: Run Grype image - uses: MaibornWolff/secobserve_actions_templates/actions/SCA/grype_image@main + uses: SecObserve/secobserve_actions_templates/actions/SCA/grype_image@main with: - target: 'maibornwolff/secobserve-backend:latest' + target: 'ghcr.io/secobserve/secobserve-backend:latest' report_name: 'backend_grype_image.json' so_api_base_url: ${{ vars.SO_API_BASE_URL }} so_api_token: ${{ secrets.SO_API_TOKEN }} so_product_name: ${{ vars.SO_PRODUCT_NAME }} - name: Run Gitleaks - uses: MaibornWolff/secobserve_actions_templates/actions/secrets/gitleaks@main + uses: SecObserve/secobserve_actions_templates/actions/secrets/gitleaks@main with: report_name: 'backend_gitleaks.sarif' so_api_base_url: ${{ vars.SO_API_BASE_URL }} @@ -170,17 +171,17 @@ jobs: ```yaml include: - - "https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/DAST/drheader.yml" - - "https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/DAST/cryptolyzer.yml" - - "https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/SAST/bandit.yml" - - "https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/SAST/checkov.yml" - - "https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/SAST/eslint.yml" - - "https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/SAST/kics.yml" - - "https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/SAST/semgrep.yml" - - "https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/SCA/grype_image.yml" - - "https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/SCA/trivy_filesystem.yml" - - 
"https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/SCA/trivy_image.yml" - - "https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/secrets/gitleaks.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/DAST/drheader.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/DAST/cryptolyzer.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/SAST/bandit.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/SAST/checkov.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/SAST/eslint.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/SAST/kics.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/SAST/semgrep.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/SCA/grype_image.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/SCA/trivy_filesystem.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/SCA/trivy_image.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/secrets/gitleaks.yml" grype_image_backend: extends: .grype_image @@ -313,7 +314,7 @@ jobs: uses: actions/checkout@v4 - name: Run vulnerability scanners - uses: MaibornWolff/secobserve_actions_templates/actions/vulnerability_scanner@main + uses: SecObserve/secobserve_actions_templates/actions/vulnerability_scanner@main with: so_configuration: 'so_configuration.yml' SO_API_TOKEN: ${{ secrets.SO_API_TOKEN }} @@ -323,7 +324,7 @@ jobs: ```yaml include: - - "https://raw.githubusercontent.com/MaibornWolff/secobserve_actions_templates/main/templates/vulnerability_scanner.yml" + - "https://raw.githubusercontent.com/SecObserve/secobserve_actions_templates/main/templates/vulnerability_scanner.yml" vulnerability_scans: stage: test @@ -408,6 +409,6 @@ importer: Some real life examples can be found in the SecObserve GitHub repository: -* [so_configuration_code.yml](https://github.com/MaibornWolff/SecObserve/blob/main/so_configuration_code.yml) used in pipeline [check_vulnerabilities.yml](https://github.com/MaibornWolff/SecObserve/blob/main/.github/workflows/check_vulnerabilities.yml) -* [so_configuration_sca_dev.yml](https://github.com/MaibornWolff/SecObserve/blob/main/so_configuration_sca_dev.yml) used in pipeline [build_push_dev.yml](https://github.com/MaibornWolff/SecObserve/blob/main/.github/workflows/build_push_dev.yml#L53-L58) -* [so_configuration_endpoints.yml](https://github.com/MaibornWolff/SecObserve/blob/main/so_configuration_endpoints.yml) used in pipeline [build_push_release.yml](https://github.com/MaibornWolff/SecObserve/blob/main/.github/workflows/build_push_release.yml#L71-L76) +* [so_configuration_code.yml](https://github.com/SecObserve/SecObserve/blob/main/so_configuration_code.yml) +* [so_configuration_sca_dev.yml](https://github.com/SecObserve/SecObserve/blob/main/so_configuration_sca_dev.yml) +* [so_configuration_endpoints.yml](https://github.com/SecObserve/SecObserve/blob/main/so_configuration_endpoints.yml) diff --git a/docs/integrations/issue_trackers.md b/docs/integrations/issue_trackers.md index d4116b428..e063055d3 100644 --- 
a/docs/integrations/issue_trackers.md +++ b/docs/integrations/issue_trackers.md @@ -23,7 +23,7 @@ The parameters for the issue tracker integration are set in the product: | **Type** | Either **GitHub** or **GitLab** or **Jira** | | **Base URL** | The base URL of the issue tracker. For **GitHub** it is `https://api.github.com`, for a self hosted **GitLab** it will be something like `https://gitlab.example.com`, for **Jira** it is `https:\\{organization_name}.atlassian.net`. | | **API key** | An API key must be created in the issue tracker, having the permissions to create and update issues. | -| **Project id** | The path of the repository in its URL in **GitHub** or **GitLab**, e.g. `MaibornWolff/SecObserve`. For **Jira** it is the key of the project. | +| **Project id** | The path of the repository in its URL in **GitHub** or **GitLab**, e.g. `SecObserve/SecObserve`. For **Jira** it is the key of the project. | | **Labels** | A comma separated list of labels, that will be set for the issue. Additional labels can be set in the issue tracker, they will be preserved when the issue is updated. | | **Minimum severity** | *(optional)* Issues will only be exported for observations with a severity that is higher or the same. | | **Username** | *(only for Jira)* The REST API of Jira needs an authentication with username and API key. | diff --git a/docs/integrations/license_data.md b/docs/integrations/license_data.md index 1ddab5de9..c25940802 100644 --- a/docs/integrations/license_data.md +++ b/docs/integrations/license_data.md @@ -12,6 +12,10 @@ Per default the task to import the SPDX licenses is scheduled to run every night Hour is always in UTC time. -## ScanCode LicenseDB +## ScanCode LicenseDB -The [ScanCode LicenseDB](https://scancode-licensedb.aboutcode.org) is a free and open database of mostly all the software licenses, including a category per license. Administrators can import the data from the ScanCode LicenseDB with a button in the list of License Groups. License groups will be created or updated for each category, containing the respective SPDX licenses. +The [ScanCode LicenseDB](https://scancode-licensedb.aboutcode.org) is a free and open database of mostly all the software licenses, including a category per license. License groups will be created with the initial startup of SecObserve for each category of the ScanCode License, containing the respective SPDX licenses, and updated every night after the update of the SPDX licenses. + +## LicenseLynx + +[LicenseLynx](https://licenselynx.org/) is a project focused on deterministically map unknown or ambiguous license names and their canonical license names. Imported license names which are not SPDX licenses will be mapped with LicenseLynx to find the correct SPDX license if possible. diff --git a/docs/integrations/links.md b/docs/integrations/links.md index 7f5518640..a4057f384 100644 --- a/docs/integrations/links.md +++ b/docs/integrations/links.md @@ -15,19 +15,42 @@ If an observation is a vulnerability with a CVE or GHSA number, the Vulnerabilit ## Components -If an observation has a component with a PURL as its origin and the package type is in +If an observation has a component with a PURL as its origin, a link to further information about the package can be shown. The link can go either to [**open/source/insights**](https://deps.dev) or [**ecosyste.ms**](https://ecosyste.ms/), depending on the user settings. 
-* Cargo -* Go -* Maven -* npm -* NuGet -* PyPI +![Link to component](../assets/images/screenshot_links_component.png) -the Component PURL in the `Origins` box will be a link to the **open/source/insights** platform. +#### open/source/insights -![Link to component](../assets/images/screenshot_links_component.png) +If the preference in the user settings has been set to `deps.dev` and the package type is in -**open/source/insights** ([https://deps.dev](https://deps.dev)) provides insights into the open source component containing the vulnerability. It helps you to understand the security, licensing, and maintenance aspects of the component. +* `cargo` (Rust) +* `go` (Go) +* `maven` (Java) +* `npm` (JavaScript / TypeScript) +* `nuget` (.NET) +* `pypi` (Python) + +the Component PURL in the `Origins` box will be a link to the **open/source/insights** platform, which provides insights into the open source component containing the vulnerability. It helps you to understand the security, licensing, and maintenance aspects of the component. ![Link to open/source/insights](../assets/images/screenshot_links_osi.png) + +#### ecosyste.ms + +If the preference in the user settings has been set to `ecosyste.ms` and the package type is in + +* `cargo` (Rust) +* `cocoapods` (iOS / macOS) +* `composer` (PHP) +* `cpan` (Perl) +* `cran` (R) +* `gem` (Ruby) +* `golang` (Go) +* `hackage` (Haskell) +* `maven` (Java) +* `npm` (JavaScript / TypeScript) +* `nuget` (.NET) +* `pypi` (Python) + +the Component PURL in the `Origins` box will be a link to the **ecosyste.ms** platform. + +![Link to ecosyste.ms](../assets/images/screenshot_links_ecosystems.png) diff --git a/docs/integrations/oidc_authentication.md b/docs/integrations/oidc_authentication.md index 816ec4b11..60101b65d 100644 --- a/docs/integrations/oidc_authentication.md +++ b/docs/integrations/oidc_authentication.md @@ -32,6 +32,7 @@ Frontend | `OIDC_CLIENT_ID` | `CLIENT_ID` | | `OIDC_REDIRECT_URI` | `https://secobserve.example.com` | | `OIDC_POST_LOGOUT_REDIRECT_URI` | `https://secobserve.example.com` | +| `OIDC_PROMPT` | [no value] | ## Azure Active Directory @@ -61,6 +62,7 @@ Frontend | `OIDC_CLIENT_ID` | `CLIENT_ID` | | `OIDC_REDIRECT_URI` | `https://secobserve.example.com` | | `OIDC_POST_LOGOUT_REDIRECT_URI` | `https://secobserve.example.com` | +| `OIDC_PROMPT` | [no value] | ## Customize the login dialog @@ -74,3 +76,10 @@ Then the login dialog will only show the `Enterprise sign in` button: ![Enterprise sign in](../assets/images/screenshot_login_enterprise.png) If the user and password is needed to login, e.g. for a local admin user, `#force_user_login` can be added to the URL (like `https://secobserve.example.com/#/login#force_user_login`) to force the user and password fields to be shown. + + +## Clock skew betwenn OIDC server and SecObserve backend + +A time deviation between the OIDC server and the SecObserve backend cannot always be avoided. To prevent the verification of claims issued at, not before and expiry from failing because of it, the parameter `OIDC clock skew` can be set in the settings. + +![OIDC clock skew](../assets/images/screenshot_oidc_clock_skew.png) diff --git a/docs/integrations/osv_scan.md b/docs/integrations/osv_scan.md new file mode 100644 index 000000000..a132454a8 --- /dev/null +++ b/docs/integrations/osv_scan.md @@ -0,0 +1,70 @@ +# Vulnerability scanning from OSV database + +The components of a product can be scanned for vulnerabilities using the OSV database. 
The OSV database is a database of open-source vulnerabilities, maintained by Google and is available at [https://osv.dev/](https://osv.dev/). + +There are 2 preconditions for a product to be met before using the OSV database for vulnerability scanning: + +* License/Component information has to be imported, only then all components are available for scanning. +* The flag `OSV scanning enabled` in the product settings has to be activated. + +![OSV scanning settings](../assets/images/screenshot_osv_scanning_settings_1.png) + +OSV delivers vulnerabilities for a component, without regard to the version of the component, plus a list of affected versions. Currently these package managers are supported directly to get the affected versions, by using information from the PURL of the component: + +* **bitnami:** Bitnami +* **conan:** ConanCenter +* **cran:** CRAN +* **cargo:** crates.io +* **golang:** Go +* **hackage:** Hackage +* **hex:** Hex +* **maven:** Maven +* **npm:** npm +* **nuget:** NuGet +* **pub:** Pub +* **pypi:** PyPI +* **gem:** RubyGems +* **swift:** SwiftURL + +Some Linux distribution and version can be determined automatically by information from the PURL of the component: + +* Alpine +* Chainguard +* Debian +* Ubuntu (only the free version, not Pro) +* Wolfi + +To correctly identify other Linux packages, the Linux distribution and version has to be set in the product or branch settings according to [OSV affected package specification](https://ossf.github.io/osv-schema/#affectedpackage-field). If it is not set for a branch, the product settings are used. Examples are: + +| Linux distribution | Version | +|--------------------|-----------| +| Red Hat | rhel_aus:8.4::appstream | +| Ubuntu | Pro:24.04:LTS | + + +## Manual scan + +If both preconditions are met, the OSV scan can be started manually from the `Import` menu. If a branch is selected, the scan will be performed on the components of the branch. If no branch is selected, the scan will be performed on the components of all branches and components without a branch. + +![OSV scanning import](../assets/images/screenshot_osv_scanning_import.png){ width="40%" style="display: block; margin: 0 auto" } + + +## Automatic scan + +OSV scanning can be configured to run automatically at a specific time. There is a general setting and a setting per API configuration. + +#### General setting + +In the `Features` section of the [Settings](../getting_started/configuration.md#admininistration-in-secobserve) the automatic OSV scanning can be enabled or disabled for the whole SecObserve instance. + +![Settings automatic OSV scanning](../assets/images/screenshot_settings_automatic_osv_scanning.png){ width="65%" style="display: block; margin: 0 auto" } + +The hour (in UTC time) and minute, when the automatic [API imports](./api_import.md/#automatic-import) and OSV scanning will run, can be set in the `Background tasks` section. A restart of the SecObserve instance is required to apply the changes. + +![Settings automatic OSV scanning](../assets/images/screenshot_settings_background_api_osv.png){ width="70%" style="display: block; margin: 0 auto" } + +#### Setting per product + +Only products that have `Automatic OSV scanning enabled` turned on will be imported automatically. 
+ +![OSV scanning settings](../assets/images/screenshot_osv_scanning_settings_2.png) diff --git a/docs/integrations/overview.md b/docs/integrations/overview.md index 163e00783..e104e7908 100644 --- a/docs/integrations/overview.md +++ b/docs/integrations/overview.md @@ -1,3 +1,114 @@ # Overview ![Integrations](../assets/images/secobserve_integrations.svg) + +
+ +- :fontawesome-brands-openid:{ .lg .middle } __Authentication__ + + --- + + All `OpenID Connect` providers are supported for authentication with an external user directory. + + [:octicons-arrow-right-24: OpenID Connect](oidc_authentication.md) + +- :material-puzzle:{ .lg .middle } __Components__ + + --- + + Components can be uploaded from CycloneDX and SPDX SBOMs for vulnerability scanning and license management. + + [:octicons-arrow-right-24: Upload SBOM](../usage/upload_sbom.md) + +- :material-magnify:{ .lg .middle } __Vulnerabilities__ + + --- + + Vulnerability data can be imported from the results of several vulnerability scanners. Additionally, components + can be scanned by SecObserve against the OSV database. + + [:octicons-arrow-right-24: Supported scanners](supported_scanners.md) + + [:octicons-arrow-right-24: OSV scan](osv_scan.md) + +- :material-skull-scan:{ .lg .middle } __EPSS Scores, Exploits__ + + --- + + Observations with a CVE Id are enriched with EPSS scores and information about exploits. The necessary data is + imported automatically every night. + + [:octicons-arrow-right-24: EPSS scores](epss.md) + + [:octicons-arrow-right-24: Exploit information](exploit_information.md) + +- :material-license:{ .lg .middle } __Licenses, License Groups__ + + --- + + The list of SPDX licenses is updated nightly. Additionally, superusers can manually import license groups generated + from the ScanCode LicenseDB. + + [:octicons-arrow-right-24: License data](license_data.md) + +- :material-security:{ .lg .middle } __VEX Import/Export__ + + --- + + Vulnerability Exploitability eXchange (VEX) documents can be imported and exported in CSAF, CycloneDX and OpenVEX + format. + + [:octicons-arrow-right-24: VEX documents](vex.md) + +- :material-download:{ .lg .middle } __Observation and License Export__ + + --- + + Observations and licenses of a product or product group can be exported to CSV or MS Excel files. + + [:octicons-arrow-right-24: Export of observations](observations_export.md) + +- :material-exclamation-thick:{ .lg .middle } __Issues__ + + --- + + SecObserve supports automatic creation of issues in GitHub, GitLab and Jira (Cloud). + + [:octicons-arrow-right-24: Issue trackers](issue_trackers.md) + +- :material-open-in-new:{ .lg .middle } __Code links__ + + --- + + For observations originating from a source file, a link can be generated to view it in the + source code repository. + + [:octicons-arrow-right-24: Source code repositories](source_code_repositories.md) + +- :material-bell-ring:{ .lg .middle } __Notifications__ + + --- + + SecObserve can send notifications via email, MS Teams or Slack when a security gate changes + or an exception occurs. + + [:octicons-arrow-right-24: Notifications](notifications.md) + +- :material-open-in-new:{ .lg .middle } __Information links__ + + --- + + Observations and components show links to get further information from external sources. + + [:octicons-arrow-right-24: Links to additional information](links.md) + +- :material-cog:{ .lg .middle } __REST API__ + + --- + + SecObserve is built with an API-first approach; every functionality needed to use SecObserve + is covered by the REST API. + + [:octicons-arrow-right-24: REST API](rest_api.md) + +
diff --git a/docs/integrations/source_code_repositories.md b/docs/integrations/source_code_repositories.md index eaf57cec9..567e29f2e 100644 --- a/docs/integrations/source_code_repositories.md +++ b/docs/integrations/source_code_repositories.md @@ -6,9 +6,9 @@ Observations can have a source file plus start and end lines as an origin. Durin When creating or editing a product, the field `Repository prefix` can be set. This needs to be the prefix of the URL to show a file in the repository. -* If the observations of the product have the branch set, then for **GitLab** it is something like `https://gitlab.maibornwolff.de/secobserve/secobserve/-/blob`, for **GitHub** it looks like `https://github.com/MaibornWolff/codecharta/blob`. -* If the observations don't have the branch set, then a branch need to be at the end of the repository prefix, e.g. `https://gitlab.maibornwolff.de/secobserve/secobserve/-/blob/dev` or `https://github.com/MaibornWolff/codecharta/blob/dev`. -* **Azure DevOps** does not need the branch in the repository prefix, an example is `https://dev.azure.com/maibornwolff/SecObserve/_git/SecObserve_Frontend`. If the observations have a branch set, then this branch will be used in the URL, otherwise the default branch of the repository will be used. +* If the observations of the product have the branch set, then for **GitLab** it is something like `https://gitlab.secobserve.de/secobserve/secobserve/-/blob`, for **GitHub** it looks like `https://github.com/SecObserve/codecharta/blob`. +* If the observations don't have the branch set, then a branch needs to be at the end of the repository prefix, e.g. `https://gitlab.secobserve.de/secobserve/secobserve/-/blob/dev` or `https://github.com/SecObserve/codecharta/blob/dev`. +* **Azure DevOps** does not need the branch in the repository prefix, an example is `https://dev.azure.com/secobserve/SecObserve/_git/SecObserve_Frontend`. If the observations have a branch set, then this branch will be used in the URL, otherwise the default branch of the repository will be used. ![Repository integration](../assets/images/screenshot_repository_1.png) diff --git a/docs/integrations/supported_scanners.md b/docs/integrations/supported_scanners.md index 8065bf72d..65e9cd1e1 100644 --- a/docs/integrations/supported_scanners.md +++ b/docs/integrations/supported_scanners.md @@ -16,9 +16,7 @@ There are different types of vulnerability scans: While every vulnerability scanner writes its own format, there are 2 standardized formats that are implemented by several scanners: -* **[CycloneDX](https://cyclonedx.org/)**: CycloneDX is a Software Bill of Material (SBOM), that contains information about components of a system and their vulnerabilities. It is typically used by SCA scanners. - - If the SBOM contains licenses of components, they are imported into SecObserve as well for [license management](../usage/license_management.md/#managing-licenses-in-products). +* **[CycloneDX](https://cyclonedx.org)**: CycloneDX is a Software Bill of Materials (SBOM) that contains information about components of a system and their vulnerabilities. It is typically used by SCA scanners. A CycloneDX file can either be imported to get vulnerabilities ([Import vulnerabilities](../usage/import_observations.md)) or to get components with their licenses ([Upload SBOMs](../usage/upload_sbom.md)).
* **[SARIF](https://www.oasis-open.org/committees/tc_home.php?wg_abbrev=sarif)**: The Static Analysis Results Interchange Format is an OASIS standard which is implemented by a lot of SAST scanners. diff --git a/docs/integrations/vex.md b/docs/integrations/vex.md index b1ce3b779..6dc6be2ca 100644 --- a/docs/integrations/vex.md +++ b/docs/integrations/vex.md @@ -1,51 +1,64 @@ # VEX documents -A VEX (Vulnerability Exploitability eXchange) document is a form of a security advisory that indicates whether a product or products are affected by a known vulnerability or vulnerabilities. SecObserve supports the export of VEX documents in two formats: +A VEX (Vulnerability Exploitability eXchange) document is a form of a security advisory that indicates whether a product or products are affected by a known vulnerability or vulnerabilities. SecObserve supports the export and import of VEX documents in three formats: -* The [Common Security Advisory Framework](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) (CSAF) format is an [OASIS](https://www.oasis-open.org/) standard +* The [Common Security Advisory Framework](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) (CSAF) format is an [OASIS](https://www.oasis-open.org/) standard +* [CycloneDX](https://cyclonedx.org/capabilities/vex/) is an international standard for Bill of Materials (ECMA-424). * [OpenVEX](https://github.com/openvex/spec/blob/main/OPENVEX-SPEC.md) is a community-driven format, maintained by an [OpenSSF](https://openssf.org/) special interest group ## Feature flag The VEX feature is is disabled by default because not all installations will use it. It can be enabled by setting the feature flag `FEATURE_VEX` in the [Settings](../getting_started/configuration.md#admininistration-in-secobserve): -![VEX feature flag](../assets/images/screenshot_vex_feature_flag.png) +![VEX feature flag](../assets/images/screenshot_vex_feature_flag.png){ width="80%" style="display: block; margin: 0 auto" } Enabling the feature flag will make the VEX functionality available in the main navigation bar and enable the "VEX justification" field in several places. -## Create a new VEX document +## Export a new VEX document -To create a VEX document, the user has to define a set of attributes in a dialog. Some of them are different for either a CSAF or OpenVEX document. Other attributes are common to both formats: +To export a VEX document, the user has to define a set of attributes in a dialog. Some of them are different per format, other attributes are common to all formats: | Attribute | Optionality | Description | |-----------------------|:-----------:|-------------| | `Product` | optional | If a product is selected, only vulnerabilities for that product will be included in the VEX document. | | `Vulnerabilities` | optional | Zero or more names of vulnerabilities to be included in the VEX document, e.g. `CVE-2021-44228` | | `Branches / Versions` | optional | If a product is selected, the VEX document can be limited to cover only the selected branches / versions of this product. | -| `ID prefix` | mandatory | The `ID prefix` is part of of the unique identifier of the VEX document. The unique id has the format `prefix_year_counter`, where the counter is increased by 1 for every new document per prefix and year. This unique id will stored in a designated attribute in the VEX document and used for the filename of the VEX document.
| +| `ID prefix` | mandatory | (Only CSAF and OpenVEX) The `ID prefix` is part of the unique identifier of the VEX document. The unique id has the format `prefix_year_counter`, where the counter is increased by 1 for every new document per prefix and year. This unique id will be stored in a designated attribute in the VEX document and used for the filename of the VEX document. | Either a product or at least one vulnerability has to be selected. -After pushing the **Create** button in the dialog, the VEX document with the version 1 will be created and ready for download. Additionally a database entry with all attributes is created, which can be selected later to create an updated version of the VEX document +After pushing the **Create** button in the dialog, the VEX document with the version 1 will be created and ready for download. Additionally a database entry with all attributes is created, which can be selected later to export an updated version of the VEX document. +**Exporting CycloneDX VEX documents** -## Update a VEX document +CycloneDX VEX documents can only be exported as dedicated BOMs. This means the components with vulnerabilities are not listed in the document but are referenced with [BOM-links](https://cyclonedx.org/capabilities/bomlink/). Therefore all vulnerabilities to be exported need to have a BOM-link, which means either they have been imported via a CycloneDX document or they have been found by the OSV scanner after uploading components with a CycloneDX SBOM (see [SBOM workflow](../getting_started/about.md#consuming-sboms-for-license-and-vulnerability-management)). + +**Mapping of justifications** + +Justifications have different enumerations for CSAF/OpenVEX compared to CycloneDX. When entering a justification, e.g. for an observation assessment, a parameter in the settings (see [Feature flag](#feature-flag)) decides if the user gets the CSAF/OpenVEX list or the CycloneDX list of justifications. E.g. if an organisation decides to publish CycloneDX VEX documents, this attribute should be set to `CycloneDX`. Then the user can only enter justifications that can be written directly to the VEX document. + +If a VEX document is exported in a format that does not correlate with this parameter, the justifications are mapped to corresponding values of the exported format. + +## Export an updated VEX document After selecting the entry of either a CSAF or OpenVEX document from the respective list, a form shows the details of its attributes and a button to update a document. Some of the attributes can be changed for a new version of the document. If there have been no changes to the included vulnerabilities, no new document will be created. Otherwise a new version of the document will be created and ready for download. ## Import VEX documents -VEX documents can be imported in CSAF or OpenVEX format. After importing the file, the document will be parsed and the VEX statements will be applied to the referenced observations. A user needs to be `superuser` to import VEX documents. +VEX documents can be imported in CSAF, OpenVEX and CycloneDX (integrated and dedicated) format. After importing the file, the document will be parsed and the VEX statements will be applied to the referenced observations. A user needs to be `superuser` to import VEX documents. When observations are imported, the VEX statements will be applied to the referenced observations as well.
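How the referenced observations are determined is described below; matching is based on vulnerability IDs and PURLs. As an illustration of which fields of a VEX statement are involved, the following sketch shows a single OpenVEX-style statement (structure per the OpenVEX specification, values purely illustrative) together with a simplified version of the matching check; it is not SecObserve's actual code:

```python
# Illustrative only: the parts of an OpenVEX statement that are relevant for matching.
statement = {
    "vulnerability": {"name": "CVE-2021-44228"},  # compared with the observation's vulnerability ID
    "products": [
        {
            "@id": "pkg:maven/com.example/my-product@1.0.0",  # compared with the product or branch PURL
            "subcomponents": [
                # optional: compared with the component PURL of the observation
                {"@id": "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1"}
            ],
        }
    ],
    "status": "not_affected",
    "justification": "vulnerable_code_not_in_execute_path",
}


def statement_applies(statement: dict, product_purl: str, vulnerability_id: str) -> bool:
    """Simplified check whether a statement references a given product and vulnerability."""
    if statement["vulnerability"]["name"] != vulnerability_id:
        return False
    return any(product["@id"] == product_purl for product in statement["products"])
```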
-**How are the referened observations determined?** +**How are the referenced observations determined?** + +For **CSAF**, **OpenVEX** and **CycloneDX integrated** VEX documents, the reference is determined by PURLs. -First, the relevant products are determined by the product PURL. The PURL of the product or the PURL of a branch must match the product PURL in the VEX statements. +* First, the relevant products are determined by the product PURL. The PURL of the product or the PURL of a branch must match the product PURL in the VEX statements. +* Second, the relevant observations are determined by their Vulnerability ID and optionally the component PURL. The Vulnerability ID of the observation must be the same as the Vulnerability ID of the VEX statements. If the VEX statement contains a component PURL, this must match the vulnerability PURL in the component PURL of the observation. -Second, the relevant observations are determined by their Vulnerability ID and optionally the component PURL. The Vulnerability ID of the observation must be the same as the Vulnerability ID of the VEX statements. If the VEX statement contains a component PURL, this must match the vulnerability PURL in the component PURL of the observation. +For **CycloneDX dedicated** VEX documents, the reference is determined by a [BOM-link](https://cyclonedx.org/capabilities/bomlink/). These documents contain only vulnerability and VEX information with references to components in a separate SBOM. Therefore it is a precondition to have imported the corresponding SBOM before. Then the observations are determined by the Vulnerability ID and the BOM-link. **When do PURLs match?** diff --git a/docs/mkdocs_plugins/__init__.py b/docs/mkdocs_plugins/__init__.py new file mode 100644 index 000000000..d1571f349 --- /dev/null +++ b/docs/mkdocs_plugins/__init__.py @@ -0,0 +1 @@ +# SPDX-Identifier: BSD-3-Clause diff --git a/docs/mkdocs_plugins/include.py b/docs/mkdocs_plugins/include.py new file mode 100644 index 000000000..0d2fa333c --- /dev/null +++ b/docs/mkdocs_plugins/include.py @@ -0,0 +1,63 @@ +# SPDX-Identifier: BSD-3-Clause +import logging + +from markdown import Markdown +from pymdownx.superfences import SuperFencesBlockPreprocessor, highlight_validator + +log = logging.getLogger("mkdocs.include_extension") + + +def include_file_format(source: str, _language: str, class_name: str, options: dict, md: Markdown, **kwargs): + """ + Custom fence formatter that includes the contents of an external file + into a fenced code block in a Markdown document. + + This is designed to work with `pymdownx.superfences` by defining a + custom fence (e.g., `include`) and enabling file inclusion using + a `path` option. + + Usage: + Define a custom fence in mkdocs.yml: + + markdown_extensions: + - pymdownx.superfences: + custom_fences: + - name: include + class: source + format: !!python/name:include.include_file_format + + Then use in Markdown like: + + ```include {language=python} + snippets/example.py + ``` + + This will read `snippets/example.py` and render it as a highlighted + code block. 
+ """ + + fenced_preprocessor: SuperFencesBlockPreprocessor = md.preprocessors["fenced_code_block"] + file_path = source.strip() + log.debug(f"Processing file inclusion: {file_path}") + if file_path: + try: + with open(file_path, 'r', encoding='utf-8') as f: + source = f.read() + except Exception as e: + source = f"Error including file: {e}" + log.error(f"Error reading file: {e}") + language = options.pop('language', '') + classes: list[str] = kwargs.setdefault('classes', []) + classes.append(class_name) + return fenced_preprocessor.highlight(source, language, options, md, **kwargs) + + +def include_file_validate(language, inputs, options, attrs, md): + """ + Validation callback for the `include` custom fenced code block. + + This function is used with `pymdownx.superfences` to process and validate + options for the `include` fence before the formatter is called. + """ + options['language'] = inputs.pop('language', '') + return highlight_validator(language, inputs, options, attrs, md) diff --git a/docs/specification/secobserve_license_policy_schema.json b/docs/specification/secobserve_license_policy_schema.json new file mode 100644 index 000000000..23cf912e4 --- /dev/null +++ b/docs/specification/secobserve_license_policy_schema.json @@ -0,0 +1,137 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/SecObserve/SecObserve/secobserve_license_policy_schema_1.0.0.json", + "title": "SecObserve License Policy Schema", + "description": "The SecObserve License Policy Schema specifies the format of license policies exported in the JSON format.", + "type": "object", + "properties": { + "description": { + "type": "string", + "description": "Description of the license policy." + }, + "ignore_component_types": { + "type": "array", + "uniqueItems": true, + "minItems": 1, + "description": "List of PURL types that should be ignored in the license policy.", + "items": { + "type": "string", + "description": "PURL type that should be ignored in the license policy.", + "enum": [ + "alpm", + "apk", + "bitbucket", + "bitnami", + "cargo", + "cocoapods", + "composer", + "conan", + "conda", + "cpan", + "cran", + "deb", + "docker", + "gem", + "generic", + "github", + "golang", + "hackage", + "hex", + "huggingface", + "luarocks", + "maven", + "mlflow", + "npm", + "nuget", + "oci", + "pub", + "pypi", + "rpm", + "qpkg", + "swid", + "swift" + ] + } + }, + "items": { + "type": "array", + "uniqueItems": true, + "minItems": 1, + "description": "", + "items": { + "properties": { + "comment": { + "type": "string", + "description": "Comment, why the license is included in the policy with the defined evaluation result." + }, + "evaluation_result": { + "type": "string", + "description": "The evaluation result for the license.", + "enum": [ + "Allowed", + "Forbidden", + "Ignored", + "Review required", + "Unknown" + ] + }, + "from_parent": { + "type": "boolean", + "description": "Indicates if the evaluation result was inherited from a parent policy." + }, + "license_group": { + "type": "string", + "description": "Name of the license group containing the license." + }, + "license_expression": { + "type": "string", + "description": "License expression according to the SPDX specification." + }, + "non_spdx_license": { + "type": "string", + "description": "Name of a license that is not part of the SPDX specification." + }, + "spdx_license": { + "type": "string", + "description": "SPDX identifier of the license." 
+ } + }, + "required": [ + "evaluation_result", + "from_parent" + ], + "oneOf": [ + { + "required": [ + "license_expression" + ] + }, + { + "required": [ + "non_spdx_license" + ] + }, + { + "required": [ + "spdx_license" + ] + } + ], + "additionalProperties": false + } + }, + "name": { + "type": "string", + "description": "Name of the license policy." + }, + "parent": { + "type": "string", + "description": "Name of the parent license policy." + } + }, + "required": [ + "items", + "name" + ], + "additionalProperties": false +} diff --git a/docs/usage/branches.md b/docs/usage/branches.md index 74514e89c..8da9b4e8b 100644 --- a/docs/usage/branches.md +++ b/docs/usage/branches.md @@ -48,15 +48,9 @@ The parameters are set globally in the [Settings](../getting_started/configurati | **BRANCH_HOUSEKEEPING_KEEP_INACTIVE_DAYS** | Days before incative branches / versions and their observations are deleted | | **BRANCH_HOUSEKEEPING_EXEMPT_BRANCHES** | Regular expression which branches / versions to exempt from deletion | -Per default the task to delete inactive branches / version including their observations is scheduled to run every night at 02:00 UTC time. This default can be changed by administrators via the [Settings](../getting_started/configuration.md#admininistration-in-secobserve). The expressions for `BRANCH_HOUSEKEEPING_CRONTAB_MINUTE` and `BRANCH_HOUSEKEEPING_CRONTAB_HOUR` have to be valid values according to [https://huey.readthedocs.io/en/latest/api.html#crontab](https://huey.readthedocs.io/en/latest/api.html#crontab): +Per default the task to delete inactive branches / version including their observations is scheduled to run every night at 02:00 UTC time. This default can be changed by administrators via the **Background tasks** section in the [Settings](../getting_started/configuration.md#admininistration-in-secobserve). Hours are always in UTC time. -* `*` = every distinct value (every minute, every hour) -* `*/n` = run every `n` times, i.e. hours=’*/4’ == 0, 4, 8, 12, 16, 20 -* `n` = run every `n` (minutes 0 - 60, hours 0 - 24) -* `m-n` = run every time m..n -* `m,n` = run on m and n - -Hours are always in UTC time. +![Settings housekeeping](../assets/images/settings_cron_housekeeping.png){ width="80%" style="display: block; margin: 0 auto" } #### Product specific settings diff --git a/docs/usage/import_observations.md b/docs/usage/import_observations.md index 54129942e..607661004 100644 --- a/docs/usage/import_observations.md +++ b/docs/usage/import_observations.md @@ -14,20 +14,22 @@ Alternatively observations can be imported via the user interface. When showing ![Upload of files](../assets/images/screenshot_import_2.png){ width="50%" style="display: block; margin: 0 auto" } -A file needs to be selected. The parser to interpret the content of the file will be detected automatically. Optional are attributes for the branch / version, the origin as service, docker image, endpoint URL and Kubernetes cluster. If `Supress licenses` is checked, no licenses will be imported, only observations. +A file needs to be selected. The parser to interpret the content of the file will be detected automatically. Optional are attributes for the branch / version, service, docker image, endpoint URL and Kubernetes cluster. + +When uploading a CycloneDX file here, only the vulnerabilities will be imported. To import all components with their licenses, [Upload SBOM](../usage/upload_sbom.md) has to be used. 
#### API import ![API import](../assets/images/screenshot_import_3.png){ width="50%" style="display: block; margin: 0 auto" } -Before importing observations from an API, an API configuration needs to be created for the product. This API configuration specifies how to access the API (URL, API key,Query, Basic Authentication, SSL Verify, ...). Optional for the import are attributes for the branch / version, the origin as service, docker image, endpoint URL and Kubernetes cluster. +Before importing observations from an API, an [API configuration](../integrations/api_import.md#api-configuration) needs to be created for the product. This API configuration specifies how to access the API (URL, API key,Query, Basic Authentication, SSL Verify, ...). Optional for the import are attributes for the branch / version, service, docker image, endpoint URL and Kubernetes cluster. ## Import algorithm The import algorithm has to decide, if an observation already exists and needs to be updated or it is new and needs to be created. But how does the import algorithm identifies an observation to make this decision? Two terms help to understand how that works: * **Identity hash**: The `identity hash` is a SHA256 hash code of the concatenation of the observation's title and all its origins ^[1]^. Two observations with the same `identity hash` are defined as identical. -* **Vulnerability check**: An import for one product, one branch / version and one file name resp. one API configuration is a so-called vulnerability check. +* **Vulnerability check**: An import for one product, one branch / version, one service and one file name resp. one API configuration is a so-called vulnerability check. A flowchart visualizes the import algorithm: diff --git a/docs/usage/license_management.md b/docs/usage/license_management.md index a3819661c..6abfeb4ad 100644 --- a/docs/usage/license_management.md +++ b/docs/usage/license_management.md @@ -17,9 +17,7 @@ If license management is deactivated: #### Importing components with licenses -When importing data from CycloneDX or SPDX SBOMs, the licenses of the components are imported as well, if they are available in the SBOM and the parameter `SO_SUPPRESS_LICENSES` is not set or set to `false` ^1)^. The licenses are shown in the `License` tab of the Product view. - -**^1)^** `SO_SUPPRESS_LICENSES` will be set to `true` by the *Grype*, *Trivy Filesystem* and *Trivy Image* GitHub actions / GitLab templates if not set otherwise, to not accidently import licenses. +When uploading data from CycloneDX or SPDX SBOMs, the licenses of the components are imported as well, if they are available in the SBOM. The components and licenses are shown in the `License` tab of the Product view. ![License component list](../assets/images/screenshot_license_component_list.png) @@ -28,6 +26,30 @@ After clicking on an entry, the details of the component and its license are sho ![License component show](../assets/images/screenshot_license_component_show.png) +#### Declared and concluded licenses + +> Declared licenses and concluded licenses represent two different stages in the licensing process within software development. + +> * **Declared licenses** refer to the initial intention of the software authors regarding the licensing terms under which their code is released. +> * **Concluded licenses** on the other hand, are the result of a comprehensive analysis of the project's codebase to identify and confirm the actual licenses of the components used, which may differ from the initially declared licenses. 
+ +> While declared licenses provide an upfront indication of the licensing intentions, concluded licenses offer a more thorough understanding of the actual licensing within a project, facilitating proper compliance and risk management. + +*(Copied from the [CycloneDX specification](https://cyclonedx.org/docs/1.6/json/#components_items_licenses_oneOf_i0_items_license_acknowledgement))* + +Both types of acknowledgement are imported from an SBOM and shown in the UI. If a CycloneDX SBOM doesn't include a license acknowledgement, it will be treated as `Declared`. + +Additionally, a user can manually add a concluded license, with the button `ADD / EDIT CONCLUDED LICENSE`, for example when there was no license or there was a wrong license in the SBOM. + +There is a priority for the license acknowledgement. + +1. If a **manual concluded license** has been set, this is used for the evaluation of the license and is shown in lists. +2. The **imported concluded license** is used, if it is available and no manual concluded license is set. +3. Otherwise the **imported declared license** is used for evaluation and lists if it was in the SBOM. + +Manually set concluded licenses are stored in a cache and will be set again for a component, when a new SBOM is imported for the same product or another product in the same product group. + + #### Evaluation of licenses A License Policy for the Product can be set, when editing the product settings. @@ -44,6 +66,8 @@ If no License Policy is set, all licenses are evaluated as `Unknown`. If a Licen License expressions are evaluated by their included licenses, if the operators are all either `AND` or `OR`. If other operators are used, e.g. `WITH`, the expression is evaluated as `Unknown`, if there is no explicit rule for this license expression. +If multiple licenses have been found for a component, they are evaluated like an `AND` expression. If for example one license is `Allowed` and the other one is `Forbidden`, the component is evaluated as `Forbidden`. + A good strategy is to start with an existing License Policy and when needed make a copy of it and adjust the rules to the needs of the Product. ## Managing License Policies @@ -54,13 +78,13 @@ The list of `License Policies` can be found in the `Licenses` sub-menu under `Ad A `License Policy` can have another license policy as a `Parent`. If a license policy has a parent, the rules of the parent are also valid for the child policy, but existing rules of the parent can be overriden and new rules can be added. A license policy which is a parent cannot have a parent itself. -Within the `License Policy` itself a comma-separated list of component (e.g. `apk` or `deb`) types can be defined, which shall be ignored in the license evaluation. This can be useful for operating system packages in a Docker container, which are not relevant for the license management. +Within the `License Policy` itself a comma-separated list of component types (e.g. `apk` or `deb`) can be defined, which shall be ignored in the license evaluation. This can be useful for operating system packages in a Docker container, which are not relevant for the license management. The attribute `Public` defines, if the License Policy is visible for all users or only for the members of the policy. **Actions** -* The `Export` button opens a sub-menu to exports the License Policy either as a JSON or a YAML file. 
+* The `Export` button opens a sub-menu to export the License Policy either as a SecObserve specific JSON or YAML file or in a format that can be used with [sbom-utility](https://github.com/CycloneDX/sbom-utility?tab=readme-ov-file#license-list-subcommand), using the `--config-license` parameter. The SecObserve specific output format is specified as a [JSON schema](../specification/secobserve_license_policy_schema.json). * With the `Apply` button the rules of the License Policy are applied to all products, that have this License Policy set. * The `Copy` button creates a new License Policy with the same rules, which can be adjusted for a specific Product. @@ -77,6 +101,7 @@ A `License Policy` has a list of items, which are the rules of the policy. It ca ![License policy item](../assets/images/screenshot_license_policy_item.png){ width="60%" style="display: block; margin: 0 auto" } + Additionally a `License Policy` has a list of user members and a list of authorization group members, which define who has access to a license policy, either read-only or as a manager. To define read-only members is not necessary, if the policy is defined as `Public`. Additionally, users can view all license policies that are assigned to a product, if they have access to the product. ![License policy member](../assets/images/screenshot_license_policy_member.png){ width="60%" style="display: block; margin: 0 auto" } @@ -84,9 +109,9 @@ Additionally a `License Policy` has a list of user members and a list of authori ## Managing License Groups -A `License Group` is a collection of licenses with similar license conditions. There is a predefined list of license groups, taken from the classification of the [Blue Oak Council](https://blueoakcouncil.org/). Administrators can import license group from the ScanCode LicenseDB, see [License data import](../integrations/license_data.md#scancode-licensedb). +A `License Group` is a collection of licenses with similar license conditions. There is a predefined list of license groups, imported from the ScanCode LicenseDB, see [License data import](../integrations/license_data.md#scancode-licensedb). -As with `License Policies`, a `License Group` +As for `License Policies`, a `License Group` * can be found in the `Licenses` sub-menu under `Administration`, * can be copied if adjustments are needed for a specific Product, diff --git a/docs/usage/risk_acceptance_expiry.md b/docs/usage/risk_acceptance_expiry.md index 2fe699655..62b89ce86 100644 --- a/docs/usage/risk_acceptance_expiry.md +++ b/docs/usage/risk_acceptance_expiry.md @@ -20,5 +20,4 @@ The number of `Risk acceptance expiry (days)` will be used to set a default for --- -The [Settings](../getting_started/configuration.md#admininistration-in-secobserve) have to additional parameters, `Risk acceptance expiry crontab (hours/UTC)` and `Risk acceptance expiry crontab (minutes)` to control, when the expiry date for the Observations is checked. The backend server will need to be restarted after changing these parameters for the changes to take effect. - \ No newline at end of file +Per default the task to check the risk acceptance expiry is scheduled to run every night at 01:00 UTC time. This default can be changed by administrators via the **Background tasks** section in the [Settings](../getting_started/configuration.md#admininistration-in-secobserve). Hours are always in UTC time.
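These nightly jobs (API imports, OSV scanning, branch housekeeping, risk acceptance expiry) are driven by crontab-style hour and minute values, with hours interpreted in UTC. As a generic illustration of what such a schedule means, and not SecObserve's actual task definitions, a periodic task that runs every night at 01:00 UTC can be expressed with huey's `crontab` helper like this:

```python
# Generic sketch of a nightly schedule with huey (illustrative, not SecObserve's task code).
from huey import MemoryHuey, crontab

huey = MemoryHuey()


@huey.periodic_task(crontab(minute="0", hour="1"))
def check_risk_acceptance_expiry():
    # Runs once per night at 01:00 (UTC, when the consumer is configured for UTC).
    ...
```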
diff --git a/docs/usage/upload_sbom.md b/docs/usage/upload_sbom.md new file mode 100644 index 000000000..00e37907d --- /dev/null +++ b/docs/usage/upload_sbom.md @@ -0,0 +1,29 @@ +# Upload SBOMs + +Uploading an SBOM for a Product or Branch / Version imports all components with their licenses and dependencies. This is a precondition to use [License management](../usage/license_management.md) or [OSV scanning](../integrations/osv_scan.md). When uploading a CycloneDX file, no vulnerabilities will be imported, just components with their licenses. To import vulnerabilities from a CycloneDX file, [Import observations](../usage/import_observations.md) has to be used. + +Currently [CycloneDX](https://cyclonedx.org/) and [SPDX](https://spdx.dev) files are supported, both in JSON format. + + +## Import from CI pipelines via the API + +A [GitHub action and GitLab CI template](../integrations/github_actions_and_templates.md) to import an SBOM will be available shortly. + +Alternatively, the [REST API](../integrations/rest_api.md) can be used to import an SBOM, with the endpoints `/api/import/file_upload_sbom_by_id/` and `/api/import/file_upload_sbom_by_name/` (see the sketch at the end of this page). + + +## Import from the frontend + +Additionally, SBOMs can be uploaded via the user interface. When showing a product, there is a respective button in the **Import** menu: + +![Start import](../assets/images/screenshot_import_sbom_1.png) + +![Upload of files](../assets/images/screenshot_import_sbom_2.png){ width="50%" style="display: block; margin: 0 auto" } + +A file needs to be selected. The parser to interpret the content of the file will be detected automatically. The Branch / Version is optional. + +When uploading a CycloneDX file here, no vulnerabilities will be imported, just components with their licenses. To import vulnerabilities from a CycloneDX file, [Import observations](../usage/import_observations.md) has to be used. + +## Import algorithm + +The import algorithm to decide if a component with its license already exists or is new is similar to the one for [importing observations](../usage/import_observations.md#import-algorithm).
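The REST endpoints named in the section about CI pipelines can also be used to script the upload. The following sketch posts an SBOM file to `/api/import/file_upload_sbom_by_name/`; the authentication header and the form field names are assumptions for illustration, the exact request format is defined by the API documentation of the instance:

```python
# Illustrative sketch of uploading an SBOM via the REST API endpoint mentioned above.
# The token header and the form field names are assumptions; check the instance's
# API documentation for the exact request format.
import requests

SECOBSERVE_URL = "https://secobserve.example.com"  # illustrative
API_TOKEN = "..."  # illustrative

with open("sbom.cdx.json", "rb") as sbom_file:
    response = requests.post(
        f"{SECOBSERVE_URL}/api/import/file_upload_sbom_by_name/",
        headers={"Authorization": f"APIToken {API_TOKEN}"},
        data={"product_name": "My Product", "branch_name": "main"},
        files={"file": sbom_file},
    )

response.raise_for_status()
print(response.json())
```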
diff --git a/end_to_end_tests/package-lock.json b/end_to_end_tests/package-lock.json index cf2a05494..91d49d8a6 100644 --- a/end_to_end_tests/package-lock.json +++ b/end_to_end_tests/package-lock.json @@ -1,25 +1,25 @@ { "name": "end_to_end_tests", - "version": "1.26.0", + "version": "1.48.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "end_to_end_tests", - "version": "1.26.0", + "version": "1.48.0", "devDependencies": { - "@playwright/test": "1.49.1", - "@types/node": "22.10.6" + "@playwright/test": "1.58.2", + "@types/node": "24.10.13" } }, "node_modules/@playwright/test": { - "version": "1.49.1", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.49.1.tgz", - "integrity": "sha512-Ky+BVzPz8pL6PQxHqNRW1k3mIyv933LML7HktS8uik0bUXNCdPhoS/kLihiO1tMf/egaJb4IutXd7UywvXEW+g==", + "version": "1.58.2", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.58.2.tgz", + "integrity": "sha512-akea+6bHYBBfA9uQqSYmlJXn61cTa+jbO87xVLCWbTqbWadRVmhxlXATaOjOgcBaWU4ePo0wB41KMFv3o35IXA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "playwright": "1.49.1" + "playwright": "1.58.2" }, "bin": { "playwright": "cli.js" @@ -29,13 +29,13 @@ } }, "node_modules/@types/node": { - "version": "22.10.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.6.tgz", - "integrity": "sha512-qNiuwC4ZDAUNcY47xgaSuS92cjf8JbSUoaKS77bmLG1rU7MlATVSiw/IlrjtIyyskXBZ8KkNfjK/P5na7rgXbQ==", + "version": "24.10.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.13.tgz", + "integrity": "sha512-oH72nZRfDv9lADUBSo104Aq7gPHpQZc4BTx38r9xf9pg5LfP6EzSyH2n7qFmmxRQXh7YlUXODcYsg6PuTDSxGg==", "dev": true, "license": "MIT", "dependencies": { - "undici-types": "~6.20.0" + "undici-types": "~7.16.0" } }, "node_modules/fsevents": { @@ -54,13 +54,13 @@ } }, "node_modules/playwright": { - "version": "1.49.1", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.49.1.tgz", - "integrity": "sha512-VYL8zLoNTBxVOrJBbDuRgDWa3i+mfQgDTrL8Ah9QXZ7ax4Dsj0MSq5bYgytRnDVVe+njoKnfsYkH3HzqVj5UZA==", + "version": "1.58.2", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.58.2.tgz", + "integrity": "sha512-vA30H8Nvkq/cPBnNw4Q8TWz1EJyqgpuinBcHET0YVJVFldr8JDNiU9LaWAE1KqSkRYazuaBhTpB5ZzShOezQ6A==", "dev": true, "license": "Apache-2.0", "dependencies": { - "playwright-core": "1.49.1" + "playwright-core": "1.58.2" }, "bin": { "playwright": "cli.js" @@ -73,9 +73,9 @@ } }, "node_modules/playwright-core": { - "version": "1.49.1", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.49.1.tgz", - "integrity": "sha512-BzmpVcs4kE2CH15rWfzpjzVGhWERJfmnXmniSyKeRZUs9Ws65m+RGIi7mjJK/euCegfn3i7jvqWeWyHe9y3Vgg==", + "version": "1.58.2", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.58.2.tgz", + "integrity": "sha512-yZkEtftgwS8CsfYo7nm0KE8jsvm6i/PTgVtB8DL726wNf6H2IMsDuxCpJj59KDaxCtSnrWan2AeDqM7JBaultg==", "dev": true, "license": "Apache-2.0", "bin": { @@ -86,9 +86,9 @@ } }, "node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", "dev": true, "license": "MIT" } diff --git 
a/end_to_end_tests/package.json b/end_to_end_tests/package.json index 66c66fce0..b5860d44d 100644 --- a/end_to_end_tests/package.json +++ b/end_to_end_tests/package.json @@ -1,6 +1,6 @@ { "name": "end_to_end_tests", - "version": "1.26.0", + "version": "1.48.0", "private": true, "description": "", "main": "index.js", @@ -8,7 +8,7 @@ "keywords": [], "author": "", "devDependencies": { - "@playwright/test": "1.49.1", - "@types/node": "22.10.6" + "@playwright/test": "1.58.2", + "@types/node": "24.10.13" } } diff --git a/end_to_end_tests/playwright.config.ts b/end_to_end_tests/playwright.config.ts index 13c1bedb5..07b3eb2f9 100644 --- a/end_to_end_tests/playwright.config.ts +++ b/end_to_end_tests/playwright.config.ts @@ -30,8 +30,12 @@ export default defineConfig({ trace: 'on-first-retry', }, + expect: { + timeout: 10_000, + }, + workers: 3, - timeout: 60000, + timeout: 120000, /* Configure projects for major browsers */ projects: [ @@ -39,10 +43,10 @@ export default defineConfig({ name: 'chromium', use: { ...devices['Desktop Chrome'] }, }, - { - name: 'firefox', - use: { ...devices['Desktop Firefox'] }, - }, + // { + // name: 'firefox', + // use: { ...devices['Desktop Firefox'] }, + // }, { name: 'webkit', use: { ...devices['Desktop Safari'] }, diff --git a/end_to_end_tests/tests/secobserve_test.spec.ts b/end_to_end_tests/tests/secobserve_test.spec.ts index f7d24e0fa..264f08095 100644 --- a/end_to_end_tests/tests/secobserve_test.spec.ts +++ b/end_to_end_tests/tests/secobserve_test.spec.ts @@ -14,22 +14,19 @@ test.describe("SecObserve", async () => { test("Login", async () => { if (process.env.SO_PW_DOCKER) { - await delay(35000); + await delay(50000); } await page.goto(process.env.SO_PW_FRONTEND_BASE_URL); await expect(page).toHaveURL(process.env.SO_PW_FRONTEND_BASE_URL + "/#/login"); - page.on('console', msg => console.log(msg.text())); - await page.getByLabel("Username *").click(); await page.getByLabel("Username *").fill(process.env.SO_PW_USERNAME); await page.getByLabel("Username *").press("Tab"); await page.getByLabel("Password *").fill(process.env.SO_PW_PASSWORD); await page.getByRole("button", { name: "Sign in with user" }).click(); - page.on('console', msg => console.log(msg.text())); await expect(page).toHaveURL(process.env.SO_PW_FRONTEND_BASE_URL + "/#/"); await page.getByRole("menuitem", { name: "Product Groups" }).click(); diff --git a/frontend/.env.keycloak b/frontend/.env.keycloak index f0b562b03..08b3f8c00 100644 --- a/frontend/.env.keycloak +++ b/frontend/.env.keycloak @@ -5,3 +5,4 @@ OIDC_AUTHORITY=http://localhost:8080/realms/secobserve OIDC_CLIENT_ID=secobserve OIDC_REDIRECT_URI=http://localhost:3000 OIDC_POST_LOGOUT_REDIRECT_URI=http://localhost:3000 +OIDC_PROMPT= diff --git a/frontend/.env.no_oidc b/frontend/.env.no_oidc index cad598f9c..abcd262c9 100644 --- a/frontend/.env.no_oidc +++ b/frontend/.env.no_oidc @@ -6,3 +6,4 @@ OIDC_CLIENT_ID=dummy OIDC_REDIRECT_URI=dummy OIDC_POST_LOGOUT_REDIRECT_URI=dummy OIDC_SCOPE=dummy +OIDC_PROMPT= diff --git a/frontend/.npmrc b/frontend/.npmrc index 941368636..521a9f7c0 100644 --- a/frontend/.npmrc +++ b/frontend/.npmrc @@ -1,6 +1 @@ -# Update dependencies with WhiteSource Renovate -# Configuration see https://docs.renovatebot.com/setup-azure-devops/ - -registry=https://registry.npmjs.org/ -always-auth=false legacy-peer-deps=true diff --git a/frontend/eslint.config.mjs b/frontend/eslint.config.mjs index 43418e64d..0553da1ef 100644 --- a/frontend/eslint.config.mjs +++ b/frontend/eslint.config.mjs @@ -1,36 +1,20 @@ -import { 
fixupConfigRules, fixupPluginRules } from "@eslint/compat"; import react from "eslint-plugin-react"; import typescriptEslint from "@typescript-eslint/eslint-plugin"; import security from "eslint-plugin-security"; +import reactHooks from "eslint-plugin-react-hooks"; import globals from "globals"; import tsParser from "@typescript-eslint/parser"; -import path from "node:path"; -import { fileURLToPath } from "node:url"; import js from "@eslint/js"; -import { FlatCompat } from "@eslint/eslintrc"; -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); -const compat = new FlatCompat({ - baseDirectory: __dirname, - recommendedConfig: js.configs.recommended, - allConfig: js.configs.all -}); - -export default [...fixupConfigRules(compat.extends( - "eslint:recommended", - "plugin:react/recommended", - "plugin:@typescript-eslint/strict", - "plugin:@typescript-eslint/stylistic", - "plugin:security/recommended-legacy", - "plugin:react-hooks/recommended", - )), +export default [ + js.configs.recommended, { files: ["**/*.tsx", "**/*.ts"], plugins: { - react: fixupPluginRules(react), - "@typescript-eslint": fixupPluginRules(typescriptEslint), - security: fixupPluginRules(security), + react, + "@typescript-eslint": typescriptEslint, + security, + "react-hooks": reactHooks, }, languageOptions: { globals: { @@ -40,6 +24,7 @@ export default [...fixupConfigRules(compat.extends( ecmaVersion: "latest", sourceType: "module", parserOptions: { + project: "./tsconfig.json", ecmaFeatures: { jsx: true, }, @@ -52,11 +37,32 @@ export default [...fixupConfigRules(compat.extends( }, }, rules: { - "react/react-in-jsx-scope": "off", + // React recommended rules + ...react.configs.recommended.rules, + ...react.configs["jsx-runtime"].rules, + + // TypeScript recommended rules + // ...typescriptEslint.configs["strict-type-checked"].rules, + ...typescriptEslint.configs.recommended.rules, + ...typescriptEslint.configs["stylistic-type-checked"].rules, + + // Security recommended rules + ...security.configs.recommended.rules, + + // React Hooks recommended rules + ...reactHooks.configs.recommended.rules, + + // Custom overrides + "@typescript-eslint/consistent-type-definitions": "off", + "@typescript-eslint/no-explicit-any": "off", + "@typescript-eslint/prefer-nullish-coalescing": "off", "react/display-name": "off", "react/jsx-key": "off", - "@typescript-eslint/no-explicit-any": "off", - "@typescript-eslint/consistent-type-definitions":"off", + "react-hooks/immutability": "off", + "react-hooks/purity": "off", + "react-hooks/refs": "off", + "react-hooks/set-state-in-effect": "off", + "react-hooks/static-components": "off", }, } ]; \ No newline at end of file diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 7d593793d..b41461a85 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,63 +1,72 @@ { "name": "secobserve", - "version": "1.26.0", + "version": "1.48.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "secobserve", - "version": "1.26.0", + "version": "1.48.0", "license": "BSD-3-Clause", "dependencies": { "@emotion/react": "11.14.0", - "@emotion/styled": "11.14.0", - "@fortawesome/fontawesome-svg-core": "6.7.2", - "@fortawesome/free-brands-svg-icons": "6.7.2", - "@fortawesome/free-solid-svg-icons": "6.7.2", - "@fortawesome/react-fontawesome": "0.2.2", - "@textea/json-viewer": "3.5.0", - "axios": "1.7.9", - "chart.js": "4.4.7", - "markdown-to-jsx": "7.7.3", - "mermaid": "11.4.1", - "oidc-client-ts": "3.1.0", + 
"@emotion/styled": "11.14.1", + "@fortawesome/fontawesome-svg-core": "7.2.0", + "@fortawesome/free-brands-svg-icons": "7.2.0", + "@fortawesome/free-solid-svg-icons": "7.2.0", + "@fortawesome/react-fontawesome": "3.2.0", + "@mdxeditor/editor": "3.52.4", + "@mui/icons-material": "7.3.8", + "@mui/material": "7.3.8", + "@textea/json-viewer": "4.0.1", + "axios": "1.13.5", + "chart.js": "4.5.1", + "cm6-theme-basic-dark": "0.2.0", + "cm6-theme-basic-light": "0.2.0", + "humanize-duration": "3.33.2", + "markdown-to-jsx": "9.7.4", + "marked": "17.0.2", + "mermaid": "11.12.3", + "oidc-client-ts": "3.4.1", + "packageurl-js": "2.0.1", "prop-types": "15.8.1", - "query-string": "9.1.1", - "ra-input-rich-text": "5.4.4", - "react": "18.3.1", - "react-admin": "5.4.4", - "react-chartjs-2": "5.3.0", - "react-dom": "18.3.1", - "react-is": "18.3.1", - "react-oidc-context": "3.2.0", + "query-string": "9.3.1", + "ra-core": "5.14.2", + "ra-ui-materialui": "5.14.2", + "react": "19.0.0", + "react-admin": "5.14.2", + "react-chartjs-2": "5.3.1", + "react-dom": "19.0.0", + "react-is": "19.0.0", + "react-oidc-context": "3.3.0", + "react-syntax-highlighter": "16.1.0", "runtime-env-cra": "file:lib/runtime-env-cra", - "tss-react": "4.9.14" + "tss-react": "4.9.20" }, "devDependencies": { - "@eslint/compat": "1.2.5", - "@eslint/eslintrc": "3.2.0", - "@eslint/js": "9.18.0", + "@eslint/eslintrc": "3.3.3", + "@eslint/js": "9.39.2", "@microsoft/eslint-formatter-sarif": "3.1.0", - "@trivago/prettier-plugin-sort-imports": "5.2.1", - "@types/inflection": "1.13.2", - "@types/node": "22.10.6", - "@types/prop-types": "15.7.14", - "@types/react": "18.3.18", - "@types/react-dom": "18.3.5", - "@types/recharts": "1.8.29", - "@typescript-eslint/eslint-plugin": "8.20.0", - "@typescript-eslint/parser": "8.20.0", - "@vitejs/plugin-react": "4.3.4", - "eslint": "9.18.0", - "eslint-plugin-react": "7.37.4", - "eslint-plugin-react-hooks": "rc", + "@trivago/prettier-plugin-sort-imports": "6.0.2", + "@types/humanize-duration": "3.27.4", + "@types/node": "24.10.13", + "@types/prop-types": "15.7.15", + "@types/react": "19.2.0", + "@types/react-dom": "19.2.0", + "@types/react-syntax-highlighter": "15.5.13", + "@typescript-eslint/eslint-plugin": "8.56.0", + "@typescript-eslint/parser": "8.56.0", + "@vitejs/plugin-react": "5.1.4", + "eslint": "9.39.2", + "eslint-plugin-react": "7.37.5", + "eslint-plugin-react-hooks": "7.0.0", "eslint-plugin-security": "3.0.1", - "globals": "15.14.0", - "prettier": "3.4.2", - "rewire": "7.0.0", - "typescript": "5.7.3", - "vite": "6.0.7", - "yaml": "2.7.0" + "globals": "17.3.0", + "prettier": "3.8.1", + "rewire": "9.0.1", + "typescript": "5.9.3", + "vite": "7.3.1", + "yaml": "2.8.2" } }, "lib/runtime-env-cra": { @@ -70,60 +79,37 @@ "runtime-env-cra": "lib/index.js" } }, - "node_modules/@ampproject/remapping": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/@antfu/install-pkg": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/@antfu/install-pkg/-/install-pkg-0.4.1.tgz", - "integrity": "sha512-T7yB5QNG29afhWVkVq7XeIMBa5U/vs9mX69YqayXypPRmYzUmzwnYltplHmPtZ4HPCn+sQKeXW8I47wCbuBOjw==", + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/@antfu/install-pkg/-/install-pkg-1.1.0.tgz", + "integrity": "sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==", "license": "MIT", "dependencies": { - "package-manager-detector": "^0.2.0", - "tinyexec": "^0.3.0" + "package-manager-detector": "^1.3.0", + "tinyexec": "^1.0.1" }, "funding": { "url": "https://github.com/sponsors/antfu" } }, - "node_modules/@antfu/utils": { - "version": "0.7.10", - "resolved": "https://registry.npmjs.org/@antfu/utils/-/utils-0.7.10.tgz", - "integrity": "sha512-+562v9k4aI80m1+VuMHehNJWLOFjBnXn3tdOitzD0il5b7smkSBal4+a3oKiQTbrwMmN/TBUMDvbdoWDehgOww==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/antfu" - } - }, "node_modules/@babel/code-frame": { - "version": "7.26.2", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", - "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.25.9", + "@babel/helper-validator-identifier": "^7.28.5", "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" + "picocolors": "^1.1.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/compat-data": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.5.tgz", - "integrity": "sha512-XvcZi1KWf88RVbF9wn8MN6tYFloU5qX8KjuF3E1PVBmJ9eypXfs4GRiJwLuTZL0iSnJUKn1BFPa5BPZZJyFzPg==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", "dev": true, "license": "MIT", "engines": { @@ -131,22 +117,22 @@ } }, "node_modules/@babel/core": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.0.tgz", - "integrity": "sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.26.0", - "@babel/generator": "^7.26.0", - "@babel/helper-compilation-targets": "^7.25.9", - "@babel/helper-module-transforms": "^7.26.0", - "@babel/helpers": "^7.26.0", - "@babel/parser": "^7.26.0", - "@babel/template": "^7.25.9", - "@babel/traverse": "^7.25.9", - "@babel/types": "^7.26.0", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -179,15 +165,15 @@ } }, "node_modules/@babel/generator": { - "version": "7.26.5", - "resolved": 
"https://registry.npmjs.org/@babel/generator/-/generator-7.26.5.tgz", - "integrity": "sha512-2caSP6fN9I7HOe6nqhtft7V4g7/V/gfDsC3Ag4W7kEzzvRGKqiv0pu0HogPiZ3KaVSoNDhUws6IJjDjpfmYIXw==", + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.26.5", - "@babel/types": "^7.26.5", - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25", + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" }, "engines": { @@ -195,14 +181,14 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.26.5.tgz", - "integrity": "sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.26.5", - "@babel/helper-validator-option": "^7.25.9", + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" @@ -221,29 +207,38 @@ "semver": "bin/semver.js" } }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/helper-module-imports": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", - "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", "license": "MIT", "dependencies": { - "@babel/traverse": "^7.25.9", - "@babel/types": "^7.25.9" + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", - "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-module-imports": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9", - "@babel/traverse": "^7.25.9" + "@babel/helper-module-imports": "^7.28.6", + 
"@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -253,9 +248,9 @@ } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.26.5.tgz", - "integrity": "sha512-RS+jZcRdZdRFzMyr+wcsaqOmld1/EqTghfaBGQQd/WnRdzdlvSZ//kF7U8VQTxf1ynZ4cjUcYgjVGx13ewNPMg==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", "dev": true, "license": "MIT", "engines": { @@ -263,27 +258,27 @@ } }, "node_modules/@babel/helper-string-parser": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", - "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", - "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", "dev": true, "license": "MIT", "engines": { @@ -291,26 +286,26 @@ } }, "node_modules/@babel/helpers": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.0.tgz", - "integrity": "sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", + "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/template": "^7.25.9", - "@babel/types": "^7.26.0" + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.5.tgz", - "integrity": "sha512-SRJ4jYmXRqV1/Xc+TIVG84WjHBXKlxO9sHQnA2Pf12QQEAp1LOh6kDzNHXcUnbH1QI0FDoPPVOt+vyUDucxpaw==", + "version": "7.29.0", + "resolved": 
"https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", "license": "MIT", "dependencies": { - "@babel/types": "^7.26.5" + "@babel/types": "^7.29.0" }, "bin": { "parser": "bin/babel-parser.js" @@ -320,13 +315,13 @@ } }, "node_modules/@babel/plugin-transform-react-jsx-self": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.25.9.tgz", - "integrity": "sha512-y8quW6p0WHkEhmErnfe58r7x0A70uKphQm8Sp8cV7tjNQwK56sNVK0M73LK3WuYmsuyrftut4xAkjjgU0twaMg==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.25.9" + "@babel/helper-plugin-utils": "^7.27.1" }, "engines": { "node": ">=6.9.0" @@ -336,13 +331,13 @@ } }, "node_modules/@babel/plugin-transform-react-jsx-source": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.25.9.tgz", - "integrity": "sha512-+iqjT8xmXhhYv4/uiYd8FNQsraMFZIfxVSqxxVSZP0WbbSAWvBXAul0m/zu+7Vv4O/3WtApy9pmaTMiumEZgfg==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.25.9" + "@babel/helper-plugin-utils": "^7.27.1" }, "engines": { "node": ">=6.9.0" @@ -352,116 +347,576 @@ } }, "node_modules/@babel/runtime": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz", - "integrity": "sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.6.tgz", + "integrity": "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==", "license": "MIT", - "dependencies": { - "regenerator-runtime": "^0.14.0" - }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/template": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.9.tgz", - "integrity": "sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.25.9", - "@babel/parser": "^7.25.9", - "@babel/types": "^7.25.9" + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.5.tgz", - "integrity": "sha512-rkOSPOw+AXbgtwUga3U4u8RpoK9FEFWBNAlTpcnkLFjL5CT+oyHNuUUC/xx6XefEJ16r38r8Bc/lfp6rYuHeJQ==", + "version": "7.29.0", + "resolved": 
"https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.26.2", - "@babel/generator": "^7.26.5", - "@babel/parser": "^7.26.5", - "@babel/template": "^7.25.9", - "@babel/types": "^7.26.5", - "debug": "^4.3.1", - "globals": "^11.1.0" + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/traverse/node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, "node_modules/@babel/types": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.5.tgz", - "integrity": "sha512-L6mZmwFDK6Cjh1nRCLXpa6no13ZIioJDz7mdkzHv399pThrTa/k0nUlNaenOeh2kWu/iaOQYElEpKPUswUa9Vg==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@braintree/sanitize-url": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.1.1.tgz", - "integrity": "sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw==", + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.1.2.tgz", + "integrity": "sha512-jigsZK+sMF/cuiB7sERuo9V7N9jx+dhmHHnQyDSVdpZwVutaBu7WvNYqMDLSgFgfB30n452TP3vjDAvFC973mA==", "license": "MIT" }, "node_modules/@chevrotain/cst-dts-gen": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.0.3.tgz", - "integrity": "sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.1.1.tgz", + "integrity": "sha512-fRHyv6/f542qQqiRGalrfJl/evD39mAvbJLCekPazhiextEatq1Jx1K/i9gSd5NNO0ds03ek0Cbo/4uVKmOBcw==", "license": "Apache-2.0", "dependencies": { - "@chevrotain/gast": "11.0.3", - "@chevrotain/types": "11.0.3", - "lodash-es": "4.17.21" + "@chevrotain/gast": "11.1.1", + "@chevrotain/types": "11.1.1", + "lodash-es": "4.17.23" } }, "node_modules/@chevrotain/gast": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.0.3.tgz", - "integrity": "sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.1.1.tgz", + "integrity": "sha512-Ko/5vPEYy1vn5CbCjjvnSO4U7GgxyGm+dfUZZJIWTlQFkXkyym0jFYrWEU10hyCjrA7rQtiHtBr0EaZqvHFZvg==", "license": "Apache-2.0", "dependencies": { - "@chevrotain/types": "11.0.3", - "lodash-es": "4.17.21" + 
"@chevrotain/types": "11.1.1", + "lodash-es": "4.17.23" } }, "node_modules/@chevrotain/regexp-to-ast": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.0.3.tgz", - "integrity": "sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.1.1.tgz", + "integrity": "sha512-ctRw1OKSXkOrR8VTvOxrQ5USEc4sNrfwXHa1NuTcR7wre4YbjPcKw+82C2uylg/TEwFRgwLmbhlln4qkmDyteg==", "license": "Apache-2.0" }, "node_modules/@chevrotain/types": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.0.3.tgz", - "integrity": "sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.1.1.tgz", + "integrity": "sha512-wb2ToxG8LkgPYnKe9FH8oGn3TMCBdnwiuNC5l5y+CtlaVRbCytU0kbVsk6CGrqTL4ZN4ksJa0TXOYbxpbthtqw==", "license": "Apache-2.0" }, "node_modules/@chevrotain/utils": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.0.3.tgz", - "integrity": "sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.1.1.tgz", + "integrity": "sha512-71eTYMzYXYSFPrbg/ZwftSaSDld7UYlS8OQa3lNnn9jzNtpFbaReRRyghzqS7rI3CDaorqpPJJcXGHK+FE1TVQ==", "license": "Apache-2.0" }, + "node_modules/@codemirror/autocomplete": { + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.20.0.tgz", + "integrity": "sha512-bOwvTOIJcG5FVo5gUUupiwYh8MioPLQ4UcqbcRf7UQ98X90tCa9E1kZ3Z7tqwpZxYyOvh1YTYbmZE9RTfTp5hg==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@codemirror/commands": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.10.1.tgz", + "integrity": "sha512-uWDWFypNdQmz2y1LaNJzK7fL7TYKLeUAU0npEC685OKTF3KcQ2Vu3klIM78D7I6wGhktme0lh3CuQLv0ZCrD9Q==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.4.0", + "@codemirror/view": "^6.27.0", + "@lezer/common": "^1.1.0" + } + }, + "node_modules/@codemirror/lang-angular": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/@codemirror/lang-angular/-/lang-angular-0.1.4.tgz", + "integrity": "sha512-oap+gsltb/fzdlTQWD6BFF4bSLKcDnlxDsLdePiJpCVNKWXSTAbiiQeYI3UmES+BLAdkmIC1WjyztC1pi/bX4g==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-html": "^6.0.0", + "@codemirror/lang-javascript": "^6.1.2", + "@codemirror/language": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.3.3" + } + }, + "node_modules/@codemirror/lang-cpp": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/@codemirror/lang-cpp/-/lang-cpp-6.0.3.tgz", + "integrity": "sha512-URM26M3vunFFn9/sm6rzqrBzDgfWuDixp85uTY49wKudToc2jTHUrKIGGKs+QWND+YLofNNZpxcNGRynFJfvgA==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/cpp": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-css": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/@codemirror/lang-css/-/lang-css-6.3.1.tgz", + 
"integrity": "sha512-kr5fwBGiGtmz6l0LSJIbno9QrifNMUusivHbnA1H6Dmqy4HZFte3UAICix1VuKo0lMPKQr2rqB+0BkKi/S3Ejg==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.0.2", + "@lezer/css": "^1.1.7" + } + }, + "node_modules/@codemirror/lang-go": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@codemirror/lang-go/-/lang-go-6.0.1.tgz", + "integrity": "sha512-7fNvbyNylvqCphW9HD6WFnRpcDjr+KXX/FgqXy5H5ZS0eC5edDljukm/yNgYkwTsgp2busdod50AOTIy6Jikfg==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.6.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/go": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-html": { + "version": "6.4.11", + "resolved": "https://registry.npmjs.org/@codemirror/lang-html/-/lang-html-6.4.11.tgz", + "integrity": "sha512-9NsXp7Nwp891pQchI7gPdTwBuSuT3K65NGTHWHNJ55HjYcHLllr0rbIZNdOzas9ztc1EUVBlHou85FFZS4BNnw==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/lang-css": "^6.0.0", + "@codemirror/lang-javascript": "^6.0.0", + "@codemirror/language": "^6.4.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/common": "^1.0.0", + "@lezer/css": "^1.1.0", + "@lezer/html": "^1.3.12" + } + }, + "node_modules/@codemirror/lang-java": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-java/-/lang-java-6.0.2.tgz", + "integrity": "sha512-m5Nt1mQ/cznJY7tMfQTJchmrjdjQ71IDs+55d1GAa8DGaB8JXWsVCkVT284C3RTASaY43YknrK2X3hPO/J3MOQ==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/java": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-javascript": { + "version": "6.2.4", + "resolved": "https://registry.npmjs.org/@codemirror/lang-javascript/-/lang-javascript-6.2.4.tgz", + "integrity": "sha512-0WVmhp1QOqZ4Rt6GlVGwKJN3KW7Xh4H2q8ZZNGZaP6lRdxXJzmjm4FqvmOojVj6khWJHIb9sp7U/72W7xQgqAA==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.6.0", + "@codemirror/lint": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/common": "^1.0.0", + "@lezer/javascript": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-jinja": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@codemirror/lang-jinja/-/lang-jinja-6.0.0.tgz", + "integrity": "sha512-47MFmRcR8UAxd8DReVgj7WJN1WSAMT7OJnewwugZM4XiHWkOjgJQqvEM1NpMj9ALMPyxmlziEI1opH9IaEvmaw==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-html": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.2.0", + "@lezer/lr": "^1.4.0" + } + }, + "node_modules/@codemirror/lang-json": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-json/-/lang-json-6.0.2.tgz", + "integrity": "sha512-x2OtO+AvwEHrEwR0FyyPtfDUiloG3rnVTSZV1W8UteaLL8/MajQd8DpvUb2YVzC+/T18aSDv0H9mu+xw0EStoQ==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/json": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-less": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-less/-/lang-less-6.0.2.tgz", + "integrity": "sha512-EYdQTG22V+KUUk8Qq582g7FMnCZeEHsyuOJisHRft/mQ+ZSZ2w51NupvDUHiqtsOy7It5cHLPGfHQLpMh9bqpQ==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-css": "^6.2.0", 
+ "@codemirror/language": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-liquid": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/@codemirror/lang-liquid/-/lang-liquid-6.3.1.tgz", + "integrity": "sha512-S/jE/D7iij2Pu70AC65ME6AYWxOOcX20cSJvaPgY5w7m2sfxsArAcUAuUgm/CZCVmqoi9KiOlS7gj/gyLipABw==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/lang-html": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.3.1" + } + }, + "node_modules/@codemirror/lang-markdown": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@codemirror/lang-markdown/-/lang-markdown-6.5.0.tgz", + "integrity": "sha512-0K40bZ35jpHya6FriukbgaleaqzBLZfOh7HuzqbMxBXkbYMJDxfF39c23xOgxFezR+3G+tR2/Mup+Xk865OMvw==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.7.1", + "@codemirror/lang-html": "^6.0.0", + "@codemirror/language": "^6.3.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.2.1", + "@lezer/markdown": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-php": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-php/-/lang-php-6.0.2.tgz", + "integrity": "sha512-ZKy2v1n8Fc8oEXj0Th0PUMXzQJ0AIR6TaZU+PbDHExFwdu+guzOA4jmCHS1Nz4vbFezwD7LyBdDnddSJeScMCA==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-html": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/php": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-python": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/@codemirror/lang-python/-/lang-python-6.2.1.tgz", + "integrity": "sha512-IRjC8RUBhn9mGR9ywecNhB51yePWCGgvHfY1lWN/Mrp3cKuHr0isDKia+9HnvhiWNnMpbGhWrkhuWOc09exRyw==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.3.2", + "@codemirror/language": "^6.8.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.2.1", + "@lezer/python": "^1.1.4" + } + }, + "node_modules/@codemirror/lang-rust": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-rust/-/lang-rust-6.0.2.tgz", + "integrity": "sha512-EZaGjCUegtiU7kSMvOfEZpaCReowEf3yNidYu7+vfuGTm9ow4mthAparY5hisJqOHmJowVH3Upu+eJlUji6qqA==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/rust": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-sass": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-sass/-/lang-sass-6.0.2.tgz", + "integrity": "sha512-l/bdzIABvnTo1nzdY6U+kPAC51czYQcOErfzQ9zSm9D8GmNPD0WTW8st/CJwBTPLO8jlrbyvlSEcN20dc4iL0Q==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-css": "^6.2.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.0.2", + "@lezer/sass": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-sql": { + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@codemirror/lang-sql/-/lang-sql-6.10.0.tgz", + "integrity": "sha512-6ayPkEd/yRw0XKBx5uAiToSgGECo/GY2NoJIHXIIQh1EVwLuKoU8BP/qK0qH5NLXAbtJRLuT73hx7P9X34iO4w==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": 
"^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-vue": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@codemirror/lang-vue/-/lang-vue-0.1.3.tgz", + "integrity": "sha512-QSKdtYTDRhEHCfo5zOShzxCmqKJvgGrZwDQSdbvCRJ5pRLWBS7pD/8e/tH44aVQT6FKm0t6RVNoSUWHOI5vNug==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-html": "^6.0.0", + "@codemirror/lang-javascript": "^6.1.2", + "@codemirror/language": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.3.1" + } + }, + "node_modules/@codemirror/lang-wast": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-wast/-/lang-wast-6.0.2.tgz", + "integrity": "sha512-Imi2KTpVGm7TKuUkqyJ5NRmeFWF7aMpNiwHnLQe0x9kmrxElndyH0K6H/gXtWwY6UshMRAhpENsgfpSwsgmC6Q==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-xml": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@codemirror/lang-xml/-/lang-xml-6.1.0.tgz", + "integrity": "sha512-3z0blhicHLfwi2UgkZYRPioSgVTo9PV5GP5ducFH6FaHy0IAJRg+ixj5gTR1gnT/glAIC8xv4w2VL1LoZfs+Jg==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.4.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/xml": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-yaml": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-yaml/-/lang-yaml-6.1.2.tgz", + "integrity": "sha512-dxrfG8w5Ce/QbT7YID7mWZFKhdhsaTNOYjOkSIMt1qmC4VQnXSDSYVHHHn8k6kJUfIhtLo8t1JJgltlxWdsITw==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.2.0", + "@lezer/lr": "^1.0.0", + "@lezer/yaml": "^1.0.0" + } + }, + "node_modules/@codemirror/language": { + "version": "6.12.1", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.12.1.tgz", + "integrity": "sha512-Fa6xkSiuGKc8XC8Cn96T+TQHYj4ZZ7RdFmXA3i9xe/3hLHfwPZdM+dqfX0Cp0zQklBKhVD8Yzc8LS45rkqcwpQ==", + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.23.0", + "@lezer/common": "^1.5.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "node_modules/@codemirror/language-data": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/@codemirror/language-data/-/language-data-6.5.2.tgz", + "integrity": "sha512-CPkWBKrNS8stYbEU5kwBwTf3JB1kghlbh4FSAwzGW2TEscdeHHH4FGysREW86Mqnj3Qn09s0/6Ea/TutmoTobg==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-angular": "^0.1.0", + "@codemirror/lang-cpp": "^6.0.0", + "@codemirror/lang-css": "^6.0.0", + "@codemirror/lang-go": "^6.0.0", + "@codemirror/lang-html": "^6.0.0", + "@codemirror/lang-java": "^6.0.0", + "@codemirror/lang-javascript": "^6.0.0", + "@codemirror/lang-jinja": "^6.0.0", + "@codemirror/lang-json": "^6.0.0", + "@codemirror/lang-less": "^6.0.0", + "@codemirror/lang-liquid": "^6.0.0", + "@codemirror/lang-markdown": "^6.0.0", + "@codemirror/lang-php": "^6.0.0", + "@codemirror/lang-python": "^6.0.0", + "@codemirror/lang-rust": "^6.0.0", + "@codemirror/lang-sass": "^6.0.0", + "@codemirror/lang-sql": "^6.0.0", + "@codemirror/lang-vue": "^0.1.1", + "@codemirror/lang-wast": 
"^6.0.0", + "@codemirror/lang-xml": "^6.0.0", + "@codemirror/lang-yaml": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/legacy-modes": "^6.4.0" + } + }, + "node_modules/@codemirror/legacy-modes": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/@codemirror/legacy-modes/-/legacy-modes-6.5.2.tgz", + "integrity": "sha512-/jJbwSTazlQEDOQw2FJ8LEEKVS72pU0lx6oM54kGpL8t/NJ2Jda3CZ4pcltiKTdqYSRk3ug1B3pil1gsjA6+8Q==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0" + } + }, + "node_modules/@codemirror/lint": { + "version": "6.9.3", + "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.9.3.tgz", + "integrity": "sha512-y3YkYhdnhjDBAe0VIA0c4wVoFOvnp8CnAvfLqi0TqotIv92wIlAAP7HELOpLBsKwjAX6W92rSflA6an/2zBvXw==", + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.35.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/merge": { + "version": "6.11.2", + "resolved": "https://registry.npmjs.org/@codemirror/merge/-/merge-6.11.2.tgz", + "integrity": "sha512-NO5EJd2rLRbwVWLgMdhIntDIhfDtMOKYEZgqV5WnkNUS2oXOCVWLPjG/kgl/Jth2fGiOuG947bteqxP9nBXmMg==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/highlight": "^1.0.0", + "style-mod": "^4.1.0" + } + }, + "node_modules/@codemirror/search": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.6.0.tgz", + "integrity": "sha512-koFuNXcDvyyotWcgOnZGmY7LZqEOXZaaxD/j6n18TCLx2/9HieZJ5H6hs1g8FiRxBD0DNfs0nXn17g872RmYdw==", + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.37.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/state": { + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.5.4.tgz", + "integrity": "sha512-8y7xqG/hpB53l25CIoit9/ngxdfoG+fx+V3SHBrinnhOtLvKHRyAJJuHzkWrR4YXXLX8eXBsejgAAxHUOdW1yw==", + "license": "MIT", + "dependencies": { + "@marijn/find-cluster-break": "^1.0.0" + } + }, + "node_modules/@codemirror/view": { + "version": "6.39.12", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.39.12.tgz", + "integrity": "sha512-f+/VsHVn/kOA9lltk/GFzuYwVVAKmOnNjxbrhkk3tPHntFqjWeI2TbIXx006YkBkqC10wZ4NsnWXCQiFPeAISQ==", + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.5.0", + "crelt": "^1.0.6", + "style-mod": "^4.1.0", + "w3c-keyname": "^2.2.4" + } + }, + "node_modules/@codesandbox/nodebox": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/@codesandbox/nodebox/-/nodebox-0.1.8.tgz", + "integrity": "sha512-2VRS6JDSk+M+pg56GA6CryyUSGPjBEe8Pnae0QL3jJF1mJZJVMDKr93gJRtBbLkfZN6LD/DwMtf+2L0bpWrjqg==", + "license": "SEE LICENSE IN ./LICENSE", + "dependencies": { + "outvariant": "^1.4.0", + "strict-event-emitter": "^0.4.3" + } + }, + "node_modules/@codesandbox/sandpack-client": { + "version": "2.19.8", + "resolved": "https://registry.npmjs.org/@codesandbox/sandpack-client/-/sandpack-client-2.19.8.tgz", + "integrity": "sha512-CMV4nr1zgKzVpx4I3FYvGRM5YT0VaQhALMW9vy4wZRhEyWAtJITQIqZzrTGWqB1JvV7V72dVEUCUPLfYz5hgJQ==", + "license": "Apache-2.0", + "dependencies": { + "@codesandbox/nodebox": "0.1.8", + "buffer": "^6.0.3", + "dequal": "^2.0.2", + "mime-db": "^1.52.0", + "outvariant": "1.4.0", + "static-browser-server": "1.0.3" + } + }, + "node_modules/@codesandbox/sandpack-react": { + "version": "2.20.0", + "resolved": 
"https://registry.npmjs.org/@codesandbox/sandpack-react/-/sandpack-react-2.20.0.tgz", + "integrity": "sha512-takd1YpW/PMQ6KPQfvseWLHWklJovGY8QYj8MtWnskGKbjOGJ6uZfyZbcJ6aCFLQMpNyjTqz9AKNbvhCOZ1TUQ==", + "license": "Apache-2.0", + "dependencies": { + "@codemirror/autocomplete": "^6.4.0", + "@codemirror/commands": "^6.1.3", + "@codemirror/lang-css": "^6.0.1", + "@codemirror/lang-html": "^6.4.0", + "@codemirror/lang-javascript": "^6.1.2", + "@codemirror/language": "^6.3.2", + "@codemirror/state": "^6.2.0", + "@codemirror/view": "^6.7.1", + "@codesandbox/sandpack-client": "^2.19.8", + "@lezer/highlight": "^1.1.3", + "@react-hook/intersection-observer": "^3.1.1", + "@stitches/core": "^1.2.6", + "anser": "^2.1.1", + "clean-set": "^1.1.2", + "dequal": "^2.0.2", + "escape-carriage": "^1.3.1", + "lz-string": "^1.4.4", + "react-devtools-inline": "4.4.0", + "react-is": "^17.0.2" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17 || ^18 || ^19", + "react-dom": "^16.8.0 || ^17 || ^18 || ^19" + } + }, + "node_modules/@codesandbox/sandpack-react/node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "license": "MIT" + }, "node_modules/@emotion/babel-plugin": { "version": "11.13.5", "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.13.5.tgz", @@ -501,9 +956,9 @@ "license": "MIT" }, "node_modules/@emotion/is-prop-valid": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.3.1.tgz", - "integrity": "sha512-/ACwoqx7XQi9knQs/G0qKvv5teDMhD7bXYns9N/wM8ah8iNb8jZ2uNO0YOgiq2o2poIvVtJS2YALasQuMSQ7Kw==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.4.0.tgz", + "integrity": "sha512-QgD4fyscGcbbKwJmqNvUMSE02OsHUa+lAWKdEUIJKgqe5IwRSKd7+KhibEWdaKwgjLj0DRSHA9biAIqGBk05lw==", "license": "MIT", "dependencies": { "@emotion/memoize": "^0.9.0" @@ -559,9 +1014,9 @@ "license": "MIT" }, "node_modules/@emotion/styled": { - "version": "11.14.0", - "resolved": "https://registry.npmjs.org/@emotion/styled/-/styled-11.14.0.tgz", - "integrity": "sha512-XxfOnXFffatap2IyCeJyNov3kiDQWoR08gPUQxvbL7fxKryGBKUZUkG6Hz48DZwVrJSVh9sJboyV1Ds4OW6SgA==", + "version": "11.14.1", + "resolved": "https://registry.npmjs.org/@emotion/styled/-/styled-11.14.1.tgz", + "integrity": "sha512-qEEJt42DuToa3gurlH4Qqc1kVpNq8wO8cJtDzU46TjlzWjDlsVyevtYCRijVq3SrHsROS+gVQ8Fnea108GnKzw==", "license": "MIT", "dependencies": { "@babel/runtime": "^7.18.3", @@ -609,9 +1064,9 @@ "license": "MIT" }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.2.tgz", - "integrity": "sha512-thpVCb/rhxE/BnMLQ7GReQLLN8q9qbHmI55F4489/ByVg2aQaQ6kbcLb6FHkocZzQhxc4gx0sCk0tJkKBFzDhA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.2.tgz", + "integrity": "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==", "cpu": [ "ppc64" ], @@ -626,9 +1081,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.2.tgz", - "integrity": "sha512-tmwl4hJkCfNHwFB3nBa8z1Uy3ypZpxqxfTQOcHX+xRByyYgunVbZ9MzUUfb0RxaHIMnbHagwAxuTL+tnNM+1/Q==", + "version": "0.27.2", + "resolved": 
"https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.2.tgz", + "integrity": "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==", "cpu": [ "arm" ], @@ -643,9 +1098,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.2.tgz", - "integrity": "sha512-cNLgeqCqV8WxfcTIOeL4OAtSmL8JjcN6m09XIgro1Wi7cF4t/THaWEa7eL5CMoMBdjoHOTh/vwTO/o2TRXIyzg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.2.tgz", + "integrity": "sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==", "cpu": [ "arm64" ], @@ -660,9 +1115,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.2.tgz", - "integrity": "sha512-B6Q0YQDqMx9D7rvIcsXfmJfvUYLoP722bgfBlO5cGvNVb5V/+Y7nhBE3mHV9OpxBf4eAS2S68KZztiPaWq4XYw==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.2.tgz", + "integrity": "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==", "cpu": [ "x64" ], @@ -677,9 +1132,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.2.tgz", - "integrity": "sha512-kj3AnYWc+CekmZnS5IPu9D+HWtUI49hbnyqk0FLEJDbzCIQt7hg7ucF1SQAilhtYpIujfaHr6O0UHlzzSPdOeA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.2.tgz", + "integrity": "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==", "cpu": [ "arm64" ], @@ -694,9 +1149,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.2.tgz", - "integrity": "sha512-WeSrmwwHaPkNR5H3yYfowhZcbriGqooyu3zI/3GGpF8AyUdsrrP0X6KumITGA9WOyiJavnGZUwPGvxvwfWPHIA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.2.tgz", + "integrity": "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==", "cpu": [ "x64" ], @@ -711,9 +1166,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.2.tgz", - "integrity": "sha512-UN8HXjtJ0k/Mj6a9+5u6+2eZ2ERD7Edt1Q9IZiB5UZAIdPnVKDoG7mdTVGhHJIeEml60JteamR3qhsr1r8gXvg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.2.tgz", + "integrity": "sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==", "cpu": [ "arm64" ], @@ -728,9 +1183,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.2.tgz", - "integrity": "sha512-TvW7wE/89PYW+IevEJXZ5sF6gJRDY/14hyIGFXdIucxCsbRmLUcjseQu1SyTko+2idmCw94TgyaEZi9HUSOe3Q==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.2.tgz", + "integrity": "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==", "cpu": [ "x64" ], @@ -745,9 +1200,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.24.2", - 
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.2.tgz", - "integrity": "sha512-n0WRM/gWIdU29J57hJyUdIsk0WarGd6To0s+Y+LwvlC55wt+GT/OgkwoXCXvIue1i1sSNWblHEig00GBWiJgfA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.2.tgz", + "integrity": "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==", "cpu": [ "arm" ], @@ -762,9 +1217,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.2.tgz", - "integrity": "sha512-7HnAD6074BW43YvvUmE/35Id9/NB7BeX5EoNkK9obndmZBUk8xmJJeU7DwmUeN7tkysslb2eSl6CTrYz6oEMQg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.2.tgz", + "integrity": "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==", "cpu": [ "arm64" ], @@ -779,9 +1234,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.2.tgz", - "integrity": "sha512-sfv0tGPQhcZOgTKO3oBE9xpHuUqguHvSo4jl+wjnKwFpapx+vUDcawbwPNuBIAYdRAvIDBfZVvXprIj3HA+Ugw==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.2.tgz", + "integrity": "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==", "cpu": [ "ia32" ], @@ -796,9 +1251,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.2.tgz", - "integrity": "sha512-CN9AZr8kEndGooS35ntToZLTQLHEjtVB5n7dl8ZcTZMonJ7CCfStrYhrzF97eAecqVbVJ7APOEe18RPI4KLhwQ==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.2.tgz", + "integrity": "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==", "cpu": [ "loong64" ], @@ -813,9 +1268,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.2.tgz", - "integrity": "sha512-iMkk7qr/wl3exJATwkISxI7kTcmHKE+BlymIAbHO8xanq/TjHaaVThFF6ipWzPHryoFsesNQJPE/3wFJw4+huw==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.2.tgz", + "integrity": "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==", "cpu": [ "mips64el" ], @@ -830,9 +1285,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.2.tgz", - "integrity": "sha512-shsVrgCZ57Vr2L8mm39kO5PPIb+843FStGt7sGGoqiiWYconSxwTiuswC1VJZLCjNiMLAMh34jg4VSEQb+iEbw==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.2.tgz", + "integrity": "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==", "cpu": [ "ppc64" ], @@ -847,9 +1302,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.2.tgz", - "integrity": "sha512-4eSFWnU9Hhd68fW16GD0TINewo1L6dRrB+oLNNbYyMUAeOD2yCK5KXGK1GH4qD/kT+bTEXjsyTCiJGHPZ3eM9Q==", + "version": "0.27.2", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.2.tgz", + "integrity": "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==", "cpu": [ "riscv64" ], @@ -864,9 +1319,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.2.tgz", - "integrity": "sha512-S0Bh0A53b0YHL2XEXC20bHLuGMOhFDO6GN4b3YjRLK//Ep3ql3erpNcPlEFed93hsQAjAQDNsvcK+hV90FubSw==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.2.tgz", + "integrity": "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==", "cpu": [ "s390x" ], @@ -881,9 +1336,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.2.tgz", - "integrity": "sha512-8Qi4nQcCTbLnK9WoMjdC9NiTG6/E38RNICU6sUNqK0QFxCYgoARqVqxdFmWkdonVsvGqWhmm7MO0jyTqLqwj0Q==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.2.tgz", + "integrity": "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==", "cpu": [ "x64" ], @@ -898,9 +1353,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.24.2.tgz", - "integrity": "sha512-wuLK/VztRRpMt9zyHSazyCVdCXlpHkKm34WUyinD2lzK07FAHTq0KQvZZlXikNWkDGoT6x3TD51jKQ7gMVpopw==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.2.tgz", + "integrity": "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==", "cpu": [ "arm64" ], @@ -915,9 +1370,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.2.tgz", - "integrity": "sha512-VefFaQUc4FMmJuAxmIHgUmfNiLXY438XrL4GDNV1Y1H/RW3qow68xTwjZKfj/+Plp9NANmzbH5R40Meudu8mmw==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.2.tgz", + "integrity": "sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==", "cpu": [ "x64" ], @@ -932,9 +1387,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.2.tgz", - "integrity": "sha512-YQbi46SBct6iKnszhSvdluqDmxCJA+Pu280Av9WICNwQmMxV7nLRHZfjQzwbPs3jeWnuAhE9Jy0NrnJ12Oz+0A==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.2.tgz", + "integrity": "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==", "cpu": [ "arm64" ], @@ -949,9 +1404,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.2.tgz", - "integrity": "sha512-+iDS6zpNM6EnJyWv0bMGLWSWeXGN/HTaF/LXHXHwejGsVi+ooqDfMCCTerNFxEkM3wYVcExkeGXNqshc9iMaOA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.2.tgz", + "integrity": "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==", "cpu": [ "x64" ], @@ -965,10 +1420,27 @@ "node": ">=18" } }, + "node_modules/@esbuild/openharmony-arm64": { + "version": 
"0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.2.tgz", + "integrity": "sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@esbuild/sunos-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.2.tgz", - "integrity": "sha512-hTdsW27jcktEvpwNHJU4ZwWFGkz2zRJUz8pvddmXPtXDzVKTTINmlmga3ZzwcuMpUvLw7JkLy9QLKyGpD2Yxig==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.2.tgz", + "integrity": "sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==", "cpu": [ "x64" ], @@ -983,9 +1455,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.2.tgz", - "integrity": "sha512-LihEQ2BBKVFLOC9ZItT9iFprsE9tqjDjnbulhHoFxYQtQfai7qfluVODIYxt1PgdoyQkz23+01rzwNwYfutxUQ==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.2.tgz", + "integrity": "sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==", "cpu": [ "arm64" ], @@ -1000,9 +1472,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.2.tgz", - "integrity": "sha512-q+iGUwfs8tncmFC9pcnD5IvRHAzmbwQ3GPS5/ceCyHdjXubwQWI12MKWSNSMYLJMq23/IUCvJMS76PDqXe1fxA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.2.tgz", + "integrity": "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==", "cpu": [ "ia32" ], @@ -1017,9 +1489,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.2.tgz", - "integrity": "sha512-7VTgWzgMGvup6aSqDPLiW5zHaxYJGTO4OokMjIlrCtf+VpEL+cXKtCvg723iguPYI5oaUNdS+/V7OU2gvXVWEg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.2.tgz", + "integrity": "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==", "cpu": [ "x64" ], @@ -1034,9 +1506,9 @@ } }, "node_modules/@eslint-community/eslint-utils": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.1.tgz", - "integrity": "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA==", + "version": "4.9.1", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz", + "integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==", "dev": true, "license": "MIT", "dependencies": { @@ -1053,52 +1525,47 @@ } }, "node_modules/@eslint-community/regexpp": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", - "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "version": "4.12.2", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", + "integrity": 
"sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", "dev": true, "license": "MIT", "engines": { "node": "^12.0.0 || ^14.0.0 || >=16.0.0" } }, - "node_modules/@eslint/compat": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/@eslint/compat/-/compat-1.2.5.tgz", - "integrity": "sha512-5iuG/StT+7OfvhoBHPlmxkPA9om6aDUFgmD4+mWKAGsYt4vCe8rypneG03AuseyRHBmcCLXQtIH5S26tIoggLg==", + "node_modules/@eslint/config-array": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", + "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", "dev": true, "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^2.1.7", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "peerDependencies": { - "eslint": "^9.10.0" - }, - "peerDependenciesMeta": { - "eslint": { - "optional": true - } } }, - "node_modules/@eslint/config-array": { - "version": "0.19.1", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.1.tgz", - "integrity": "sha512-fo6Mtm5mWyKjA/Chy1BYTdn5mGJoDNjC7C64ug20ADsRDGrA85bN3uK3MaKbeRkRuuIEAR5N33Jr1pbm411/PA==", + "node_modules/@eslint/config-helpers": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz", + "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/object-schema": "^2.1.5", - "debug": "^4.3.1", - "minimatch": "^3.1.2" + "@eslint/core": "^0.17.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/@eslint/core": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.10.0.tgz", - "integrity": "sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw==", + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz", + "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -1109,9 +1576,9 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.2.0.tgz", - "integrity": "sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.3.tgz", + "integrity": "sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==", "dev": true, "license": "MIT", "dependencies": { @@ -1121,7 +1588,7 @@ "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", + "js-yaml": "^4.1.1", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" }, @@ -1146,19 +1613,22 @@ } }, "node_modules/@eslint/js": { - "version": "9.18.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.18.0.tgz", - "integrity": "sha512-fK6L7rxcq6/z+AaQMtiFTkvbHkBLNlwyRxHpKawP0x3u9+NC6MQTnFW+AdpwC6gfHTW0051cokQgtTN2FqlxQA==", + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.2.tgz", + "integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==", "dev": true, "license": 
"MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" } }, "node_modules/@eslint/object-schema": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.5.tgz", - "integrity": "sha512-o0bhxnL89h5Bae5T318nFoFzGy+YE5i/gGkoPAgkmTVdRKTiv3p8JHevPiPaMwoloKfEiiaHlawCqaZMqRm+XQ==", + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz", + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1166,75 +1636,128 @@ } }, "node_modules/@eslint/plugin-kit": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.5.tgz", - "integrity": "sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A==", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz", + "integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.10.0", + "@eslint/core": "^0.17.0", "levn": "^0.4.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, + "node_modules/@floating-ui/core": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.4.tgz", + "integrity": "sha512-C3HlIdsBxszvm5McXlB8PeOEWfBhcGBTZGkGlWc2U0KFY5IwG5OQEuQ8rq52DZmcHDlPLd+YFBK+cZcytwIFWg==", + "license": "MIT", + "dependencies": { + "@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/dom": { + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.5.tgz", + "integrity": "sha512-N0bD2kIPInNHUHehXhMke1rBGs1dwqvC9O9KYMyyjK7iXt7GAhnro7UlcuYcGdS/yYOlq0MAVgrow8IbWJwyqg==", + "license": "MIT", + "dependencies": { + "@floating-ui/core": "^1.7.4", + "@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/react": { + "version": "0.27.17", + "resolved": "https://registry.npmjs.org/@floating-ui/react/-/react-0.27.17.tgz", + "integrity": "sha512-LGVZKHwmWGg6MRHjLLgsfyaX2y2aCNgnD1zT/E6B+/h+vxg+nIJUqHPAlTzsHDyqdgEpJ1Np5kxWuFEErXzoGg==", + "license": "MIT", + "dependencies": { + "@floating-ui/react-dom": "^2.1.7", + "@floating-ui/utils": "^0.2.10", + "tabbable": "^6.0.0" + }, + "peerDependencies": { + "react": ">=17.0.0", + "react-dom": ">=17.0.0" + } + }, + "node_modules/@floating-ui/react-dom": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.7.tgz", + "integrity": "sha512-0tLRojf/1Go2JgEVm+3Frg9A3IW8bJgKgdO0BN5RkF//ufuz2joZM63Npau2ff3J6lUVYgDSNzNkR+aH3IVfjg==", + "license": "MIT", + "dependencies": { + "@floating-ui/dom": "^1.7.5" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@floating-ui/utils": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz", + "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", + "license": "MIT" + }, "node_modules/@fortawesome/fontawesome-common-types": { - "version": "6.7.2", - "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-6.7.2.tgz", - "integrity": 
"sha512-Zs+YeHUC5fkt7Mg1l6XTniei3k4bwG/yo3iFUtZWd/pMx9g3fdvkSK9E0FOC+++phXOka78uJcYb8JaFkW52Xg==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-7.2.0.tgz", + "integrity": "sha512-IpR0bER9FY25p+e7BmFH25MZKEwFHTfRAfhOyJubgiDnoJNsSvJ7nigLraHtp4VOG/cy8D7uiV0dLkHOne5Fhw==", "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/@fortawesome/fontawesome-svg-core": { - "version": "6.7.2", - "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-svg-core/-/fontawesome-svg-core-6.7.2.tgz", - "integrity": "sha512-yxtOBWDrdi5DD5o1pmVdq3WMCvnobT0LU6R8RyyVXPvFRd2o79/0NCuQoCjNTeZz9EzA9xS3JxNWfv54RIHFEA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-svg-core/-/fontawesome-svg-core-7.2.0.tgz", + "integrity": "sha512-6639htZMjEkwskf3J+e6/iar+4cTNM9qhoWuRfj9F3eJD6r7iCzV1SWnQr2Mdv0QT0suuqU8BoJCZUyCtP9R4Q==", "license": "MIT", "dependencies": { - "@fortawesome/fontawesome-common-types": "6.7.2" + "@fortawesome/fontawesome-common-types": "7.2.0" }, "engines": { "node": ">=6" } }, "node_modules/@fortawesome/free-brands-svg-icons": { - "version": "6.7.2", - "resolved": "https://registry.npmjs.org/@fortawesome/free-brands-svg-icons/-/free-brands-svg-icons-6.7.2.tgz", - "integrity": "sha512-zu0evbcRTgjKfrr77/2XX+bU+kuGfjm0LbajJHVIgBWNIDzrhpRxiCPNT8DW5AdmSsq7Mcf9D1bH0aSeSUSM+Q==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@fortawesome/free-brands-svg-icons/-/free-brands-svg-icons-7.2.0.tgz", + "integrity": "sha512-VNG8xqOip1JuJcC3zsVsKRQ60oXG9+oYNDCosjoU/H9pgYmLTEwWw8pE0jhPz/JWdHeUuK6+NQ3qsM4gIbdbYQ==", "license": "(CC-BY-4.0 AND MIT)", "dependencies": { - "@fortawesome/fontawesome-common-types": "6.7.2" + "@fortawesome/fontawesome-common-types": "7.2.0" }, "engines": { "node": ">=6" } }, "node_modules/@fortawesome/free-solid-svg-icons": { - "version": "6.7.2", - "resolved": "https://registry.npmjs.org/@fortawesome/free-solid-svg-icons/-/free-solid-svg-icons-6.7.2.tgz", - "integrity": "sha512-GsBrnOzU8uj0LECDfD5zomZJIjrPhIlWU82AHwa2s40FKH+kcxQaBvBo3Z4TxyZHIyX8XTDxsyA33/Vx9eFuQA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@fortawesome/free-solid-svg-icons/-/free-solid-svg-icons-7.2.0.tgz", + "integrity": "sha512-YTVITFGN0/24PxzXrwqCgnyd7njDuzp5ZvaCx5nq/jg55kUYd94Nj8UTchBdBofi/L0nwRfjGOg0E41d2u9T1w==", "license": "(CC-BY-4.0 AND MIT)", "dependencies": { - "@fortawesome/fontawesome-common-types": "6.7.2" + "@fortawesome/fontawesome-common-types": "7.2.0" }, "engines": { "node": ">=6" } }, "node_modules/@fortawesome/react-fontawesome": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/@fortawesome/react-fontawesome/-/react-fontawesome-0.2.2.tgz", - "integrity": "sha512-EnkrprPNqI6SXJl//m29hpaNzOp1bruISWaOiRtkMi/xSvHJlzc2j2JAYS7egxt/EbjSNV/k6Xy0AQI6vB2+1g==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@fortawesome/react-fontawesome/-/react-fontawesome-3.2.0.tgz", + "integrity": "sha512-E9Gu1hqd6JussVO26EC4WqRZssXMnQr2ol7ZNWkkFOH8jZUaxDJ9Z9WF9wIVkC+kJGXUdY3tlffpDwEKfgQrQw==", "license": "MIT", - "dependencies": { - "prop-types": "^15.8.1" + "engines": { + "node": ">=20" }, "peerDependencies": { - "@fortawesome/fontawesome-svg-core": "~1 || ~6", - "react": ">=16.3" + "@fortawesome/fontawesome-svg-core": "~6 || ~7", + "react": "^18.0.0 || ^19.0.0" } }, "node_modules/@humanfs/core": { @@ -1248,33 +1771,19 @@ } }, "node_modules/@humanfs/node": { - "version": "0.16.6", - "resolved": 
"https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", - "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", + "version": "0.16.7", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz", + "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", "dev": true, "license": "Apache-2.0", "dependencies": { "@humanfs/core": "^0.19.1", - "@humanwhocodes/retry": "^0.3.0" + "@humanwhocodes/retry": "^0.4.0" }, "engines": { "node": ">=18.18.0" } }, - "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", - "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=18.18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, "node_modules/@humanwhocodes/config-array": { "version": "0.13.0", "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", @@ -1314,9 +1823,9 @@ "license": "BSD-3-Clause" }, "node_modules/@humanwhocodes/retry": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.1.tgz", - "integrity": "sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA==", + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1334,33 +1843,35 @@ "license": "MIT" }, "node_modules/@iconify/utils": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@iconify/utils/-/utils-2.2.1.tgz", - "integrity": "sha512-0/7J7hk4PqXmxo5PDBDxmnecw5PxklZJfNjIVG9FM0mEfVrvfudS22rYWsqVk6gR3UJ/mSYS90X4R3znXnqfNA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@iconify/utils/-/utils-3.1.0.tgz", + "integrity": "sha512-Zlzem1ZXhI1iHeeERabLNzBHdOa4VhQbqAcOQaMKuTuyZCpwKbC2R4Dd0Zo3g9EAc+Y4fiarO8HIHRAth7+skw==", "license": "MIT", "dependencies": { - "@antfu/install-pkg": "^0.4.1", - "@antfu/utils": "^0.7.10", + "@antfu/install-pkg": "^1.1.0", "@iconify/types": "^2.0.0", - "debug": "^4.4.0", - "globals": "^15.13.0", - "kolorist": "^1.8.0", - "local-pkg": "^0.5.1", - "mlly": "^1.7.3" + "mlly": "^1.8.0" } }, "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", - "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", "license": "MIT", "dependencies": { - "@jridgewell/set-array": "^1.2.1", - "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": 
"sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" } }, "node_modules/@jridgewell/resolve-uri": { @@ -1372,25 +1883,16 @@ "node": ">=6.0.0" } }, - "node_modules/@jridgewell/set-array": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", - "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", - "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", "license": "MIT", "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", @@ -1403,13 +1905,535 @@ "integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==", "license": "MIT" }, + "node_modules/@lexical/clipboard": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/clipboard/-/clipboard-0.35.0.tgz", + "integrity": "sha512-ko7xSIIiayvDiqjNDX6fgH9RlcM6r9vrrvJYTcfGVBor5httx16lhIi0QJZ4+RNPvGtTjyFv4bwRmsixRRwImg==", + "license": "MIT", + "dependencies": { + "@lexical/html": "0.35.0", + "@lexical/list": "0.35.0", + "@lexical/selection": "0.35.0", + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/code": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/code/-/code-0.35.0.tgz", + "integrity": "sha512-ox4DZwETQ9IA7+DS6PN8RJNwSAF7RMjL7YTVODIqFZ5tUFIf+5xoCHbz7Fll0Bvixlp12hVH90xnLwTLRGpkKw==", + "license": "MIT", + "dependencies": { + "@lexical/utils": "0.35.0", + "lexical": "0.35.0", + "prismjs": "^1.30.0" + } + }, + "node_modules/@lexical/devtools-core": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/devtools-core/-/devtools-core-0.35.0.tgz", + "integrity": "sha512-C2wwtsMCR6ZTfO0TqpSM17RLJWyfHmifAfCTjFtOJu15p3M6NO/nHYK5Mt7YMQteuS89mOjB4ng8iwoLEZ6QpQ==", + "license": "MIT", + "dependencies": { + "@lexical/html": "0.35.0", + "@lexical/link": "0.35.0", + "@lexical/mark": "0.35.0", + "@lexical/table": "0.35.0", + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + }, + "peerDependencies": { + "react": ">=17.x", + "react-dom": ">=17.x" + } + }, + "node_modules/@lexical/dragon": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/dragon/-/dragon-0.35.0.tgz", + "integrity": 
"sha512-SL6mT5pcqrt6hEbJ16vWxip5+r3uvMd0bQV5UUxuk+cxIeuP86iTgRh0HFR7SM2dRTYovL6/tM/O+8QLAUGTIg==", + "license": "MIT", + "dependencies": { + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/hashtag": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/hashtag/-/hashtag-0.35.0.tgz", + "integrity": "sha512-LYJWzXuO2ZjKsvQwrLkNZiS2TsjwYkKjlDgtugzejquTBQ/o/nfSn/MmVx6EkYLOYizaJemmZbz3IBh+u732FA==", + "license": "MIT", + "dependencies": { + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/history": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/history/-/history-0.35.0.tgz", + "integrity": "sha512-onjDRLLxGbCfHexSxxrQaDaieIHyV28zCDrbxR5dxTfW8F8PxjuNyuaG0z6o468AXYECmclxkP+P4aT6poHEpQ==", + "license": "MIT", + "dependencies": { + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/html": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/html/-/html-0.35.0.tgz", + "integrity": "sha512-rXGFE5S5rKsg3tVnr1s4iEgOfCApNXGpIFI3T2jGEShaCZ5HLaBY9NVBXnE9Nb49e9bkDkpZ8FZd1qokCbQXbw==", + "license": "MIT", + "dependencies": { + "@lexical/selection": "0.35.0", + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/link": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/link/-/link-0.35.0.tgz", + "integrity": "sha512-+0Wx6cBwO8TfdMzpkYFacsmgFh8X1rkiYbq3xoLvk3qV8upYxaMzK1s8Q1cpKmWyI0aZrU6z7fiK4vUqB7+69w==", + "license": "MIT", + "dependencies": { + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/list": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/list/-/list-0.35.0.tgz", + "integrity": "sha512-owsmc8iwgExBX8sFe8fKTiwJVhYULt9hD1RZ/HwfaiEtRZZkINijqReOBnW2mJfRxBzhFSWc4NG3ISB+fHYzqw==", + "license": "MIT", + "dependencies": { + "@lexical/selection": "0.35.0", + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/mark": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/mark/-/mark-0.35.0.tgz", + "integrity": "sha512-W0hwMTAVeexvpk9/+J6n1G/sNkpI/Meq1yeDazahFLLAwXLHtvhIAq2P/klgFknDy1hr8X7rcsQuN/bqKcKHYg==", + "license": "MIT", + "dependencies": { + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/markdown": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/markdown/-/markdown-0.35.0.tgz", + "integrity": "sha512-BlNyXZAt4gWidMw0SRWrhBETY1BpPglFBZI7yzfqukFqgXRh7HUQA28OYeI/nsx9pgNob8TiUduUwShqqvOdEA==", + "license": "MIT", + "dependencies": { + "@lexical/code": "0.35.0", + "@lexical/link": "0.35.0", + "@lexical/list": "0.35.0", + "@lexical/rich-text": "0.35.0", + "@lexical/text": "0.35.0", + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/offset": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/offset/-/offset-0.35.0.tgz", + "integrity": "sha512-DRE4Df6qYf2XiV6foh6KpGNmGAv2ANqt3oVXpyS6W8hTx3+cUuAA1APhCZmLNuU107um4zmHym7taCu6uXW5Yg==", + "license": "MIT", + "dependencies": { + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/overflow": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/overflow/-/overflow-0.35.0.tgz", + "integrity": "sha512-B25YvnJQTGlZcrNv7b0PJBLWq3tl8sql497OHfYYLem7EOMPKKDGJScJAKM/91D4H/mMAsx5gnA/XgKobriuTg==", + "license": "MIT", + "dependencies": { + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/plain-text": { + 
"version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/plain-text/-/plain-text-0.35.0.tgz", + "integrity": "sha512-lwBCUNMJf7Gujp2syVWMpKRahfbTv5Wq+H3HK1Q1gKH1P2IytPRxssCHvexw9iGwprSyghkKBlbF3fGpEdIJvQ==", + "license": "MIT", + "dependencies": { + "@lexical/clipboard": "0.35.0", + "@lexical/selection": "0.35.0", + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/react": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/react/-/react-0.35.0.tgz", + "integrity": "sha512-uYAZSqumH8tRymMef+A0f2hQvMwplKK9DXamcefnk3vSNDHHqRWQXpiUo6kD+rKWuQmMbVa5RW4xRQebXEW+1A==", + "license": "MIT", + "dependencies": { + "@floating-ui/react": "^0.27.8", + "@lexical/devtools-core": "0.35.0", + "@lexical/dragon": "0.35.0", + "@lexical/hashtag": "0.35.0", + "@lexical/history": "0.35.0", + "@lexical/link": "0.35.0", + "@lexical/list": "0.35.0", + "@lexical/mark": "0.35.0", + "@lexical/markdown": "0.35.0", + "@lexical/overflow": "0.35.0", + "@lexical/plain-text": "0.35.0", + "@lexical/rich-text": "0.35.0", + "@lexical/table": "0.35.0", + "@lexical/text": "0.35.0", + "@lexical/utils": "0.35.0", + "@lexical/yjs": "0.35.0", + "lexical": "0.35.0", + "react-error-boundary": "^3.1.4" + }, + "peerDependencies": { + "react": ">=17.x", + "react-dom": ">=17.x" + } + }, + "node_modules/@lexical/rich-text": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/rich-text/-/rich-text-0.35.0.tgz", + "integrity": "sha512-qEHu8g7vOEzz9GUz1VIUxZBndZRJPh9iJUFI+qTDHj+tQqnd5LCs+G9yz6jgNfiuWWpezTp0i1Vz/udNEuDPKQ==", + "license": "MIT", + "dependencies": { + "@lexical/clipboard": "0.35.0", + "@lexical/selection": "0.35.0", + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/selection": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/selection/-/selection-0.35.0.tgz", + "integrity": "sha512-mMtDE7Q0nycXdFTTH/+ta6EBrBwxBB4Tg8QwsGntzQ1Cq//d838dpXpFjJOqHEeVHUqXpiuj+cBG8+bvz/rPRw==", + "license": "MIT", + "dependencies": { + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/table": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/table/-/table-0.35.0.tgz", + "integrity": "sha512-9jlTlkVideBKwsEnEkqkdg7A3mije1SvmfiqoYnkl1kKJCLA5iH90ywx327PU0p+bdnURAytWUeZPXaEuEl2OA==", + "license": "MIT", + "dependencies": { + "@lexical/clipboard": "0.35.0", + "@lexical/utils": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/text": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/text/-/text-0.35.0.tgz", + "integrity": "sha512-uaMh46BkysV8hK8wQwp5g/ByZW+2hPDt8ahAErxtf8NuzQem1FHG/f5RTchmFqqUDVHO3qLNTv4AehEGmXv8MA==", + "license": "MIT", + "dependencies": { + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/utils": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/utils/-/utils-0.35.0.tgz", + "integrity": "sha512-2H393EYDnFznYCDFOW3MHiRzwEO5M/UBhtUjvTT+9kc+qhX4U3zc8ixQalo5UmZ5B2nh7L/inXdTFzvSRXtsRA==", + "license": "MIT", + "dependencies": { + "@lexical/list": "0.35.0", + "@lexical/selection": "0.35.0", + "@lexical/table": "0.35.0", + "lexical": "0.35.0" + } + }, + "node_modules/@lexical/yjs": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/@lexical/yjs/-/yjs-0.35.0.tgz", + "integrity": "sha512-3DSP7QpmTGYU9bN/yljP0PIao4tNIQtsR4ycauWNSawxs/GQCZtSmAPcLRnCm6qpqsDDjUtKjO/1Ej8FRp0m0w==", + "license": "MIT", + "dependencies": { + "@lexical/offset": "0.35.0", + "@lexical/selection": 
"0.35.0", + "lexical": "0.35.0" + }, + "peerDependencies": { + "yjs": ">=13.5.22" + } + }, + "node_modules/@lezer/common": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.5.1.tgz", + "integrity": "sha512-6YRVG9vBkaY7p1IVxL4s44n5nUnaNnGM2/AckNgYOnxTG2kWh1vR8BMxPseWPjRNpb5VtXnMpeYAEAADoRV1Iw==", + "license": "MIT" + }, + "node_modules/@lezer/cpp": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@lezer/cpp/-/cpp-1.1.5.tgz", + "integrity": "sha512-DIhSXmYtJKLehrjzDFN+2cPt547ySQ41nA8yqcDf/GxMc+YM736xqltFkvADL2M0VebU5I+3+4ks2Vv+Kyq3Aw==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/css": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@lezer/css/-/css-1.3.0.tgz", + "integrity": "sha512-pBL7hup88KbI7hXnZV3PQsn43DHy6TWyzuyk2AO9UyoXcDltvIdqWKE1dLL/45JVZ+YZkHe1WVHqO6wugZZWcw==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.3.0" + } + }, + "node_modules/@lezer/go": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@lezer/go/-/go-1.0.1.tgz", + "integrity": "sha512-xToRsYxwsgJNHTgNdStpcvmbVuKxTapV0dM0wey1geMMRc9aggoVyKgzYp41D2/vVOx+Ii4hmE206kvxIXBVXQ==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.3.0" + } + }, + "node_modules/@lezer/highlight": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.3.tgz", + "integrity": "sha512-qXdH7UqTvGfdVBINrgKhDsVTJTxactNNxLk7+UMwZhU13lMHaOBlJe9Vqp907ya56Y3+ed2tlqzys7jDkTmW0g==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.3.0" + } + }, + "node_modules/@lezer/html": { + "version": "1.3.13", + "resolved": "https://registry.npmjs.org/@lezer/html/-/html-1.3.13.tgz", + "integrity": "sha512-oI7n6NJml729m7pjm9lvLvmXbdoMoi2f+1pwSDJkl9d68zGr7a9Btz8NdHTGQZtW2DA25ybeuv/SyDb9D5tseg==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/java": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@lezer/java/-/java-1.1.3.tgz", + "integrity": "sha512-yHquUfujwg6Yu4Fd1GNHCvidIvJwi/1Xu2DaKl/pfWIA2c1oXkVvawH3NyXhCaFx4OdlYBVX5wvz2f7Aoa/4Xw==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/javascript": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@lezer/javascript/-/javascript-1.5.4.tgz", + "integrity": "sha512-vvYx3MhWqeZtGPwDStM2dwgljd5smolYD2lR2UyFcHfxbBQebqx8yjmFmxtJ/E6nN6u1D9srOiVWm3Rb4tmcUA==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.1.3", + "@lezer/lr": "^1.3.0" + } + }, + "node_modules/@lezer/json": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@lezer/json/-/json-1.0.3.tgz", + "integrity": "sha512-BP9KzdF9Y35PDpv04r0VeSTKDeox5vVr3efE7eBbx3r4s3oNLfunchejZhjArmeieBH+nVOpgIiBJpEAv8ilqQ==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/lr": { + "version": "1.4.8", + "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.8.tgz", + "integrity": 
"sha512-bPWa0Pgx69ylNlMlPvBPryqeLYQjyJjqPx+Aupm5zydLIF3NE+6MMLT8Yi23Bd9cif9VS00aUebn+6fDIGBcDA==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@lezer/markdown": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/@lezer/markdown/-/markdown-1.6.3.tgz", + "integrity": "sha512-jpGm5Ps+XErS+xA4urw7ogEGkeZOahVQF21Z6oECF0sj+2liwZopd2+I8uH5I/vZsRuuze3OxBREIANLf6KKUw==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.5.0", + "@lezer/highlight": "^1.0.0" + } + }, + "node_modules/@lezer/php": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@lezer/php/-/php-1.0.5.tgz", + "integrity": "sha512-W7asp9DhM6q0W6DYNwIkLSKOvxlXRrif+UXBMxzsJUuqmhE7oVU+gS3THO4S/Puh7Xzgm858UNaFi6dxTP8dJA==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.1.0" + } + }, + "node_modules/@lezer/python": { + "version": "1.1.18", + "resolved": "https://registry.npmjs.org/@lezer/python/-/python-1.1.18.tgz", + "integrity": "sha512-31FiUrU7z9+d/ElGQLJFXl+dKOdx0jALlP3KEOsGTex8mvj+SoE1FgItcHWK/axkxCHGUSpqIHt6JAWfWu9Rhg==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/rust": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@lezer/rust/-/rust-1.0.2.tgz", + "integrity": "sha512-Lz5sIPBdF2FUXcWeCu1//ojFAZqzTQNRga0aYv6dYXqJqPfMdCAI0NzajWUd4Xijj1IKJLtjoXRPMvTKWBcqKg==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/sass": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@lezer/sass/-/sass-1.1.0.tgz", + "integrity": "sha512-3mMGdCTUZ/84ArHOuXWQr37pnf7f+Nw9ycPUeKX+wu19b7pSMcZGLbaXwvD2APMBDOGxPmpK/O6S1v1EvLoqgQ==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/xml": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@lezer/xml/-/xml-1.0.6.tgz", + "integrity": "sha512-CdDwirL0OEaStFue/66ZmFSeppuL6Dwjlk8qk153mSQwiSH/Dlri4GNymrNWnUmPl2Um7QfV1FO9KFUyX3Twww==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/yaml": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@lezer/yaml/-/yaml-1.0.4.tgz", + "integrity": "sha512-2lrrHqxalACEbxIbsjhqGpSW8kWpUKuY6RHgnSAFZa6qK62wvnPxA8hGOwOoDbwHcOFs5M4o27mjGu+P7TvBmw==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.4.0" + } + }, + "node_modules/@marijn/find-cluster-break": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz", + "integrity": "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==", + "license": "MIT" + }, + "node_modules/@mdxeditor/editor": { + "version": "3.52.4", + "resolved": "https://registry.npmjs.org/@mdxeditor/editor/-/editor-3.52.4.tgz", + "integrity": "sha512-Tr/QKR7pVrle9xF3ZCsUESOlLY8UZ0N/8RZcyyRWvnuEvePi4EAcbthwnyDkVpnwGppkUxPNrFTAnL7Y0R1Hwg==", + "license": "MIT", + "dependencies": { + "@codemirror/commands": "^6.2.4", + "@codemirror/lang-markdown": "^6.2.3", + "@codemirror/language-data": "^6.5.1", + "@codemirror/merge": "^6.4.0", + 
"@codemirror/state": "^6.4.0", + "@codemirror/view": "^6.23.0", + "@codesandbox/sandpack-react": "^2.20.0", + "@lexical/clipboard": "^0.35.0", + "@lexical/link": "^0.35.0", + "@lexical/list": "^0.35.0", + "@lexical/markdown": "^0.35.0", + "@lexical/plain-text": "^0.35.0", + "@lexical/react": "^0.35.0", + "@lexical/rich-text": "^0.35.0", + "@lexical/selection": "^0.35.0", + "@lexical/utils": "^0.35.0", + "@mdxeditor/gurx": "^1.2.4", + "@radix-ui/colors": "^3.0.0", + "@radix-ui/react-dialog": "^1.1.11", + "@radix-ui/react-icons": "^1.3.2", + "@radix-ui/react-popover": "^1.1.11", + "@radix-ui/react-popper": "^1.2.4", + "@radix-ui/react-select": "^2.2.2", + "@radix-ui/react-toggle-group": "^1.1.7", + "@radix-ui/react-toolbar": "^1.1.7", + "@radix-ui/react-tooltip": "^1.2.4", + "classnames": "^2.3.2", + "cm6-theme-basic-light": "^0.2.0", + "codemirror": "^6.0.1", + "downshift": "^7.6.0", + "js-yaml": "4.1.1", + "lexical": "^0.35.0", + "mdast-util-directive": "^3.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-frontmatter": "^2.0.1", + "mdast-util-gfm-strikethrough": "^2.0.0", + "mdast-util-gfm-table": "^2.0.0", + "mdast-util-gfm-task-list-item": "^2.0.0", + "mdast-util-highlight-mark": "^1.2.2", + "mdast-util-mdx": "^3.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-to-markdown": "^2.1.0", + "micromark-extension-directive": "^3.0.0", + "micromark-extension-frontmatter": "^2.0.0", + "micromark-extension-gfm-strikethrough": "^2.0.0", + "micromark-extension-gfm-table": "^2.0.0", + "micromark-extension-gfm-task-list-item": "^2.0.1", + "micromark-extension-highlight-mark": "^1.2.0", + "micromark-extension-mdx-jsx": "^3.0.0", + "micromark-extension-mdx-md": "^2.0.0", + "micromark-extension-mdxjs": "^3.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.1", + "micromark-util-symbol": "^2.0.0", + "react-hook-form": "^7.56.1", + "unidiff": "^1.0.2" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "react": ">= 18 || >= 19", + "react-dom": ">= 18 || >= 19" + } + }, + "node_modules/@mdxeditor/gurx": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@mdxeditor/gurx/-/gurx-1.2.4.tgz", + "integrity": "sha512-9ZykIFYhKaXaaSPCs1cuI+FvYDegJjbKwmA4ASE/zY+hJY6EYqvoye4esiO85CjhOw9aoD/izD/CU78/egVqmg==", + "license": "MIT", + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "react": ">= 18 || >= 19", + "react-dom": ">= 18 || >= 19" + } + }, "node_modules/@mermaid-js/parser": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.3.0.tgz", - "integrity": "sha512-HsvL6zgE5sUPGgkIDlmAWR1HTNHz2Iy11BAWPTa4Jjabkpguy4Ze2gzfLrg6pdRuBvFwgUYyxiaNqZwrEEXepA==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-1.0.0.tgz", + "integrity": "sha512-vvK0Hi/VWndxoh03Mmz6wa1KDriSPjS2XMZL/1l19HFwygiObEEoEwSDxOqyLzzAI6J2PU3261JjTMTO7x+BPw==", "license": "MIT", "dependencies": { - "langium": "3.0.0" + "langium": "^4.0.0" } }, "node_modules/@microsoft/eslint-formatter-sarif": { @@ -1612,9 +2636,9 @@ } }, "node_modules/@mui/core-downloads-tracker": { - "version": "5.16.14", - "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.16.14.tgz", - "integrity": "sha512-sbjXW+BBSvmzn61XyTMun899E7nGPTXwqD9drm1jBUAvWEhJpPFIRxwQQiATWZnd9rvdxtnhhdsDxEGWI0jxqA==", + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-7.3.8.tgz", + "integrity": 
"sha512-s9UHZo7QJVly7gNArEZkbbsimHqJZhElgBpXIJdehZ4OWXt+CCr0SBDgUCDJnQrqpd1dWK2dLq5rmO4mCBmI3w==", "license": "MIT", "funding": { "type": "opencollective", @@ -1622,22 +2646,22 @@ } }, "node_modules/@mui/icons-material": { - "version": "5.16.14", - "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-5.16.14.tgz", - "integrity": "sha512-heL4S+EawrP61xMXBm59QH6HODsu0gxtZi5JtnXF2r+rghzyU/3Uftlt1ij8rmJh+cFdKTQug1L9KkZB5JgpMQ==", + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-7.3.8.tgz", + "integrity": "sha512-88sWg/UJc1X82OMO+ISR4E3P58I3BjFVg0qkmDu7OWlN8VijneZD3ylFA+ImxuPjMHW3SHosfSJYy1fztoz0fw==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.23.9" + "@babel/runtime": "^7.28.6" }, "engines": { - "node": ">=12.0.0" + "node": ">=14.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/mui-org" }, "peerDependencies": { - "@mui/material": "^5.0.0", + "@mui/material": "^7.3.8", "@types/react": "^17.0.0 || ^18.0.0 || ^19.0.0", "react": "^17.0.0 || ^18.0.0 || ^19.0.0" }, @@ -1648,26 +2672,26 @@ } }, "node_modules/@mui/material": { - "version": "5.16.14", - "resolved": "https://registry.npmjs.org/@mui/material/-/material-5.16.14.tgz", - "integrity": "sha512-eSXQVCMKU2xc7EcTxe/X/rC9QsV2jUe8eLM3MUCPYbo6V52eCE436akRIvELq/AqZpxx2bwkq7HC0cRhLB+yaw==", + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/@mui/material/-/material-7.3.8.tgz", + "integrity": "sha512-QKd1RhDXE1hf2sQDNayA9ic9jGkEgvZOf0tTkJxlBPG8ns8aS4rS8WwYURw2x5y3739p0HauUXX9WbH7UufFLw==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.23.9", - "@mui/core-downloads-tracker": "^5.16.14", - "@mui/system": "^5.16.14", - "@mui/types": "^7.2.15", - "@mui/utils": "^5.16.14", + "@babel/runtime": "^7.28.6", + "@mui/core-downloads-tracker": "^7.3.8", + "@mui/system": "^7.3.8", + "@mui/types": "^7.4.11", + "@mui/utils": "^7.3.8", "@popperjs/core": "^2.11.8", - "@types/react-transition-group": "^4.4.10", - "clsx": "^2.1.0", - "csstype": "^3.1.3", + "@types/react-transition-group": "^4.4.12", + "clsx": "^2.1.1", + "csstype": "^3.2.3", "prop-types": "^15.8.1", - "react-is": "^19.0.0", + "react-is": "^19.2.3", "react-transition-group": "^4.4.5" }, "engines": { - "node": ">=12.0.0" + "node": ">=14.0.0" }, "funding": { "type": "opencollective", @@ -1676,6 +2700,7 @@ "peerDependencies": { "@emotion/react": "^11.5.0", "@emotion/styled": "^11.3.0", + "@mui/material-pigment-css": "^7.3.8", "@types/react": "^17.0.0 || ^18.0.0 || ^19.0.0", "react": "^17.0.0 || ^18.0.0 || ^19.0.0", "react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0" @@ -1687,29 +2712,32 @@ "@emotion/styled": { "optional": true }, + "@mui/material-pigment-css": { + "optional": true + }, "@types/react": { "optional": true } } }, "node_modules/@mui/material/node_modules/react-is": { - "version": "19.0.0", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-19.0.0.tgz", - "integrity": "sha512-H91OHcwjZsbq3ClIDHMzBShc1rotbfACdWENsmEf0IFvZ3FgGPtdHMcsv45bQ1hAbgdfiA8SnxTKfDS+x/8m2g==", + "version": "19.2.4", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-19.2.4.tgz", + "integrity": "sha512-W+EWGn2v0ApPKgKKCy/7s7WHXkboGcsrXE+2joLyVxkbyVQfO3MUEaUQDHoSmb8TFFrSKYa9mw64WZHNHSDzYA==", "license": "MIT" }, "node_modules/@mui/private-theming": { - "version": "5.16.14", - "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-5.16.14.tgz", - "integrity": 
"sha512-12t7NKzvYi819IO5IapW2BcR33wP/KAVrU8d7gLhGHoAmhDxyXlRoKiRij3TOD8+uzk0B6R9wHUNKi4baJcRNg==", + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-7.3.8.tgz", + "integrity": "sha512-du5dlPZ9XL3xW2apHoGDXBI+QLtyVJGrXNCfcNYfP/ojkz1RQ0rRV6VG9Rkm1DqEFRG8mjjTL7zmE1Bvn1eR4A==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.23.9", - "@mui/utils": "^5.16.14", + "@babel/runtime": "^7.28.6", + "@mui/utils": "^7.3.8", "prop-types": "^15.8.1" }, "engines": { - "node": ">=12.0.0" + "node": ">=14.0.0" }, "funding": { "type": "opencollective", @@ -1726,18 +2754,20 @@ } }, "node_modules/@mui/styled-engine": { - "version": "5.16.14", - "resolved": "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-5.16.14.tgz", - "integrity": "sha512-UAiMPZABZ7p8mUW4akDV6O7N3+4DatStpXMZwPlt+H/dA0lt67qawN021MNND+4QTpjaiMYxbhKZeQcyWCbuKw==", + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-7.3.8.tgz", + "integrity": "sha512-JHAeXQzS0tJ+Fq3C6J4TVDsW+yKhO4uuxuiLaopNStJeQYBIUCXpKYyUCcgXym4AmhbznQnv9RlHywSH6b0FOg==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.23.9", - "@emotion/cache": "^11.13.5", - "csstype": "^3.1.3", + "@babel/runtime": "^7.28.6", + "@emotion/cache": "^11.14.0", + "@emotion/serialize": "^1.3.3", + "@emotion/sheet": "^1.4.0", + "csstype": "^3.2.3", "prop-types": "^15.8.1" }, "engines": { - "node": ">=12.0.0" + "node": ">=14.0.0" }, "funding": { "type": "opencollective", @@ -1758,22 +2788,22 @@ } }, "node_modules/@mui/system": { - "version": "5.16.14", - "resolved": "https://registry.npmjs.org/@mui/system/-/system-5.16.14.tgz", - "integrity": "sha512-KBxMwCb8mSIABnKvoGbvM33XHyT+sN0BzEBG+rsSc0lLQGzs7127KWkCA6/H8h6LZ00XpBEME5MAj8mZLiQ1tw==", + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/@mui/system/-/system-7.3.8.tgz", + "integrity": "sha512-hoFRj4Zw2Km8DPWZp/nKG+ao5Jw5LSk2m/e4EGc6M3RRwXKEkMSG4TgtfVJg7dS2homRwtdXSMW+iRO0ZJ4+IA==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.23.9", - "@mui/private-theming": "^5.16.14", - "@mui/styled-engine": "^5.16.14", - "@mui/types": "^7.2.15", - "@mui/utils": "^5.16.14", - "clsx": "^2.1.0", - "csstype": "^3.1.3", + "@babel/runtime": "^7.28.6", + "@mui/private-theming": "^7.3.8", + "@mui/styled-engine": "^7.3.8", + "@mui/types": "^7.4.11", + "@mui/utils": "^7.3.8", + "clsx": "^2.1.1", + "csstype": "^3.2.3", "prop-types": "^15.8.1" }, "engines": { - "node": ">=12.0.0" + "node": ">=14.0.0" }, "funding": { "type": "opencollective", @@ -1798,10 +2828,13 @@ } }, "node_modules/@mui/types": { - "version": "7.2.21", - "resolved": "https://registry.npmjs.org/@mui/types/-/types-7.2.21.tgz", - "integrity": "sha512-6HstngiUxNqLU+/DPqlUJDIPbzUBxIVHb1MmXP0eTWDIROiCR2viugXpEif0PPe2mLqqakPzzRClWAnK+8UJww==", + "version": "7.4.11", + "resolved": "https://registry.npmjs.org/@mui/types/-/types-7.4.11.tgz", + "integrity": "sha512-fZ2xO9D08IKOxO2oUBi1nnVKH6oJUD+64cnv4YAaFoC0E5+i1+S5AHbNqqvZlYYsbPEQ6qEVwuBqY3jl5W4G+Q==", "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.28.6" + }, "peerDependencies": { "@types/react": "^17.0.0 || ^18.0.0 || ^19.0.0" }, @@ -1812,20 +2845,20 @@ } }, "node_modules/@mui/utils": { - "version": "5.16.14", - "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-5.16.14.tgz", - "integrity": "sha512-wn1QZkRzSmeXD1IguBVvJJHV3s6rxJrfb6YuC9Kk6Noh9f8Fb54nUs5JRkKm+BOerRhj5fLg05Dhx/H3Ofb8Mg==", + "version": "7.3.8", + "resolved": 
"https://registry.npmjs.org/@mui/utils/-/utils-7.3.8.tgz", + "integrity": "sha512-kZRcE2620CBGr+XI8YMmwPj6WIPwSF7uMJjvSfqd8zXVvlz0MCJbzRRUGNf8NgflCLthdji2DdS643TeyJ3+nA==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.23.9", - "@mui/types": "^7.2.15", - "@types/prop-types": "^15.7.12", + "@babel/runtime": "^7.28.6", + "@mui/types": "^7.4.11", + "@types/prop-types": "^15.7.15", "clsx": "^2.1.1", "prop-types": "^15.8.1", - "react-is": "^19.0.0" + "react-is": "^19.2.3" }, "engines": { - "node": ">=12.0.0" + "node": ">=14.0.0" }, "funding": { "type": "opencollective", @@ -1842,9 +2875,9 @@ } }, "node_modules/@mui/utils/node_modules/react-is": { - "version": "19.0.0", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-19.0.0.tgz", - "integrity": "sha512-H91OHcwjZsbq3ClIDHMzBShc1rotbfACdWENsmEf0IFvZ3FgGPtdHMcsv45bQ1hAbgdfiA8SnxTKfDS+x/8m2g==", + "version": "19.2.4", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-19.2.4.tgz", + "integrity": "sha512-W+EWGn2v0ApPKgKKCy/7s7WHXkboGcsrXE+2joLyVxkbyVQfO3MUEaUQDHoSmb8TFFrSKYa9mw64WZHNHSDzYA==", "license": "MIT" }, "node_modules/@nodelib/fs.scandir": { @@ -1885,6 +2918,12 @@ "node": ">= 8" } }, + "node_modules/@open-draft/deferred-promise": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz", + "integrity": "sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==", + "license": "MIT" + }, "node_modules/@popperjs/core": { "version": "2.11.8", "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", @@ -1895,1675 +2934,3371 @@ "url": "https://opencollective.com/popperjs" } }, - "node_modules/@remirror/core-constants": { + "node_modules/@radix-ui/colors": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@remirror/core-constants/-/core-constants-3.0.0.tgz", - "integrity": "sha512-42aWfPrimMfDKDi4YegyS7x+/0tlzaqwPQCULLanv3DMIlu96KTJR0fM5isWX2UViOqlGnX6YFgqWepcX+XMNg==", + "resolved": "https://registry.npmjs.org/@radix-ui/colors/-/colors-3.0.0.tgz", + "integrity": "sha512-FUOsGBkHrYJwCSEtWRCIfQbZG7q1e6DgxCIOe1SUQzDe/7rXXeA47s8yCn6fuTNQAj1Zq4oTFi9Yjp3wzElcxg==", "license": "MIT" }, - "node_modules/@remix-run/router": { - "version": "1.21.0", - "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.21.0.tgz", - "integrity": "sha512-xfSkCAchbdG5PnbrKqFWwia4Bi61nH+wm8wLEqfHDyp7Y3dZzgqS2itV8i4gAq9pC2HsTpwyBC6Ds8VHZ96JlA==", - "license": "MIT", - "engines": { - "node": ">=14.0.0" + "node_modules/@radix-ui/number": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/number/-/number-1.1.1.tgz", + "integrity": "sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==", + "license": "MIT" + }, + "node_modules/@radix-ui/primitive": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", + "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==", + "license": "MIT" + }, + "node_modules/@radix-ui/react-arrow": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.7.tgz", + "integrity": "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 
|| ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collection": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.7.tgz", + "integrity": "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-compose-refs": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz", + "integrity": "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.15.tgz", + "integrity": "sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-direction": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.1.tgz", + "integrity": "sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==", + "license": "MIT", + 
"peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.11.tgz", + "integrity": "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-escape-keydown": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-guards": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.3.tgz", + "integrity": "sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-scope": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.7.tgz", + "integrity": "sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-icons": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-icons/-/react-icons-1.3.2.tgz", + "integrity": "sha512-fyQIhGDhzfc9pK2kH6Pl9c4BDJGfMkPqkyIgYDthyNYoNg3wVhoJMMh19WS4Up/1KMPFVpNsT2q3WmXn2N1m6g==", + "license": "MIT", + "peerDependencies": { + "react": "^16.x || ^17.x || ^18.x || ^19.0.0 || ^19.0.0-rc" + } + }, + "node_modules/@radix-ui/react-id": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.1.tgz", + "integrity": "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popover": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.1.15.tgz", + "integrity": 
"sha512-kr0X2+6Yy/vJzLYJUPCZEc8SfQcf+1COFoAqauJm74umQhta9M7lNJHP7QQS3vkvcGLQUbWpMzwrXYwrYztHKA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popper": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.8.tgz", + "integrity": "sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==", + "license": "MIT", + "dependencies": { + "@floating-ui/react-dom": "^2.0.0", + "@radix-ui/react-arrow": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-rect": "1.1.1", + "@radix-ui/react-use-size": "1.1.1", + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-portal": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.9.tgz", + "integrity": "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-presence": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", + "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + 
"optional": true + } + } + }, + "node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-roving-focus": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz", + "integrity": "sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-select": { + "version": "2.2.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-select/-/react-select-2.2.6.tgz", + "integrity": "sha512-I30RydO+bnn2PQztvo25tswPH+wFBjehVGtmagkU78yMdwTwVf12wnAOF+AeP8S2N8xD+5UPbGhkUfPyvT+mwQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/number": "1.1.1", + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-visually-hidden": "1.2.3", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-separator": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-separator/-/react-separator-1.1.7.tgz", + "integrity": "sha512-0HEb8R9E8A+jZjvmFCy/J4xhbXy3TV+9XSnGJ3KvTtjlIUy/YQ/p6UYZvi7YbeoeXdyU9+Y3scizK6hkY37baA==", + "license": "MIT", + 
"dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-toggle": { + "version": "1.1.10", + "resolved": "https://registry.npmjs.org/@radix-ui/react-toggle/-/react-toggle-1.1.10.tgz", + "integrity": "sha512-lS1odchhFTeZv3xwHH31YPObmJn8gOg7Lq12inrr0+BH/l3Tsq32VfjqH1oh80ARM3mlkfMic15n0kg4sD1poQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-toggle-group": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-toggle-group/-/react-toggle-group-1.1.11.tgz", + "integrity": "sha512-5umnS0T8JQzQT6HbPyO7Hh9dgd82NmS36DQr+X/YJ9ctFNCiiQd6IJAYYZ33LUwm8M+taCz5t2ui29fHZc4Y6Q==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-toggle": "1.1.10", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-toolbar": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-toolbar/-/react-toolbar-1.1.11.tgz", + "integrity": "sha512-4ol06/1bLoFu1nwUqzdD4Y5RZ9oDdKeiHIsntug54Hcr1pgaHiPqHFEaXI1IFP/EsOfROQZ8Mig9VTIRza6Tjg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-separator": "1.1.7", + "@radix-ui/react-toggle-group": "1.1.11" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + 
"@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tooltip": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.2.8.tgz", + "integrity": "sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-visually-hidden": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-callback-ref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz", + "integrity": "sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-controllable-state": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz", + "integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-effect-event": "0.0.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-effect-event": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-effect-event/-/react-use-effect-event-0.0.2.tgz", + "integrity": "sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-escape-keydown": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.1.tgz", + "integrity": "sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + 
"node_modules/@radix-ui/react-use-layout-effect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz", + "integrity": "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-previous": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-previous/-/react-use-previous-1.1.1.tgz", + "integrity": "sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.1.tgz", + "integrity": "sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==", + "license": "MIT", + "dependencies": { + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-size": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.1.tgz", + "integrity": "sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-visually-hidden": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.2.3.tgz", + "integrity": "sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.1.tgz", + "integrity": "sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==", + "license": "MIT" + }, + "node_modules/@react-hook/intersection-observer": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@react-hook/intersection-observer/-/intersection-observer-3.1.2.tgz", + "integrity": "sha512-mWU3BMkmmzyYMSuhO9wu3eJVP21N8TcgYm9bZnTrMwuM818bEk+0NRM3hP+c/TqA9Ln5C7qE53p1H0QMtzYdvQ==", + "license": "MIT", + "dependencies": { + "@react-hook/passive-layout-effect": "^1.2.0", + "intersection-observer": "^0.10.0" + }, + 
"peerDependencies": { + "react": ">=16.8" + } + }, + "node_modules/@react-hook/passive-layout-effect": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@react-hook/passive-layout-effect/-/passive-layout-effect-1.2.1.tgz", + "integrity": "sha512-IwEphTD75liO8g+6taS+4oqz+nnroocNfWVHWz7j+N+ZO2vYrc6PV1q7GQhuahL0IOR7JccFTsFKQ/mb6iZWAg==", + "license": "MIT", + "peerDependencies": { + "react": ">=16.8" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.3.tgz", + "integrity": "sha512-eybk3TjzzzV97Dlj5c+XrBFW57eTNhzod66y9HrBlzJ6NsCrWCp/2kaPS3K9wJmurBC0Tdw4yPjXKZqlznim3Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz", + "integrity": "sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.1.tgz", + "integrity": "sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.57.1.tgz", + "integrity": "sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.1.tgz", + "integrity": "sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.1.tgz", + "integrity": "sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.1.tgz", + "integrity": "sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.1.tgz", + "integrity": "sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": 
[ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.1.tgz", + "integrity": "sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.1.tgz", + "integrity": "sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.1.tgz", + "integrity": "sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.1.tgz", + "integrity": "sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.1.tgz", + "integrity": "sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.1.tgz", + "integrity": "sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.1.tgz", + "integrity": "sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.1.tgz", + "integrity": "sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.1.tgz", + "integrity": 
"sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.1.tgz", + "integrity": "sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.1.tgz", + "integrity": "sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.1.tgz", + "integrity": "sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.1.tgz", + "integrity": "sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.1.tgz", + "integrity": "sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.1.tgz", + "integrity": "sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.1.tgz", + "integrity": "sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.1.tgz", + "integrity": "sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.57.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.1.tgz", + "integrity": "sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@stitches/core": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@stitches/core/-/core-1.2.8.tgz", + "integrity": "sha512-Gfkvwk9o9kE9r9XNBmJRfV8zONvXThnm1tcuojL04Uy5uRyqg93DC83lDebl0rocZCfKSjUv+fWYtMQmEDJldg==", + "license": "MIT" + }, + "node_modules/@tanstack/query-core": { + "version": "5.90.20", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.20.tgz", + "integrity": "sha512-OMD2HLpNouXEfZJWcKeVKUgQ5n+n3A2JFmBaScpNDUqSrQSjiveC7dKMe53uJUg1nDG16ttFPz2xfilz6i2uVg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/react-query": { + "version": "5.90.20", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.20.tgz", + "integrity": "sha512-vXBxa+qeyveVO7OA0jX1z+DeyCA4JKnThKv411jd5SORpBKgkcVnYKCiBgECvADvniBX7tobwBmg01qq9JmMJw==", + "license": "MIT", + "dependencies": { + "@tanstack/query-core": "5.90.20" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": "^18 || ^19" + } + }, + "node_modules/@textea/json-viewer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@textea/json-viewer/-/json-viewer-4.0.1.tgz", + "integrity": "sha512-7FH4ti3kVNyKhe/gl85w+8KllXJY9XQalY2KZnBn9ST1CjhqZQCWJLkYf24aX2FOv2D/8cvAllYkFX46A7C9KQ==", + "license": "MIT", + "dependencies": { + "clsx": "^2.1.1", + "copy-to-clipboard": "^3.3.3", + "zustand": "^4.5.5" + }, + "peerDependencies": { + "@emotion/react": "^11", + "@emotion/styled": "^11", + "@mui/material": "^6", + "react": "^17 || ^18", + "react-dom": "^17 || ^18" + } + }, + "node_modules/@trivago/prettier-plugin-sort-imports": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@trivago/prettier-plugin-sort-imports/-/prettier-plugin-sort-imports-6.0.2.tgz", + "integrity": "sha512-3DgfkukFyC/sE/VuYjaUUWoFfuVjPK55vOFDsxD56XXynFMCZDYFogH2l/hDfOsQAm1myoU/1xByJ3tWqtulXA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@babel/generator": "^7.28.0", + "@babel/parser": "^7.28.0", + "@babel/traverse": "^7.28.0", + "@babel/types": "^7.28.0", + "javascript-natural-sort": "^0.7.1", + "lodash-es": "^4.17.21", + "minimatch": "^9.0.0", + "parse-imports-exports": "^0.2.4" + }, + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@vue/compiler-sfc": "3.x", + "prettier": "2.x - 3.x", + "prettier-plugin-ember-template-tag": ">= 2.0.0", + "prettier-plugin-svelte": "3.x", + "svelte": "4.x || 5.x" + }, + "peerDependenciesMeta": { + "@vue/compiler-sfc": { + "optional": true + }, + "prettier-plugin-ember-template-tag": { + "optional": true + }, + "prettier-plugin-svelte": { + "optional": true + }, + "svelte": { + "optional": true + } + } + }, + "node_modules/@trivago/prettier-plugin-sort-imports/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + 
"node_modules/@trivago/prettier-plugin-sort-imports/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/d3": { + "version": "7.4.3", + "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.3.tgz", + "integrity": "sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==", + "license": "MIT", + "dependencies": { + "@types/d3-array": "*", + "@types/d3-axis": "*", + "@types/d3-brush": "*", + "@types/d3-chord": "*", + "@types/d3-color": "*", + "@types/d3-contour": "*", + "@types/d3-delaunay": "*", + "@types/d3-dispatch": "*", + "@types/d3-drag": "*", + "@types/d3-dsv": "*", + "@types/d3-ease": "*", + "@types/d3-fetch": "*", + "@types/d3-force": "*", + "@types/d3-format": "*", + "@types/d3-geo": "*", + "@types/d3-hierarchy": "*", + "@types/d3-interpolate": "*", + "@types/d3-path": "*", + "@types/d3-polygon": "*", + "@types/d3-quadtree": "*", + "@types/d3-random": "*", + "@types/d3-scale": "*", + "@types/d3-scale-chromatic": "*", + "@types/d3-selection": "*", + "@types/d3-shape": "*", + "@types/d3-time": "*", + "@types/d3-time-format": "*", + "@types/d3-timer": "*", + "@types/d3-transition": "*", + "@types/d3-zoom": "*" + } + }, + "node_modules/@types/d3-array": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz", + "integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==", + "license": "MIT" + }, + "node_modules/@types/d3-axis": { + "version": 
"3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.6.tgz", + "integrity": "sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==", + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-brush": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.6.tgz", + "integrity": "sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==", + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" } }, - "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.30.1.tgz", - "integrity": "sha512-pSWY+EVt3rJ9fQ3IqlrEUtXh3cGqGtPDH1FQlNZehO2yYxCHEX1SPsz1M//NXwYfbTlcKr9WObLnJX9FsS9K1Q==", - "cpu": [ - "arm" - ], - "dev": true, + "node_modules/@types/d3-chord": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.6.tgz", + "integrity": "sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==", + "license": "MIT" + }, + "node_modules/@types/d3-color": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", + "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", + "license": "MIT" + }, + "node_modules/@types/d3-contour": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.6.tgz", + "integrity": "sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==", "license": "MIT", - "optional": true, - "os": [ - "android" - ] + "dependencies": { + "@types/d3-array": "*", + "@types/geojson": "*" + } }, - "node_modules/@rollup/rollup-android-arm64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.30.1.tgz", - "integrity": "sha512-/NA2qXxE3D/BRjOJM8wQblmArQq1YoBVJjrjoTSBS09jgUisq7bqxNHJ8kjCHeV21W/9WDGwJEWSN0KQ2mtD/w==", - "cpu": [ - "arm64" - ], - "dev": true, + "node_modules/@types/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==", + "license": "MIT" + }, + "node_modules/@types/d3-dispatch": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.7.tgz", + "integrity": "sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==", + "license": "MIT" + }, + "node_modules/@types/d3-drag": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.7.tgz", + "integrity": "sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==", "license": "MIT", - "optional": true, - "os": [ - "android" - ] + "dependencies": { + "@types/d3-selection": "*" + } }, - "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.30.1.tgz", - "integrity": "sha512-r7FQIXD7gB0WJ5mokTUgUWPl0eYIH0wnxqeSAhuIwvnnpjdVB8cRRClyKLQr7lgzjctkbp5KmswWszlwYln03Q==", - "cpu": [ - "arm64" - ], + "node_modules/@types/d3-dsv": { + "version": "3.0.7", + 
"resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.7.tgz", + "integrity": "sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==", + "license": "MIT" + }, + "node_modules/@types/d3-ease": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", + "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", + "license": "MIT" + }, + "node_modules/@types/d3-fetch": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.7.tgz", + "integrity": "sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==", + "license": "MIT", + "dependencies": { + "@types/d3-dsv": "*" + } + }, + "node_modules/@types/d3-force": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.10.tgz", + "integrity": "sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==", + "license": "MIT" + }, + "node_modules/@types/d3-format": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.4.tgz", + "integrity": "sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==", + "license": "MIT" + }, + "node_modules/@types/d3-geo": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.1.0.tgz", + "integrity": "sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==", + "license": "MIT", + "dependencies": { + "@types/geojson": "*" + } + }, + "node_modules/@types/d3-hierarchy": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.7.tgz", + "integrity": "sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==", + "license": "MIT" + }, + "node_modules/@types/d3-interpolate": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", + "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", + "license": "MIT", + "dependencies": { + "@types/d3-color": "*" + } + }, + "node_modules/@types/d3-path": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz", + "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==", + "license": "MIT" + }, + "node_modules/@types/d3-polygon": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-polygon/-/d3-polygon-3.0.2.tgz", + "integrity": "sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==", + "license": "MIT" + }, + "node_modules/@types/d3-quadtree": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.6.tgz", + "integrity": "sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==", + "license": "MIT" + }, + "node_modules/@types/d3-random": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-random/-/d3-random-3.0.3.tgz", + "integrity": "sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==", + "license": "MIT" + }, + "node_modules/@types/d3-scale": { + "version": "4.0.9", + "resolved": 
"https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz", + "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==", + "license": "MIT", + "dependencies": { + "@types/d3-time": "*" + } + }, + "node_modules/@types/d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==", + "license": "MIT" + }, + "node_modules/@types/d3-selection": { + "version": "3.0.11", + "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.11.tgz", + "integrity": "sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==", + "license": "MIT" + }, + "node_modules/@types/d3-shape": { + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.8.tgz", + "integrity": "sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==", + "license": "MIT", + "dependencies": { + "@types/d3-path": "*" + } + }, + "node_modules/@types/d3-time": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", + "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", + "license": "MIT" + }, + "node_modules/@types/d3-time-format": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.3.tgz", + "integrity": "sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==", + "license": "MIT" + }, + "node_modules/@types/d3-timer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", + "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", + "license": "MIT" + }, + "node_modules/@types/d3-transition": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.9.tgz", + "integrity": "sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==", + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-zoom": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.8.tgz", + "integrity": "sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==", + "license": "MIT", + "dependencies": { + "@types/d3-interpolate": "*", + "@types/d3-selection": "*" + } + }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "license": "MIT", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "license": "MIT" + }, + "node_modules/@types/estree-jsx": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz", + "integrity": 
"sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==", + "license": "MIT", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/@types/geojson": { + "version": "7946.0.16", + "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", + "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==", + "license": "MIT" + }, + "node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/humanize-duration": { + "version": "3.27.4", + "resolved": "https://registry.npmjs.org/@types/humanize-duration/-/humanize-duration-3.27.4.tgz", + "integrity": "sha512-yaf7kan2Sq0goxpbcwTQ+8E9RP6HutFBPv74T/IA/ojcHKhuKVlk2YFYyHhWZeLvZPzzLE3aatuQB4h0iqyyUA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", "dev": true, + "license": "MIT" + }, + "node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] + "dependencies": { + "@types/unist": "*" + } }, - "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.30.1.tgz", - "integrity": "sha512-x78BavIwSH6sqfP2xeI1hd1GpHL8J4W2BXcVM/5KYKoAD3nNsfitQhvWSw+TFtQTLZ9OmlF+FEInEHyubut2OA==", - "cpu": [ - "x64" - ], + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.10.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.13.tgz", + "integrity": "sha512-oH72nZRfDv9lADUBSo104Aq7gPHpQZc4BTx38r9xf9pg5LfP6EzSyH2n7qFmmxRQXh7YlUXODcYsg6PuTDSxGg==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] + "dependencies": { + "undici-types": "~7.16.0" + } + }, + "node_modules/@types/parse-json": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz", + "integrity": "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==", + "license": "MIT" }, - "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.30.1.tgz", - "integrity": "sha512-HYTlUAjbO1z8ywxsDFWADfTRfTIIy/oUlfIDmlHYmjUP2QRDTzBuWXc9O4CXM+bo9qfiCclmHk1x4ogBjOUpUQ==", - "cpu": [ - "arm64" - ], + "node_modules/@types/prismjs": { + "version": "1.26.6", + "resolved": "https://registry.npmjs.org/@types/prismjs/-/prismjs-1.26.6.tgz", + "integrity": "sha512-vqlvI7qlMvcCBbVe0AKAb4f97//Hy0EBTaiW8AalRnG/xAN5zOiWWyrNqNXeq8+KAuvRewjCVY1+IPxk4RdNYw==", + "license": 
"MIT" + }, + "node_modules/@types/prop-types": { + "version": "15.7.15", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz", + "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==", + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "19.2.0", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.0.tgz", + "integrity": "sha512-1LOH8xovvsKsCBq1wnT4ntDUdCJKmnEakhsuoUSy6ExlHCkGP2hqnatagYTgFk6oeL0VU31u7SNjunPN+GchtA==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] + "dependencies": { + "csstype": "^3.0.2" + } }, - "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.30.1.tgz", - "integrity": "sha512-1MEdGqogQLccphhX5myCJqeGNYTNcmTyaic9S7CG3JhwuIByJ7J05vGbZxsizQthP1xpVx7kd3o31eOogfEirw==", - "cpu": [ - "x64" - ], + "node_modules/@types/react-dom": { + "version": "19.2.0", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.0.tgz", + "integrity": "sha512-brtBs0MnE9SMx7px208g39lRmC5uHZs96caOJfTjFcYSLHNamvaSMfJNagChVNkup2SdtOxKX1FDBkRSJe1ZAg==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] + "peerDependencies": { + "@types/react": "^19.2.0" + } }, - "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.30.1.tgz", - "integrity": "sha512-PaMRNBSqCx7K3Wc9QZkFx5+CX27WFpAMxJNiYGAXfmMIKC7jstlr32UhTgK6T07OtqR+wYlWm9IxzennjnvdJg==", - "cpu": [ - "arm" - ], + "node_modules/@types/react-syntax-highlighter": { + "version": "15.5.13", + "resolved": "https://registry.npmjs.org/@types/react-syntax-highlighter/-/react-syntax-highlighter-15.5.13.tgz", + "integrity": "sha512-uLGJ87j6Sz8UaBAooU0T6lWJ0dBmjZgN1PZTrj05TNql2/XpC6+4HhMT5syIdFUUt+FASfCeLLv4kBygNU+8qA==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "dependencies": { + "@types/react": "*" + } }, - "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.30.1.tgz", - "integrity": "sha512-B8Rcyj9AV7ZlEFqvB5BubG5iO6ANDsRKlhIxySXcF1axXYUyqwBok+XZPgIYGBgs7LDXfWfifxhw0Ik57T0Yug==", - "cpu": [ - "arm" - ], - "dev": true, + "node_modules/@types/react-transition-group": { + "version": "4.4.12", + "resolved": "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.12.tgz", + "integrity": "sha512-8TV6R3h2j7a91c+1DXdJi3Syo69zzIZbz7Lg5tORM5LEJG7X/E6a1V3drRyBRZq7/utz7A+c4OgYLiLcYGHG6w==", "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "peerDependencies": { + "@types/react": "*" + } }, - "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.30.1.tgz", - "integrity": "sha512-hqVyueGxAj3cBKrAI4aFHLV+h0Lv5VgWZs9CUGqr1z0fZtlADVV1YPOij6AhcK5An33EXaxnDLmJdQikcn5NEw==", - "cpu": [ - "arm64" - ], - "dev": true, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", "license": "MIT", - "optional": true, - 
"os": [ - "linux" - ] + "optional": true }, - "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.30.1.tgz", - "integrity": "sha512-i4Ab2vnvS1AE1PyOIGp2kXni69gU2DAUVt6FSXeIqUCPIR3ZlheMW3oP2JkukDfu3PsexYRbOiJrY+yVNSk9oA==", - "cpu": [ - "arm64" - ], + "node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.0.tgz", + "integrity": "sha512-lRyPDLzNCuae71A3t9NEINBiTn7swyOhvUj3MyUOxb8x6g6vPEFoOU+ZRmGMusNC3X3YMhqMIX7i8ShqhT74Pw==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "dependencies": { + "@eslint-community/regexpp": "^4.12.2", + "@typescript-eslint/scope-manager": "8.56.0", + "@typescript-eslint/type-utils": "8.56.0", + "@typescript-eslint/utils": "8.56.0", + "@typescript-eslint/visitor-keys": "8.56.0", + "ignore": "^7.0.5", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.4.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.56.0", + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", + "typescript": ">=4.8.4 <6.0.0" + } }, - "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.30.1.tgz", - "integrity": "sha512-fARcF5g296snX0oLGkVxPmysetwUk2zmHcca+e9ObOovBR++9ZPOhqFUM61UUZ2EYpXVPN1redgqVoBB34nTpQ==", - "cpu": [ - "loong64" - ], + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "engines": { + "node": ">= 4" + } }, - "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.30.1.tgz", - "integrity": "sha512-GLrZraoO3wVT4uFXh67ElpwQY0DIygxdv0BNW9Hkm3X34wu+BkqrDrkcsIapAY+N2ATEbvak0XQ9gxZtCIA5Rw==", - "cpu": [ - "ppc64" - ], + "node_modules/@typescript-eslint/parser": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.0.tgz", + "integrity": "sha512-IgSWvLobTDOjnaxAfDTIHaECbkNlAlKv2j5SjpB2v7QHKv1FIfjwMy8FsDbVfDX/KjmCmYICcw7uGaXLhtsLNg==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "dependencies": { + "@typescript-eslint/scope-manager": "8.56.0", + "@typescript-eslint/types": "8.56.0", + "@typescript-eslint/typescript-estree": "8.56.0", + "@typescript-eslint/visitor-keys": "8.56.0", + "debug": "^4.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", + 
"typescript": ">=4.8.4 <6.0.0" + } }, - "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.30.1.tgz", - "integrity": "sha512-0WKLaAUUHKBtll0wvOmh6yh3S0wSU9+yas923JIChfxOaaBarmb/lBKPF0w/+jTVozFnOXJeRGZ8NvOxvk/jcw==", - "cpu": [ - "riscv64" - ], + "node_modules/@typescript-eslint/project-service": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.0.tgz", + "integrity": "sha512-M3rnyL1vIQOMeWxTWIW096/TtVP+8W3p/XnaFflhmcFp+U4zlxUxWj4XwNs6HbDeTtN4yun0GNTTDBw/SvufKg==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.56.0", + "@typescript-eslint/types": "^8.56.0", + "debug": "^4.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } }, - "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.30.1.tgz", - "integrity": "sha512-GWFs97Ruxo5Bt+cvVTQkOJ6TIx0xJDD/bMAOXWJg8TCSTEK8RnFeOeiFTxKniTc4vMIaWvCplMAFBt9miGxgkA==", - "cpu": [ - "s390x" - ], + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.0.tgz", + "integrity": "sha512-7UiO/XwMHquH+ZzfVCfUNkIXlp/yQjjnlYUyYz7pfvlK3/EyyN6BK+emDmGNyQLBtLGaYrTAI6KOw8tFucWL2w==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "dependencies": { + "@typescript-eslint/types": "8.56.0", + "@typescript-eslint/visitor-keys": "8.56.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } }, - "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.30.1.tgz", - "integrity": "sha512-UtgGb7QGgXDIO+tqqJ5oZRGHsDLO8SlpE4MhqpY9Llpzi5rJMvrK6ZGhsRCST2abZdBqIBeXW6WPD5fGK5SDwg==", - "cpu": [ - "x64" - ], + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.0.tgz", + "integrity": "sha512-bSJoIIt4o3lKXD3xmDh9chZcjCz5Lk8xS7Rxn+6l5/pKrDpkCwtQNQQwZ2qRPk7TkUYhrq3WPIHXOXlbXP0itg==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } }, - "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.30.1.tgz", - "integrity": "sha512-V9U8Ey2UqmQsBT+xTOeMzPzwDzyXmnAoO4edZhL7INkwQcaW1Ckv3WJX3qrrp/VHaDkEWIBWhRwP47r8cdrOow==", - "cpu": [ - "x64" - ], + "node_modules/@typescript-eslint/type-utils": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.0.tgz", + "integrity": 
"sha512-qX2L3HWOU2nuDs6GzglBeuFXviDODreS58tLY/BALPC7iu3Fa+J7EOTwnX9PdNBxUI7Uh0ntP0YWGnxCkXzmfA==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "dependencies": { + "@typescript-eslint/types": "8.56.0", + "@typescript-eslint/typescript-estree": "8.56.0", + "@typescript-eslint/utils": "8.56.0", + "debug": "^4.4.3", + "ts-api-utils": "^2.4.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", + "typescript": ">=4.8.4 <6.0.0" + } }, - "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.30.1.tgz", - "integrity": "sha512-WabtHWiPaFF47W3PkHnjbmWawnX/aE57K47ZDT1BXTS5GgrBUEpvOzq0FI0V/UYzQJgdb8XlhVNH8/fwV8xDjw==", - "cpu": [ - "arm64" - ], + "node_modules/@typescript-eslint/types": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.0.tgz", + "integrity": "sha512-DBsLPs3GsWhX5HylbP9HNG15U0bnwut55Lx12bHB9MpXxQ+R5GC8MwQe+N1UFXxAeQDvEsEDY6ZYwX03K7Z6HQ==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "win32" - ] + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } }, - "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.30.1.tgz", - "integrity": "sha512-pxHAU+Zv39hLUTdQQHUVHf4P+0C47y/ZloorHpzs2SXMRqeAWmGghzAhfOlzFHHwjvgokdFAhC4V+6kC1lRRfw==", - "cpu": [ - "ia32" - ], + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.0.tgz", + "integrity": "sha512-ex1nTUMWrseMltXUHmR2GAQ4d+WjkZCT4f+4bVsps8QEdh0vlBsaCokKTPlnqBFqqGaxilDNJG7b8dolW2m43Q==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "win32" - ] + "dependencies": { + "@typescript-eslint/project-service": "8.56.0", + "@typescript-eslint/tsconfig-utils": "8.56.0", + "@typescript-eslint/types": "8.56.0", + "@typescript-eslint/visitor-keys": "8.56.0", + "debug": "^4.4.3", + "minimatch": "^9.0.5", + "semver": "^7.7.3", + "tinyglobby": "^0.2.15", + "ts-api-utils": "^2.4.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } }, - "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.30.1.tgz", - "integrity": "sha512-D6qjsXGcvhTjv0kI4fU8tUuBDF/Ueee4SVX79VfNDXZa64TfCW1Slkb6Z7O1p7vflqZjcmOVdZlqf8gvJxc6og==", - "cpu": [ - "x64" - ], + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "win32" - ] + "dependencies": { + "balanced-match": "^1.0.0" + } }, - 
"node_modules/@tanstack/query-core": { - "version": "5.64.1", - "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.64.1.tgz", - "integrity": "sha512-978Wx4Wl4UJZbmvU/rkaM9cQtXXrbhK0lsz/UZhYIbyKYA8E4LdomTwyh2GHZ4oU0BKKoDH4YlKk2VscCUgNmg==", - "license": "MIT", + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@tanstack/react-query": { - "version": "5.64.1", - "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.64.1.tgz", - "integrity": "sha512-vW5ggHpIO2Yjj44b4sB+Fd3cdnlMJppXRBJkEHvld6FXh3j5dwWJoQo7mGtKI2RbSFyiyu/PhGAy0+Vv5ev9Eg==", + "node_modules/@typescript-eslint/utils": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.0.tgz", + "integrity": "sha512-RZ3Qsmi2nFGsS+n+kjLAYDPVlrzf7UhTffrDIKr+h2yzAlYP/y5ZulU0yeDEPItos2Ph46JAL5P/On3pe7kDIQ==", + "dev": true, "license": "MIT", "dependencies": { - "@tanstack/query-core": "5.64.1" + "@eslint-community/eslint-utils": "^4.9.1", + "@typescript-eslint/scope-manager": "8.56.0", + "@typescript-eslint/types": "8.56.0", + "@typescript-eslint/typescript-estree": "8.56.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "react": "^18 || ^19" + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", + "typescript": ">=4.8.4 <6.0.0" } }, - "node_modules/@textea/json-viewer": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/@textea/json-viewer/-/json-viewer-3.5.0.tgz", - "integrity": "sha512-codh4YXkWPtMjucpn1krGxyJLQA2QhpfM0y3Sur7D/mONOnESoI5ZLmX3ZFo9heXPndDQgzCHsjpErvkN5+hxw==", + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.0.tgz", + "integrity": "sha512-q+SL+b+05Ud6LbEE35qe4A99P+htKTKVbyiNEe45eCbJFyh/HVK9QXwlrbz+Q4L8SOW4roxSVwXYj4DMBT7Ieg==", + "dev": true, "license": "MIT", "dependencies": { - "clsx": "^2.1.1", - "copy-to-clipboard": "^3.3.3", - "zustand": "^4.5.5" + "@typescript-eslint/types": "8.56.0", + "eslint-visitor-keys": "^5.0.0" }, - "peerDependencies": { - "@emotion/react": "^11", - "@emotion/styled": "^11", - "@mui/material": "^5", - "react": "^17 || ^18", - "react-dom": "^17 || ^18" - } - }, - "node_modules/@tiptap/core": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/core/-/core-2.11.2.tgz", - "integrity": "sha512-Z437c/sQg31yrRVgLJVkQuih+7Og5tjRx6FE/zE47QgEayqQ9yXH0LrTAbPiY6IfY1X+f2A0h3e5Y/WGD6rC3Q==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, - "peerDependencies": { - "@tiptap/pm": "^2.7.0" - } - }, - "node_modules/@tiptap/extension-blockquote": { - "version": "2.11.2", - "resolved": 
"https://registry.npmjs.org/@tiptap/extension-blockquote/-/extension-blockquote-2.11.2.tgz", - "integrity": "sha512-5XeU1o5UfjMCFX3AwgeErwDKlpUr5YPhta2tQqNsQUQ7QvumIdK/3apNT15/d8pySAjdAphDWEd/CZ2di5hq6A==", - "license": "MIT", "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@tiptap/extension-bold": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-bold/-/extension-bold-2.11.2.tgz", - "integrity": "sha512-pSls6UnKiPMm2c0m1viuZ0aFexxUmTRm17vDA2Gy5PhRm5qSsnHlSxyEuEcKNOi/rIx+oJehvG1oO4uI+kmCKg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.0.tgz", + "integrity": "sha512-A0XeIi7CXU7nPlfHS9loMYEKxUaONu/hTEzHTGba9Huu94Cq1hPivf+DE5erJozZOky0LfvXAyrV/tcswpLI0Q==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/@tiptap/extension-bubble-menu": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-bubble-menu/-/extension-bubble-menu-2.11.2.tgz", - "integrity": "sha512-G+m7JLhe6SGcDugm8q3RXVLVnCm4t67FGNlOLRzq25VNgD7FDNwjgISp04W+qcJa0+Z5cbQt/4naUji5QEH97A==", + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC" + }, + "node_modules/@vitejs/plugin-react": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.4.tgz", + "integrity": "sha512-VIcFLdRi/VYRU8OL/puL7QXMYafHmqOnwTZY50U1JPlCNj30PxCMx65c494b1K9be9hX83KVt0+gTEwTWLqToA==", + "dev": true, "license": "MIT", "dependencies": { - "tippy.js": "^6.3.7" + "@babel/core": "^7.29.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-rc.3", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.18.0" }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "engines": { + "node": "^20.19.0 || >=22.12.0" }, "peerDependencies": { - "@tiptap/core": "^2.7.0", - "@tiptap/pm": "^2.7.0" + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, - "node_modules/@tiptap/extension-bullet-list": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-bullet-list/-/extension-bullet-list-2.11.2.tgz", - "integrity": "sha512-zqZYT7lmmivEDEO+6w5bl5kV3UP1L2dw5mksyMGtxpvoDgbFHZ85+ron6SeHee8C7vJc6aIptc1p6NxIS5/l0A==", + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "bin": { + "acorn": "bin/acorn" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" 
+ "engines": { + "node": ">=0.4.0" } }, - "node_modules/@tiptap/extension-code": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-code/-/extension-code-2.11.2.tgz", - "integrity": "sha512-G8vvb17QAYQij3haz9RoDvArK1LSOZHqGzQ2dJ3/d0W5oqOyUrTnseN66fRZjWhBT3pns0VL2erwe/NBIqLOIw==", + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, "peerDependencies": { - "@tiptap/core": "^2.7.0" + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/@tiptap/extension-code-block": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-code-block/-/extension-code-block-2.11.2.tgz", - "integrity": "sha512-O6gVfql3uFZNq9yaUDa98VgV58BqaUSeOUnhZwLzpB/4VlqzTyW6/kvFxhKcSp7f+GmrMQaV4PXRs+tZcq6EFw==", + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, "funding": { "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0", - "@tiptap/pm": "^2.7.0" + "url": "https://github.com/sponsors/epoberezkin" } }, - "node_modules/@tiptap/extension-color": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-color/-/extension-color-2.11.2.tgz", - "integrity": "sha512-KUfASlEXrZeioBeSnpQWmCIiWtVaHDXJ+ZD0giSGSTQ4JwqdsIYZd8HpUrQjsfTkrqxW13zxQ1VklfvSPM8PQA==", + "node_modules/anser": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/anser/-/anser-2.3.5.tgz", + "integrity": "sha512-vcZjxvvVoxTeR5XBNJB38oTu/7eDCZlwdz32N1eNgpyPF7j/Z7Idf+CUwQOkKKpJ7RJyjxgLHCM7vdIK0iCNMQ==", + "license": "MIT" + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0", - "@tiptap/extension-text-style": "^2.7.0" + "engines": { + "node": ">=8" } }, - "node_modules/@tiptap/extension-document": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-document/-/extension-document-2.11.2.tgz", - "integrity": "sha512-/EZhIAN1x7DYgGM0xv7y7wo5ceBmHb0+rOIPuBerVFeTn+VcC3tST/Q64bdvcxgNe2E59Ti0CUdYEA51wc2u5Q==", + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "dependencies": { + "color-convert": "^2.0.1" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, 
- "node_modules/@tiptap/extension-dropcursor": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-dropcursor/-/extension-dropcursor-2.11.2.tgz", - "integrity": "sha512-HbXC9cMVZUY3kyKwbDtVH452CY1qlyLbIvTaN0+dxkFgcVeQZZtfIxU7DwMmqCDmDnsh0CdDqUgUvcXS2UQTwA==", + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" + }, + "node_modules/aria-hidden": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.6.tgz", + "integrity": "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==", "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "dependencies": { + "tslib": "^2.0.0" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0", - "@tiptap/pm": "^2.7.0" + "engines": { + "node": ">=10" } }, - "node_modules/@tiptap/extension-floating-menu": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-floating-menu/-/extension-floating-menu-2.11.2.tgz", - "integrity": "sha512-DoFGgguE24rxPkZTD7sH3GFi9E3JKQGeGw0sFTwXx1ZFnyCtqbLcPOfT4THlvUEcixt68Mk48M1NTFVOGn/dyA==", + "node_modules/array-buffer-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", + "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", + "dev": true, "license": "MIT", "dependencies": { - "tippy.js": "^6.3.7" + "call-bound": "^1.0.3", + "is-array-buffer": "^3.0.5" }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "engines": { + "node": ">= 0.4" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0", - "@tiptap/pm": "^2.7.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/@tiptap/extension-gapcursor": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-gapcursor/-/extension-gapcursor-2.11.2.tgz", - "integrity": "sha512-ssJOrcc8dzlo5/Qq3+EixASDHTj3mqCyAv7Ohed1QYEYr+TsSpsTbjR0eMLjWHlgbt24TXL2Wr0ldjYCU8T1ZA==", + "node_modules/array-includes": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz", + "integrity": "sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==", + "dev": true, "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.24.0", + "es-object-atoms": "^1.1.1", + "get-intrinsic": "^1.3.0", + "is-string": "^1.1.1", + "math-intrinsics": "^1.1.0" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0", - "@tiptap/pm": "^2.7.0" + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/@tiptap/extension-hard-break": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-hard-break/-/extension-hard-break-2.11.2.tgz", - "integrity": "sha512-FNcXemfuwkiP4drZ9m90BC6GD4nyikfYHYEUyYuVd74Mm6w5vXpueWXus3mUcdT78xTs1XpQVibDorilLu7X8w==", + "node_modules/array.prototype.findlast": { + "version": "1.2.5", + "resolved": 
"https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", + "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", + "dev": true, "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/@tiptap/extension-heading": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-heading/-/extension-heading-2.11.2.tgz", - "integrity": "sha512-y/wAEXYB0a8y5WmSYGCIXAhus1ydudn0pokKIzT/OD00XutAVh14qOB5h/+m8iXwGU/UYMP7SUCtK82txZqwKA==", + "node_modules/array.prototype.flat": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz", + "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==", + "dev": true, "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/@tiptap/extension-highlight": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-highlight/-/extension-highlight-2.11.2.tgz", - "integrity": "sha512-ztq2lGthTIY/zPYtdYrG7+0dc4R4abkZqDVAmLxkFcwjs/mejq7nMG7WM2Unn2cIGo96m8Ibz/UtoOPJDt/+/Q==", + "node_modules/array.prototype.flatmap": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz", + "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==", + "dev": true, "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.tosorted": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz", + "integrity": "sha512-p6Fx8B7b7ZhL/gmUsAy0D15WhvDccw3mnGNbZpi3pmeJdxtWsj2jEaI4Y6oo3XiHfzuSgPwKc04MYt6KgvC/wA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", + "es-shim-unscopables": "^1.0.2" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "engines": { + "node": ">= 0.4" } }, - "node_modules/@tiptap/extension-history": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-history/-/extension-history-2.11.2.tgz", - "integrity": "sha512-BamS6YjKsETgP7msmm0oIpqmNSLJWbivm3XurR3uSUqJZYrQo1Fv+No4HAR7eAACxoOnYGcDmYsrombRVs9lxw==", + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", + "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", + "dev": true, "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "is-array-buffer": "^3.0.4" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0", - "@tiptap/pm": "^2.7.0" + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/@tiptap/extension-horizontal-rule": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-horizontal-rule/-/extension-horizontal-rule-2.11.2.tgz", - "integrity": "sha512-R7MkTQzxkBy0bXJfq6L+6ax01/hmTEUvPPoyjwDSfU1Ktc1ihBJGUdTNtohT1KoQGQYt2d9khBohVspsXoCmFw==", + "node_modules/async-function": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", + "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", + "dev": true, "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0", - "@tiptap/pm": "^2.7.0" + "engines": { + "node": ">= 0.4" } }, - "node_modules/@tiptap/extension-image": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-image/-/extension-image-2.11.2.tgz", - "integrity": "sha512-Ag+Arj6sclTqhvR+v5I2UD5e2lsWTcXLj0aS2aEsfGpytltk6rcLj6iDjx/SmJrE1BN8ognJsdzmFdZF/rNLpg==", + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/attr-accept": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/attr-accept/-/attr-accept-2.2.5.tgz", + "integrity": "sha512-0bDNnY/u6pPwHDMoF0FieU354oBi0a8rD9FcsLwzcGWbc8KS8KPIi7y+s13OlVY+gMWc/9xEMUgNE6Qm8ZllYQ==", "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "engines": { + "node": ">=4" } }, - "node_modules/@tiptap/extension-italic": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-italic/-/extension-italic-2.11.2.tgz", - "integrity": "sha512-652oTa+iDiR7sMtmePSy+303HSNJxvxmV/6IvQoMdffJU0oPiWcWnCCL0qrWgtHh15dplj36EtB/znENWbvVOw==", + "node_modules/autosuggest-highlight": { + "version": "3.3.4", + "resolved": "https://registry.npmjs.org/autosuggest-highlight/-/autosuggest-highlight-3.3.4.tgz", + "integrity": "sha512-j6RETBD2xYnrVcoV1S5R4t3WxOlWZKyDQjkwnggDPSjF5L4jV98ZltBpvPvbkM1HtoSe5o+bNrTHyjPbieGeYA==", "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "dependencies": { + "remove-accents": "^0.4.2" } }, - "node_modules/@tiptap/extension-link": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-link/-/extension-link-2.11.2.tgz", - "integrity": 
"sha512-Mbre+JotLMUg9jdWWrwIReiRVMkA2kMzmtD2Aqy/n5P+wuI84898qIZSkhPEzDOGzp0mluUO/iGsz0NdTto/JQ==", + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, "license": "MIT", "dependencies": { - "linkifyjs": "^4.2.0" + "possible-typed-array-names": "^1.0.0" }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "engines": { + "node": ">= 0.4" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0", - "@tiptap/pm": "^2.7.0" - } - }, - "node_modules/@tiptap/extension-list-item": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-list-item/-/extension-list-item-2.11.2.tgz", - "integrity": "sha512-cxysDCvw45bem53qLZtTCkle1pttO4Y/FGqYm1hl66ol3cZsuLbjpOb4aDB6wRhyd701Ws6MjOYM+cZsmtTNpw==", - "license": "MIT", "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/@tiptap/extension-ordered-list": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-ordered-list/-/extension-ordered-list-2.11.2.tgz", - "integrity": "sha512-TR8OqwKkQ0OCp40V9hcRJUcO1PSzCYWXy0mvW351lOYO8D6uE+1ouVkEV9qjXBC30sVCnQykSp/FR9UjsIuiVw==", + "node_modules/axios": { + "version": "1.13.5", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz", + "integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==", "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "dependencies": { + "follow-redirects": "^1.15.11", + "form-data": "^4.0.5", + "proxy-from-env": "^1.1.0" } }, - "node_modules/@tiptap/extension-paragraph": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-paragraph/-/extension-paragraph-2.11.2.tgz", - "integrity": "sha512-iydTjeZbPJuqctOaAx7QebLPvz9J/hBxPptuhe4GZmqInknAk7+SFJagYeGNb14wfXKOvDZ9DMqv6mBiqSA90Q==", + "node_modules/babel-plugin-macros": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz", + "integrity": "sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg==", "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "dependencies": { + "@babel/runtime": "^7.12.5", + "cosmiconfig": "^7.0.0", + "resolve": "^1.19.0" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "engines": { + "node": ">=10", + "npm": ">=6" } }, - "node_modules/@tiptap/extension-placeholder": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-placeholder/-/extension-placeholder-2.11.2.tgz", - "integrity": "sha512-7rv6nylqX57Q+K+AH794Kg9U7OrLyujhXXqQvd9iZdBP7bTCNUlFu0cGlIyHdM/eWJjoUblZs0VLV2IApk4xjQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0", - "@tiptap/pm": "^2.7.0" - } + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": 
"sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" }, - "node_modules/@tiptap/extension-strike": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-strike/-/extension-strike-2.11.2.tgz", - "integrity": "sha512-n/rznmhqFlENGSlFY9t3pWnWzSmvDpUj3sjVhdpYteis+OCzabN9+c5KdQTBPMjtwRuRleQiKWnHmxvif0heEg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.9.19", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.19.tgz", + "integrity": "sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" } }, - "node_modules/@tiptap/extension-text": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-text/-/extension-text-2.11.2.tgz", - "integrity": "sha512-fJZeKYM5jeJ7NpS3FWLnC/NAvg+mZNbcTaRgXMo5ljBCgiMcYHhYg9p/RHk4SeICZBBpR9WSSZXHMACd9CbJiA==", + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" } }, - "node_modules/@tiptap/extension-text-align": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-text-align/-/extension-text-align-2.11.2.tgz", - "integrity": "sha512-0QfjepmK+iHubjTsJshuhSZvF54M+vaqpReDraf9NKVx+WkiZ+mxJX4S5nZwoFsp21sEz7Q7z03+uXgb/yj7mQ==", + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || 
^11 || ^12 || >=13.7" } }, - "node_modules/@tiptap/extension-text-style": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-text-style/-/extension-text-style-2.11.2.tgz", - "integrity": "sha512-RAa7BTwEOJRZN3EB2lg03KXyu7JC/Ce96cerh3D0Fo78yrtKOArPaiVHoTki6ZEIG43ccHEit1PPjMYxivPPeg==", + "node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" - }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" } }, - "node_modules/@tiptap/extension-underline": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/extension-underline/-/extension-underline-2.11.2.tgz", - "integrity": "sha512-Gq7hfV3D/3E1zoE6JXSYZ3boKfbjebFu7IuQZ6w6QSS/IkGN8c6kW+WtkhOKN3jV7Z5uF5KIVp3XCqNIwChWNQ==", + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0" - } - }, - "node_modules/@tiptap/pm": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/pm/-/pm-2.11.2.tgz", - "integrity": "sha512-lNOMFRcD0mGy7Hf8tFMHW/fnglvq3dA0grs0QrSY4cHyYbH9BHtQjLMDceczXdXbXZq7nEqC40UBWNnqtaclpw==", - "license": "MIT", - "dependencies": { - "prosemirror-changeset": "^2.2.1", - "prosemirror-collab": "^1.3.1", - "prosemirror-commands": "^1.6.2", - "prosemirror-dropcursor": "^1.8.1", - "prosemirror-gapcursor": "^1.3.2", - "prosemirror-history": "^1.4.1", - "prosemirror-inputrules": "^1.4.0", - "prosemirror-keymap": "^1.2.2", - "prosemirror-markdown": "^1.13.1", - "prosemirror-menu": "^1.2.4", - "prosemirror-model": "^1.23.0", - "prosemirror-schema-basic": "^1.2.3", - "prosemirror-schema-list": "^1.4.1", - "prosemirror-state": "^1.4.3", - "prosemirror-tables": "^1.6.1", - "prosemirror-trailing-node": "^3.0.0", - "prosemirror-transform": "^1.10.2", - "prosemirror-view": "^1.37.0" + "engines": { + "node": ">= 0.4" }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/@tiptap/react": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/react/-/react-2.11.2.tgz", - "integrity": "sha512-fhrfgXQwyc34L6ju+zzNGMa6J94+W20yww0BHyrqojUzjpYskVdO7/37h4OBfFGEhrAhLEXXDxRZcrNPtBf86A==", + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", "license": "MIT", "dependencies": { - 
"@tiptap/extension-bubble-menu": "^2.11.2", - "@tiptap/extension-floating-menu": "^2.11.2", - "@types/use-sync-external-store": "^0.0.6", - "fast-deep-equal": "^3", - "use-sync-external-store": "^1" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" }, - "peerDependencies": { - "@tiptap/core": "^2.7.0", - "@tiptap/pm": "^2.7.0", - "react": "^17.0.0 || ^18.0.0 || ^19.0.0", - "react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0" + "engines": { + "node": ">= 0.4" } }, - "node_modules/@tiptap/starter-kit": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/@tiptap/starter-kit/-/starter-kit-2.11.2.tgz", - "integrity": "sha512-FUIblP9BSmBzskf/aX7AIcUK5XP5Gi/VqUqm5evCkzlR1FrggLoy+vY+CX0me4oE/WYk4KAgIRXkE9tcbwotQA==", - "license": "MIT", - "dependencies": { - "@tiptap/core": "^2.11.2", - "@tiptap/extension-blockquote": "^2.11.2", - "@tiptap/extension-bold": "^2.11.2", - "@tiptap/extension-bullet-list": "^2.11.2", - "@tiptap/extension-code": "^2.11.2", - "@tiptap/extension-code-block": "^2.11.2", - "@tiptap/extension-document": "^2.11.2", - "@tiptap/extension-dropcursor": "^2.11.2", - "@tiptap/extension-gapcursor": "^2.11.2", - "@tiptap/extension-hard-break": "^2.11.2", - "@tiptap/extension-heading": "^2.11.2", - "@tiptap/extension-history": "^2.11.2", - "@tiptap/extension-horizontal-rule": "^2.11.2", - "@tiptap/extension-italic": "^2.11.2", - "@tiptap/extension-list-item": "^2.11.2", - "@tiptap/extension-ordered-list": "^2.11.2", - "@tiptap/extension-paragraph": "^2.11.2", - "@tiptap/extension-strike": "^2.11.2", - "@tiptap/extension-text": "^2.11.2", - "@tiptap/extension-text-style": "^2.11.2", - "@tiptap/pm": "^2.11.2" + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/ueberdosis" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/@trivago/prettier-plugin-sort-imports": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/@trivago/prettier-plugin-sort-imports/-/prettier-plugin-sort-imports-5.2.1.tgz", - "integrity": "sha512-NDZndt0fmVThIx/8cExuJHLZagUVzfGCoVrwH9x6aZvwfBdkrDFTYujecek6X2WpG4uUFsVaPg5+aNQPSyjcmw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@babel/generator": "^7.26.2", - "@babel/parser": "^7.26.2", - "@babel/traverse": "^7.25.9", - "@babel/types": "^7.26.0", - "javascript-natural-sort": "^0.7.1", - "lodash": "^4.17.21" - }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "license": "MIT", "engines": { - "node": ">18.12" - }, - "peerDependencies": { - "@vue/compiler-sfc": "3.x", - "prettier": "2.x - 3.x", - "prettier-plugin-svelte": "3.x", - "svelte": "4.x || 5.x" - }, - "peerDependenciesMeta": { - "@vue/compiler-sfc": { - "optional": true + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001767", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001767.tgz", + "integrity": 
"sha512-34+zUAMhSH+r+9eKmYG+k2Rpt8XttfE4yXAjoZvkAPs15xcYQhyBYdalJ65BzivAvGRMViEjy6oKr/S91loekQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" }, - "prettier-plugin-svelte": { - "optional": true + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" }, - "svelte": { - "optional": true + { + "type": "github", + "url": "https://github.com/sponsors/ai" } - } + ], + "license": "CC-BY-4.0" }, - "node_modules/@types/babel__core": { - "version": "7.20.5", - "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", - "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", - "dev": true, + "node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", "license": "MIT", - "dependencies": { - "@babel/parser": "^7.20.7", - "@babel/types": "^7.20.7", - "@types/babel__generator": "*", - "@types/babel__template": "*", - "@types/babel__traverse": "*" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/@types/babel__generator": { - "version": "7.6.8", - "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.8.tgz", - "integrity": "sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw==", + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.0.0" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/@types/babel__template": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", - "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", - "dev": true, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", "license": "MIT", - "dependencies": { - "@babel/parser": "^7.1.0", - "@babel/types": "^7.0.0" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/@types/babel__traverse": { - "version": "7.20.6", - "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz", - "integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==", - "dev": true, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", "license": "MIT", - "dependencies": { - "@babel/types": "^7.20.7" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/@types/d3": { - "version": 
"7.4.3", - "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.3.tgz", - "integrity": "sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==", + "node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", "license": "MIT", - "dependencies": { - "@types/d3-array": "*", - "@types/d3-axis": "*", - "@types/d3-brush": "*", - "@types/d3-chord": "*", - "@types/d3-color": "*", - "@types/d3-contour": "*", - "@types/d3-delaunay": "*", - "@types/d3-dispatch": "*", - "@types/d3-drag": "*", - "@types/d3-dsv": "*", - "@types/d3-ease": "*", - "@types/d3-fetch": "*", - "@types/d3-force": "*", - "@types/d3-format": "*", - "@types/d3-geo": "*", - "@types/d3-hierarchy": "*", - "@types/d3-interpolate": "*", - "@types/d3-path": "*", - "@types/d3-polygon": "*", - "@types/d3-quadtree": "*", - "@types/d3-random": "*", - "@types/d3-scale": "*", - "@types/d3-scale-chromatic": "*", - "@types/d3-selection": "*", - "@types/d3-shape": "*", - "@types/d3-time": "*", - "@types/d3-time-format": "*", - "@types/d3-timer": "*", - "@types/d3-transition": "*", - "@types/d3-zoom": "*" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/@types/d3-array": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz", - "integrity": "sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==", - "license": "MIT" - }, - "node_modules/@types/d3-axis": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.6.tgz", - "integrity": "sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==", + "node_modules/character-reference-invalid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", + "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", "license": "MIT", - "dependencies": { - "@types/d3-selection": "*" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/@types/d3-brush": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.6.tgz", - "integrity": "sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==", + "node_modules/chart.js": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.1.tgz", + "integrity": "sha512-GIjfiT9dbmHRiYi6Nl2yFCq7kkwdkp1W/lp2J99rX0yo9tgJGn3lKQATztIjb5tVtevcBtIdICNWqlq5+E8/Pw==", "license": "MIT", "dependencies": { - "@types/d3-selection": "*" + "@kurkle/color": "^0.3.0" + }, + "engines": { + "pnpm": ">=8" } }, - "node_modules/@types/d3-chord": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.6.tgz", - "integrity": "sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==", - "license": "MIT" - }, - "node_modules/@types/d3-color": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", - "integrity": 
"sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", - "license": "MIT" + "node_modules/chevrotain": { + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.1.1.tgz", + "integrity": "sha512-f0yv5CPKaFxfsPTBzX7vGuim4oIC1/gcS7LUGdBSwl2dU6+FON6LVUksdOo1qJjoUvXNn45urgh8C+0a24pACQ==", + "license": "Apache-2.0", + "dependencies": { + "@chevrotain/cst-dts-gen": "11.1.1", + "@chevrotain/gast": "11.1.1", + "@chevrotain/regexp-to-ast": "11.1.1", + "@chevrotain/types": "11.1.1", + "@chevrotain/utils": "11.1.1", + "lodash-es": "4.17.23" + } }, - "node_modules/@types/d3-contour": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.6.tgz", - "integrity": "sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==", + "node_modules/chevrotain-allstar": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/chevrotain-allstar/-/chevrotain-allstar-0.3.1.tgz", + "integrity": "sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==", "license": "MIT", "dependencies": { - "@types/d3-array": "*", - "@types/geojson": "*" + "lodash-es": "^4.17.21" + }, + "peerDependencies": { + "chevrotain": "^11.0.0" } }, - "node_modules/@types/d3-delaunay": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.4.tgz", - "integrity": "sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==", + "node_modules/classnames": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==", "license": "MIT" }, - "node_modules/@types/d3-dispatch": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.6.tgz", - "integrity": "sha512-4fvZhzMeeuBJYZXRXrRIQnvUYfyXwYmLsdiN7XXmVNQKKw1cM8a5WdID0g1hVFZDqT9ZqZEY5pD44p24VS7iZQ==", + "node_modules/clean-set": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/clean-set/-/clean-set-1.1.2.tgz", + "integrity": "sha512-cA8uCj0qSoG9e0kevyOWXwPaELRPVg5Pxp6WskLMwerx257Zfnh8Nl0JBH59d7wQzij2CK7qEfJQK3RjuKKIug==", "license": "MIT" }, - "node_modules/@types/d3-drag": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.7.tgz", - "integrity": "sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==", - "license": "MIT", + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "license": "ISC", "dependencies": { - "@types/d3-selection": "*" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" } }, - "node_modules/@types/d3-dsv": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.7.tgz", - "integrity": "sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==", - "license": "MIT" + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + 
"license": "MIT", + "engines": { + "node": ">=6" + } }, - "node_modules/@types/d3-ease": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", - "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", - "license": "MIT" + "node_modules/cm6-theme-basic-dark": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/cm6-theme-basic-dark/-/cm6-theme-basic-dark-0.2.0.tgz", + "integrity": "sha512-+mNNJecRtxS/KkloMDCQF0oTrT6aFGRZTjnBcdT5UG1pcDO4Brq8l1+0KR/8dZ7hub2gOGOzoi3rGFD8GzlH7Q==", + "license": "MIT", + "peerDependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/highlight": "^1.0.0" + } }, - "node_modules/@types/d3-fetch": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.7.tgz", - "integrity": "sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==", + "node_modules/cm6-theme-basic-light": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/cm6-theme-basic-light/-/cm6-theme-basic-light-0.2.0.tgz", + "integrity": "sha512-1prg2gv44sYfpHscP26uLT/ePrh0mlmVwMSoSd3zYKQ92Ab3jPRLzyCnpyOCQLJbK+YdNs4HvMRqMNYdy4pMhA==", + "license": "MIT", + "peerDependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/highlight": "^1.0.0" + } + }, + "node_modules/codemirror": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.2.tgz", + "integrity": "sha512-VhydHotNW5w1UGK0Qj96BwSk/Zqbp9WbnyK2W/eVMv4QyF41INRGpjUhFJY7/uDNuudSc33a/PKr4iDqRduvHw==", "license": "MIT", "dependencies": { - "@types/d3-dsv": "*" + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/commands": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/lint": "^6.0.0", + "@codemirror/search": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0" } }, - "node_modules/@types/d3-force": { - "version": "3.0.10", - "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.10.tgz", - "integrity": "sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==", - "license": "MIT" + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } }, - "node_modules/@types/d3-format": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.4.tgz", - "integrity": "sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==", + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "license": "MIT" }, - "node_modules/@types/d3-geo": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.1.0.tgz", - "integrity": "sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==", + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", "license": "MIT", "dependencies": { - "@types/geojson": "*" + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" } }, - "node_modules/@types/d3-hierarchy": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.7.tgz", - "integrity": "sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==", - "license": "MIT" + "node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } }, - "node_modules/@types/d3-interpolate": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", - "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", + "node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", "license": "MIT", - "dependencies": { - "@types/d3-color": "*" + "engines": { + "node": ">= 10" } }, - "node_modules/@types/d3-path": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-1.0.11.tgz", - "integrity": "sha512-4pQMp8ldf7UaB/gR8Fvvy69psNHkTpD/pVw3vmEi8iZAB9EPMBruB1JvHO4BIq9QkUUd2lV1F5YXpMNj7JPBpw==", + "node_modules/compute-scroll-into-view": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-2.0.4.tgz", + "integrity": "sha512-y/ZA3BGnxoM/QHHQ2Uy49CLtnWPbt4tTPpEEZiEmmiWBFKjej7nEyH8Ryz54jH0MLXflUYA3Er2zUxPSJu5R+g==", "license": "MIT" }, - "node_modules/@types/d3-polygon": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-polygon/-/d3-polygon-3.0.2.tgz", - "integrity": "sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==", + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, "license": "MIT" }, - "node_modules/@types/d3-quadtree": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.6.tgz", - "integrity": "sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==", + "node_modules/confbox": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", + "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", "license": "MIT" }, - "node_modules/@types/d3-random": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-random/-/d3-random-3.0.3.tgz", - "integrity": "sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==", + "node_modules/convert-source-map": { + "version": "1.9.0", + "resolved": 
"https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", + "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", "license": "MIT" }, - "node_modules/@types/d3-scale": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.8.tgz", - "integrity": "sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==", + "node_modules/cookie": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", "license": "MIT", - "dependencies": { - "@types/d3-time": "*" + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, - "node_modules/@types/d3-scale-chromatic": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", - "integrity": "sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==", - "license": "MIT" - }, - "node_modules/@types/d3-selection": { - "version": "3.0.11", - "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.11.tgz", - "integrity": "sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==", - "license": "MIT" - }, - "node_modules/@types/d3-shape": { - "version": "1.3.12", - "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-1.3.12.tgz", - "integrity": "sha512-8oMzcd4+poSLGgV0R1Q1rOlx/xdmozS4Xab7np0eamFFUYq71AU9pOCJEFnkXW2aI/oXdVYJzw6pssbSut7Z9Q==", + "node_modules/copy-to-clipboard": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz", + "integrity": "sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA==", "license": "MIT", "dependencies": { - "@types/d3-path": "^1" + "toggle-selection": "^1.0.6" } }, - "node_modules/@types/d3-time": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", - "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", - "license": "MIT" - }, - "node_modules/@types/d3-time-format": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.3.tgz", - "integrity": "sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==", - "license": "MIT" - }, - "node_modules/@types/d3-timer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", - "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", - "license": "MIT" - }, - "node_modules/@types/d3-transition": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.9.tgz", - "integrity": "sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==", + "node_modules/cose-base": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz", + "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==", "license": "MIT", "dependencies": { - "@types/d3-selection": "*" + "layout-base": 
"^1.0.0" } }, - "node_modules/@types/d3-zoom": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.8.tgz", - "integrity": "sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==", + "node_modules/cosmiconfig": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz", + "integrity": "sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==", "license": "MIT", "dependencies": { - "@types/d3-interpolate": "*", - "@types/d3-selection": "*" + "@types/parse-json": "^4.0.0", + "import-fresh": "^3.2.1", + "parse-json": "^5.0.0", + "path-type": "^4.0.0", + "yaml": "^1.10.0" + }, + "engines": { + "node": ">=10" } }, - "node_modules/@types/estree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", - "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", - "dev": true, - "license": "MIT" + "node_modules/cosmiconfig/node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "license": "ISC", + "engines": { + "node": ">= 6" + } }, - "node_modules/@types/geojson": { - "version": "7946.0.15", - "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.15.tgz", - "integrity": "sha512-9oSxFzDCT2Rj6DfcHF8G++jxBKS7mBqXl5xrRW+Kbvjry6Uduya2iiwqHPhVXpasAVMBYKkEPGgKhd3+/HZ6xA==", + "node_modules/crelt": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", + "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==", "license": "MIT" }, - "node_modules/@types/inflection": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@types/inflection/-/inflection-1.13.2.tgz", - "integrity": "sha512-VxXY8dNLrxn7nDvsud77K60uD3a9RSmKfa0k/N/zvP2G55R5/8DSO5Ferz3mQdlAo8jPnpQLilCx9rABdPHSVg==", + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "dev": true, - "license": "MIT" + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } }, - "node_modules/@types/json-schema": { - "version": "7.0.15", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", - "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", - "dev": true, - "license": "MIT" + "node_modules/css-mediaquery": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/css-mediaquery/-/css-mediaquery-0.1.2.tgz", + "integrity": "sha512-COtn4EROW5dBGlE/4PiKnh6rZpAPxDeFLaEEwt4i10jpDMFt2EhQGS79QmmrO+iKCHv0PU/HrOWEhijFd1x99Q==", + "license": "BSD" }, - "node_modules/@types/linkify-it": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz", - "integrity": "sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==", + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + 
"integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", "license": "MIT" }, - "node_modules/@types/markdown-it": { - "version": "14.1.2", - "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.2.tgz", - "integrity": "sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog==", + "node_modules/cytoscape": { + "version": "3.33.1", + "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.33.1.tgz", + "integrity": "sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==", + "license": "MIT", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/cytoscape-cose-bilkent": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz", + "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==", "license": "MIT", "dependencies": { - "@types/linkify-it": "^5", - "@types/mdurl": "^2" + "cose-base": "^1.0.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" } }, - "node_modules/@types/mdurl": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-2.0.0.tgz", - "integrity": "sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==", - "license": "MIT" + "node_modules/cytoscape-fcose": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz", + "integrity": "sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==", + "license": "MIT", + "dependencies": { + "cose-base": "^2.2.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" + } }, - "node_modules/@types/node": { - "version": "22.10.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.6.tgz", - "integrity": "sha512-qNiuwC4ZDAUNcY47xgaSuS92cjf8JbSUoaKS77bmLG1rU7MlATVSiw/IlrjtIyyskXBZ8KkNfjK/P5na7rgXbQ==", - "dev": true, + "node_modules/cytoscape-fcose/node_modules/cose-base": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-2.2.0.tgz", + "integrity": "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==", "license": "MIT", "dependencies": { - "undici-types": "~6.20.0" + "layout-base": "^2.0.0" } }, - "node_modules/@types/parse-json": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz", - "integrity": "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==", + "node_modules/cytoscape-fcose/node_modules/layout-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-2.0.1.tgz", + "integrity": "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==", "license": "MIT" }, - "node_modules/@types/prop-types": { - "version": "15.7.14", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.14.tgz", - "integrity": "sha512-gNMvNH49DJ7OJYv+KAKn0Xp45p8PLl6zo2YnvDIbTd4J6MER2BmWN49TG7n9LvkyihINxeKW8+3bfS2yDC9dzQ==", - "license": "MIT" + "node_modules/d": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/d/-/d-1.0.2.tgz", + "integrity": "sha512-MOqHvMWF9/9MX6nza0KgvFH4HpMU0EF5uUDXqX/BtxtU8NfB0QzRtJ8Oe/6SuS4kbhyzVJwjd97EA4PKrzJ8bw==", + "license": "ISC", + "dependencies": { + "es5-ext": "^0.10.64", + 
"type": "^2.7.2" + }, + "engines": { + "node": ">=0.12" + } }, - "node_modules/@types/react": { - "version": "18.3.18", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.18.tgz", - "integrity": "sha512-t4yC+vtgnkYjNSKlFx1jkAhH8LgTo2N/7Qvi83kdEaUtMDiwpbLAktKDaAMlRcJ5eSxZkH74eEGt1ky31d7kfQ==", - "dev": true, - "license": "MIT", + "node_modules/d3": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", + "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", + "license": "ISC", "dependencies": { - "@types/prop-types": "*", - "csstype": "^3.0.2" + "d3-array": "3", + "d3-axis": "3", + "d3-brush": "3", + "d3-chord": "3", + "d3-color": "3", + "d3-contour": "4", + "d3-delaunay": "6", + "d3-dispatch": "3", + "d3-drag": "3", + "d3-dsv": "3", + "d3-ease": "3", + "d3-fetch": "3", + "d3-force": "3", + "d3-format": "3", + "d3-geo": "3", + "d3-hierarchy": "3", + "d3-interpolate": "3", + "d3-path": "3", + "d3-polygon": "3", + "d3-quadtree": "3", + "d3-random": "3", + "d3-scale": "4", + "d3-scale-chromatic": "3", + "d3-selection": "3", + "d3-shape": "3", + "d3-time": "3", + "d3-time-format": "4", + "d3-timer": "3", + "d3-transition": "3", + "d3-zoom": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "license": "ISC", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-axis": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", + "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-brush": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", + "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "3", + "d3-transition": "3" + }, + "engines": { + "node": ">=12" } }, - "node_modules/@types/react-dom": { - "version": "18.3.5", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.5.tgz", - "integrity": "sha512-P4t6saawp+b/dFrUr2cvkVsfvPguwsxtH6dNIYRllMsefqFzkZk5UIjzyDOv5g1dXIPdG4Sp1yCR4Z6RCUsG/Q==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "@types/react": "^18.0.0" + "node_modules/d3-chord": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", + "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", + "license": "ISC", + "dependencies": { + "d3-path": "1 - 3" + }, + "engines": { + "node": ">=12" } }, - "node_modules/@types/react-transition-group": { - "version": "4.4.12", - "resolved": "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.12.tgz", - "integrity": "sha512-8TV6R3h2j7a91c+1DXdJi3Syo69zzIZbz7Lg5tORM5LEJG7X/E6a1V3drRyBRZq7/utz7A+c4OgYLiLcYGHG6w==", - "license": "MIT", - "peerDependencies": { - "@types/react": "*" + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "license": "ISC", + "engines": { + "node": ">=12" } }, - "node_modules/@types/recharts": { - "version": "1.8.29", - "resolved": "https://registry.npmjs.org/@types/recharts/-/recharts-1.8.29.tgz", - "integrity": "sha512-ulKklaVsnFIIhTQsQw226TnOibrddW1qUQNFVhoQEyY1Z7FRQrNecFCGt7msRuJseudzE9czVawZb17dK/aPXw==", - "dev": true, - "license": "MIT", + "node_modules/d3-contour": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", + "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", + "license": "ISC", "dependencies": { - "@types/d3-shape": "^1", - "@types/react": "*" + "d3-array": "^3.2.0" + }, + "engines": { + "node": ">=12" } }, - "node_modules/@types/trusted-types": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", - "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", - "license": "MIT", - "optional": true - }, - "node_modules/@types/use-sync-external-store": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/@types/use-sync-external-store/-/use-sync-external-store-0.0.6.tgz", - "integrity": "sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg==", - "license": "MIT" - }, - "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.20.0.tgz", - "integrity": "sha512-naduuphVw5StFfqp4Gq4WhIBE2gN1GEmMUExpJYknZJdRnc+2gDzB8Z3+5+/Kv33hPQRDGzQO/0opHE72lZZ6A==", - "dev": true, - "license": "MIT", + "node_modules/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", + "license": "ISC", "dependencies": { - "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.20.0", - "@typescript-eslint/type-utils": "8.20.0", - "@typescript-eslint/utils": "8.20.0", - "@typescript-eslint/visitor-keys": "8.20.0", - "graphemer": "^1.4.0", - "ignore": "^5.3.1", - "natural-compare": "^1.4.0", - "ts-api-utils": "^2.0.0" + "delaunator": "5" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" + "node": ">=12" } }, - "node_modules/@typescript-eslint/parser": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.20.0.tgz", - "integrity": "sha512-gKXG7A5HMyjDIedBi6bUrDcun8GIjnI8qOwVLiY3rx6T/sHP/19XLJOnIq/FgQvWLHja5JN/LSE7eklNBr612g==", - "dev": true, - "license": "MIT", + "node_modules/d3-dispatch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-drag": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "license": "ISC", "dependencies": { - "@typescript-eslint/scope-manager": "8.20.0", - "@typescript-eslint/types": "8.20.0", - "@typescript-eslint/typescript-estree": "8.20.0", - "@typescript-eslint/visitor-keys": "8.20.0", - "debug": "^4.3.4" + "d3-dispatch": "1 - 3", + "d3-selection": "3" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + "node": ">=12" + } + }, + "node_modules/d3-dsv": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", + "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", + "license": "ISC", + "dependencies": { + "commander": "7", + "iconv-lite": "0.6", + "rw": "1" }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "bin": { + "csv2json": "bin/dsv2json.js", + "csv2tsv": "bin/dsv2dsv.js", + "dsv2dsv": "bin/dsv2dsv.js", + "dsv2json": "bin/dsv2json.js", + "json2csv": "bin/json2dsv.js", + "json2dsv": "bin/json2dsv.js", + "json2tsv": "bin/json2dsv.js", + "tsv2csv": "bin/dsv2dsv.js", + "tsv2json": "bin/dsv2json.js" }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" + "engines": { + "node": ">=12" } }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.20.0.tgz", - "integrity": "sha512-J7+VkpeGzhOt3FeG1+SzhiMj9NzGD/M6KoGn9f4dbz3YzK9hvbhVTmLj/HiTp9DazIzJ8B4XcM80LrR9Dm1rJw==", - "dev": true, - "license": "MIT", + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-fetch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", + "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", + "license": "ISC", "dependencies": { - "@typescript-eslint/types": "8.20.0", - "@typescript-eslint/visitor-keys": "8.20.0" + "d3-dsv": "1 - 3" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "node": ">=12" } }, - "node_modules/@typescript-eslint/type-utils": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.20.0.tgz", - "integrity": "sha512-bPC+j71GGvA7rVNAHAtOjbVXbLN5PkwqMvy1cwGeaxUoRQXVuKCebRoLzm+IPW/NtFFpstn1ummSIasD5t60GA==", - "dev": true, - "license": "MIT", + "node_modules/d3-force": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", + "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", + "license": "ISC", "dependencies": { - "@typescript-eslint/typescript-estree": "8.20.0", - "@typescript-eslint/utils": "8.20.0", - "debug": "^4.3.4", - "ts-api-utils": "^2.0.0" + "d3-dispatch": "1 - 3", + "d3-quadtree": "1 - 3", + "d3-timer": "1 - 3" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": 
"https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" + "node": ">=12" } }, - "node_modules/@typescript-eslint/types": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.20.0.tgz", - "integrity": "sha512-cqaMiY72CkP+2xZRrFt3ExRBu0WmVitN/rYPZErA80mHjHx/Svgp8yfbzkJmDoQ/whcytOPO9/IZXnOc+wigRA==", - "dev": true, - "license": "MIT", + "node_modules/d3-format": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.2.tgz", + "integrity": "sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==", + "license": "ISC", "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "node": ">=12" } }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.20.0.tgz", - "integrity": "sha512-Y7ncuy78bJqHI35NwzWol8E0X7XkRVS4K4P4TCyzWkOJih5NDvtoRDW4Ba9YJJoB2igm9yXDdYI/+fkiiAxPzA==", - "dev": true, - "license": "MIT", + "node_modules/d3-geo": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", + "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", + "license": "ISC", "dependencies": { - "@typescript-eslint/types": "8.20.0", - "@typescript-eslint/visitor-keys": "8.20.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^2.0.0" + "d3-array": "2.5.0 - 3" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "typescript": ">=4.8.4 <5.8.0" + "node": ">=12" } }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" + "node_modules/d3-hierarchy": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", + "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", + "license": "ISC", + "engines": { + "node": ">=12" } }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", "license": "ISC", "dependencies": { - "brace-expansion": "^2.0.1" + "d3-color": "1 - 3" }, "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "node": ">=12" } }, - 
"node_modules/@typescript-eslint/utils": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.20.0.tgz", - "integrity": "sha512-dq70RUw6UK9ei7vxc4KQtBRk7qkHZv447OUZ6RPQMQl71I3NZxQJX/f32Smr+iqWrB02pHKn2yAdHBb0KNrRMA==", - "dev": true, - "license": "MIT", + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-polygon": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", + "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-quadtree": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", + "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-random": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", + "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-sankey": { + "version": "0.12.3", + "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz", + "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==", + "license": "BSD-3-Clause", "dependencies": { - "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.20.0", - "@typescript-eslint/types": "8.20.0", - "@typescript-eslint/typescript-estree": "8.20.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" + "d3-array": "1 - 2", + "d3-shape": "^1.2.0" } }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.20.0.tgz", - "integrity": "sha512-v/BpkeeYAsPkKCkR8BDwcno0llhzWVqPOamQrAEMdpZav2Y9OVjd9dwJyBLJWwf335B5DmlifECIkZRJCaGaHA==", - "dev": true, - "license": "MIT", + "node_modules/d3-sankey/node_modules/d3-array": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", + "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==", + "license": "BSD-3-Clause", "dependencies": { - "@typescript-eslint/types": "8.20.0", - "eslint-visitor-keys": "^4.2.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "internmap": "^1.0.0" } }, - "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", - "dev": true, - 
"license": "Apache-2.0", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" + "node_modules/d3-sankey/node_modules/d3-path": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz", + "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==", + "license": "BSD-3-Clause" + }, + "node_modules/d3-sankey/node_modules/d3-shape": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz", + "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==", + "license": "BSD-3-Clause", + "dependencies": { + "d3-path": "1" } }, - "node_modules/@ungap/structured-clone": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.1.tgz", - "integrity": "sha512-fEzPV3hSkSMltkw152tJKNARhOupqbH96MZWyRjNaYZOMIzbrTeQDG+MTc6Mr2pgzFQzFxAfmhGDNP5QK++2ZA==", - "dev": true, + "node_modules/d3-sankey/node_modules/internmap": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz", + "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==", "license": "ISC" }, - "node_modules/@vitejs/plugin-react": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.3.4.tgz", - "integrity": "sha512-SCCPBJtYLdE8PX/7ZQAs1QAZ8Jqwih+0VBLum1EGqmCCQal+MIUqLCzj3ZUy8ufbC0cAM4LRlSTm7IQJwWT4ug==", - "dev": true, - "license": "MIT", + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "license": "ISC", "dependencies": { - "@babel/core": "^7.26.0", - "@babel/plugin-transform-react-jsx-self": "^7.25.9", - "@babel/plugin-transform-react-jsx-source": "^7.25.9", - "@types/babel__core": "^7.20.5", - "react-refresh": "^0.14.2" + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" }, "engines": { - "node": "^14.18.0 || >=16.0.0" - }, - "peerDependencies": { - "vite": "^4.2.0 || ^5.0.0 || ^6.0.0" + "node": ">=12" } }, - "node_modules/acorn": { - "version": "8.14.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", - "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", - "license": "MIT", - "bin": { - "acorn": "bin/acorn" + "node_modules/d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3", + "d3-interpolate": "1 - 3" }, "engines": { - "node": ">=0.4.0" + "node": ">=12" } }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + "node_modules/d3-selection": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "license": "ISC", + "engines": { + "node": ">=12" } }, - "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "license": "MIT", + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "license": "ISC", "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" + "d3-path": "^3.1.0" }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "license": "MIT", "engines": { - "node": ">=8" + "node": ">=12" } }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "license": "MIT", + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "license": "ISC", "dependencies": { - "color-convert": "^2.0.1" + "d3-array": "2 - 3" }, "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "node": ">=12" } }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "license": "Python-2.0" - }, - "node_modules/array-buffer-byte-length": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", - "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", - "dev": true, - "license": "MIT", + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "license": "ISC", "dependencies": { - "call-bound": "^1.0.3", - "is-array-buffer": "^3.0.5" + "d3-time": "1 - 3" }, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=12" } }, - "node_modules/array-includes": { - "version": "3.1.8", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", - "integrity": "sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==", - "dev": true, - "license": "MIT", + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-transition": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", + "license": "ISC", "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.2", - "es-object-atoms": "^1.0.0", - "get-intrinsic": "^1.2.4", - "is-string": "^1.0.7" + "d3-color": "1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" }, "engines": { - "node": ">= 0.4" + "node": ">=12" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "peerDependencies": { + "d3-selection": "2 - 3" } }, - "node_modules/array.prototype.findlast": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", - "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", - "dev": true, - "license": "MIT", + "node_modules/d3-zoom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "license": "ISC", "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.2", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "es-shim-unscopables": "^1.0.2" + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" }, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=12" } }, - "node_modules/array.prototype.flat": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz", - "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==", - "dev": true, + "node_modules/dagre-d3-es": { + "version": "7.0.13", + "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.13.tgz", + "integrity": "sha512-efEhnxpSuwpYOKRm/L5KbqoZmNNukHa/Flty4Wp62JRvgH2ojwVgPgdYyr4twpieZnyRDdIH7PY2mopX26+j2Q==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.5", - "es-shim-unscopables": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "d3": "^7.9.0", + "lodash-es": "^4.17.21" } }, - "node_modules/array.prototype.flatmap": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz", - "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==", + "node_modules/data-view-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", + "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", "dev": true, "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.5", - 
"es-shim-unscopables": "^1.0.2" + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -3572,37 +6307,34 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array.prototype.tosorted": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz", - "integrity": "sha512-p6Fx8B7b7ZhL/gmUsAy0D15WhvDccw3mnGNbZpi3pmeJdxtWsj2jEaI4Y6oo3XiHfzuSgPwKc04MYt6KgvC/wA==", + "node_modules/data-view-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", + "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", "dev": true, "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.3", + "call-bound": "^1.0.3", "es-errors": "^1.3.0", - "es-shim-unscopables": "^1.0.2" + "is-data-view": "^1.0.2" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/inspect-js" } }, - "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", - "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", + "node_modules/data-view-byte-offset": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", + "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", "dev": true, "license": "MIT", "dependencies": { - "array-buffer-byte-length": "^1.0.1", - "call-bind": "^1.0.8", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.5", + "call-bound": "^1.0.2", "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6", - "is-array-buffer": "^3.0.4" + "is-data-view": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -3611,146 +6343,77 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "license": "MIT" - }, - "node_modules/attr-accept": { - "version": "2.2.5", - "resolved": "https://registry.npmjs.org/attr-accept/-/attr-accept-2.2.5.tgz", - "integrity": "sha512-0bDNnY/u6pPwHDMoF0FieU354oBi0a8rD9FcsLwzcGWbc8KS8KPIi7y+s13OlVY+gMWc/9xEMUgNE6Qm8ZllYQ==", + "node_modules/date-fns": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-3.6.0.tgz", + "integrity": "sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==", "license": "MIT", - "engines": { - "node": ">=4" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/kossnocorp" } }, - "node_modules/autosuggest-highlight": { - "version": "3.3.4", - "resolved": "https://registry.npmjs.org/autosuggest-highlight/-/autosuggest-highlight-3.3.4.tgz", - "integrity": "sha512-j6RETBD2xYnrVcoV1S5R4t3WxOlWZKyDQjkwnggDPSjF5L4jV98ZltBpvPvbkM1HtoSe5o+bNrTHyjPbieGeYA==", - "license": "MIT", - "dependencies": { - "remove-accents": "^0.4.2" - } + "node_modules/dayjs": { + "version": "1.11.19", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.19.tgz", + "integrity": 
"sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==", + "license": "MIT" }, - "node_modules/available-typed-arrays": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", - "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", - "dev": true, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "license": "MIT", "dependencies": { - "possible-typed-array-names": "^1.0.0" + "ms": "^2.1.3" }, "engines": { - "node": ">= 0.4" + "node": ">=6.0" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/axios": { - "version": "1.7.9", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz", - "integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==", - "license": "MIT", - "dependencies": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", - "proxy-from-env": "^1.1.0" + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/babel-plugin-macros": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz", - "integrity": "sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg==", + "node_modules/decode-named-character-reference": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.3.0.tgz", + "integrity": "sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.12.5", - "cosmiconfig": "^7.0.0", - "resolve": "^1.19.0" + "character-entities": "^2.0.0" }, - "engines": { - "node": ">=10", - "npm": ">=6" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true, - "license": "MIT" - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, + "node_modules/decode-uri-component": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.4.1.tgz", + "integrity": "sha512-+8VxcR21HhTy8nOt6jf20w0c9CADrw1O8d+VZ/YzzCt4bJ3uBjw+D1q2osAB8RnpwwaeYBxy0HyKQxD5JBMuuQ==", "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, "engines": { - "node": ">=8" + "node": ">=14.16" } }, - "node_modules/browserslist": { - "version": "4.24.4", - 
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", - "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "caniuse-lite": "^1.0.30001688", - "electron-to-chromium": "^1.5.73", - "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.1" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } + "license": "MIT" }, - "node_modules/call-bind": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", - "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "license": "MIT", "dependencies": { - "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", - "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.2" + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -3759,28 +6422,15 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/call-bind-apply-helpers": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz", - "integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/call-bound": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", - "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", - "dev": true, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", "license": "MIT", "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "get-intrinsic": "^1.2.6" + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" }, "engines": { "node": ">= 0.4" @@ -3789,757 +6439,963 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "node_modules/delaunator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", + "integrity": 
"sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", + "license": "ISC", + "dependencies": { + "robust-predicates": "^3.0.2" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", "license": "MIT", "engines": { - "node": ">=6" + "node": ">=0.4.0" } }, - "node_modules/caniuse-lite": { - "version": "1.0.30001692", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001692.tgz", - "integrity": "sha512-A95VKan0kdtrsnMubMKxEKUKImOPSuCpYgxSQBo036P5YYgVIcOYJEgt/txJWqObiRQeISNCfef9nvlQ0vbV7A==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "CC-BY-4.0" - }, - "node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" + "node": ">=6" } }, - "node_modules/chart.js": { - "version": "4.4.7", - "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.7.tgz", - "integrity": "sha512-pwkcKfdzTMAU/+jNosKhNL2bHtJc/sSmYgVbuGTEDhzkrhmyihmP7vUc/5ZK9WopidMDHNe3Wm7jOd/WhuHWuw==", + "node_modules/detect-node-es": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", + "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==", + "license": "MIT" + }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", "license": "MIT", "dependencies": { - "@kurkle/color": "^0.3.0" + "dequal": "^2.0.0" }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/diacritic": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/diacritic/-/diacritic-0.0.2.tgz", + "integrity": "sha512-iQCeDkSPwkfwWPr+HZZ49WRrM2FSI9097Q9w7agyRCdLcF9Eh2Ek0sHKcmMWx2oZVBjRBE/sziGFjZu0uf1Jbg==", + "license": "MIT" + }, + "node_modules/diff": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.2.tgz", + "integrity": "sha512-vtcDfH3TOjP8UekytvnHH1o1P4FcUdt4eQ1Y+Abap1tk/OB2MWQvcwS2ClCd1zuIhc3JKOx6p3kod8Vfys3E+A==", + "license": "BSD-3-Clause", "engines": { - "pnpm": ">=8" + "node": ">=0.3.1" } }, - "node_modules/chevrotain": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.0.3.tgz", - "integrity": "sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==", + 
"node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, "license": "Apache-2.0", "dependencies": { - "@chevrotain/cst-dts-gen": "11.0.3", - "@chevrotain/gast": "11.0.3", - "@chevrotain/regexp-to-ast": "11.0.3", - "@chevrotain/types": "11.0.3", - "@chevrotain/utils": "11.0.3", - "lodash-es": "4.17.21" + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" } }, - "node_modules/chevrotain-allstar": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/chevrotain-allstar/-/chevrotain-allstar-0.3.1.tgz", - "integrity": "sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==", + "node_modules/dom-helpers": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", + "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", "license": "MIT", "dependencies": { - "lodash-es": "^4.17.21" - }, - "peerDependencies": { - "chevrotain": "^11.0.0" + "@babel/runtime": "^7.8.7", + "csstype": "^3.0.2" } }, - "node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "license": "ISC", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" + "node_modules/dompurify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz", + "integrity": "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==", + "license": "(MPL-2.0 OR Apache-2.0)", + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" } }, - "node_modules/clsx": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", - "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", - "license": "MIT", + "node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "license": "BSD-2-Clause", "engines": { - "node": ">=6" + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" } }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "node_modules/downshift": { + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/downshift/-/downshift-7.6.2.tgz", + "integrity": "sha512-iOv+E1Hyt3JDdL9yYcOgW7nZ7GQ2Uz6YbggwXvKUSleetYhU2nXD482Rz6CzvM4lvI1At34BYruKAL4swRGxaA==", "license": "MIT", "dependencies": { - "color-name": "~1.1.4" + "@babel/runtime": "^7.14.8", + "compute-scroll-into-view": "^2.0.4", + "prop-types": "^15.7.2", + "react-is": "^17.0.2", + "tslib": "^2.3.0" }, - "engines": { - "node": ">=7.0.0" + "peerDependencies": { + "react": ">=16.12.0" } }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "node_modules/downshift/node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", "license": "MIT" }, - "node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", "license": "MIT", "dependencies": { - "delayed-stream": "~1.0.0" + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" }, "engines": { - "node": ">= 0.8" - } - }, - "node_modules/commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", - "license": "MIT", - "engines": { - "node": ">= 10" + "node": ">= 0.4" } }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "node_modules/electron-to-chromium": { + "version": "1.5.286", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.286.tgz", + "integrity": "sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A==", "dev": true, - "license": "MIT" - }, - "node_modules/confbox": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", - "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", - "license": "MIT" + "license": "ISC" }, - "node_modules/convert-source-map": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", - "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "license": "MIT" }, - "node_modules/copy-to-clipboard": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz", - "integrity": "sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA==", - "license": "MIT", - "dependencies": { - "toggle-selection": "^1.0.6" - } - }, - "node_modules/cose-base": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz", - "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==", + "node_modules/error-ex": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": 
"sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", "license": "MIT", "dependencies": { - "layout-base": "^1.0.0" + "is-arrayish": "^0.2.1" } }, - "node_modules/cosmiconfig": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz", - "integrity": "sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==", + "node_modules/es-abstract": { + "version": "1.24.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.1.tgz", + "integrity": "sha512-zHXBLhP+QehSSbsS9Pt23Gg964240DPd6QCf8WpkqEXxQ7fhdZzYsocOr5u7apWonsS5EjZDmTF+/slGMyasvw==", + "dev": true, "license": "MIT", "dependencies": { - "@types/parse-json": "^4.0.0", - "import-fresh": "^3.2.1", - "parse-json": "^5.0.0", - "path-type": "^4.0.0", - "yaml": "^1.10.0" + "array-buffer-byte-length": "^1.0.2", + "arraybuffer.prototype.slice": "^1.0.4", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "data-view-buffer": "^1.0.2", + "data-view-byte-length": "^1.0.2", + "data-view-byte-offset": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "es-set-tostringtag": "^2.1.0", + "es-to-primitive": "^1.3.0", + "function.prototype.name": "^1.1.8", + "get-intrinsic": "^1.3.0", + "get-proto": "^1.0.1", + "get-symbol-description": "^1.1.0", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "internal-slot": "^1.1.0", + "is-array-buffer": "^3.0.5", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.2", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.2.1", + "is-set": "^2.0.3", + "is-shared-array-buffer": "^1.0.4", + "is-string": "^1.1.1", + "is-typed-array": "^1.1.15", + "is-weakref": "^1.1.1", + "math-intrinsics": "^1.1.0", + "object-inspect": "^1.13.4", + "object-keys": "^1.1.1", + "object.assign": "^4.1.7", + "own-keys": "^1.0.1", + "regexp.prototype.flags": "^1.5.4", + "safe-array-concat": "^1.1.3", + "safe-push-apply": "^1.0.0", + "safe-regex-test": "^1.1.0", + "set-proto": "^1.0.0", + "stop-iteration-iterator": "^1.1.0", + "string.prototype.trim": "^1.2.10", + "string.prototype.trimend": "^1.0.9", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.3", + "typed-array-byte-length": "^1.0.3", + "typed-array-byte-offset": "^1.0.4", + "typed-array-length": "^1.0.7", + "unbox-primitive": "^1.1.0", + "which-typed-array": "^1.1.19" }, "engines": { - "node": ">=10" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/cosmiconfig/node_modules/yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", - "license": "ISC", + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", "engines": { - "node": ">= 6" + "node": ">= 0.4" } }, - "node_modules/crelt": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", - "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==", - 
"license": "MIT" - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "dev": true, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, "engines": { - "node": ">= 8" + "node": ">= 0.4" } }, - "node_modules/css-mediaquery": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/css-mediaquery/-/css-mediaquery-0.1.2.tgz", - "integrity": "sha512-COtn4EROW5dBGlE/4PiKnh6rZpAPxDeFLaEEwt4i10jpDMFt2EhQGS79QmmrO+iKCHv0PU/HrOWEhijFd1x99Q==", - "license": "BSD" - }, - "node_modules/csstype": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", - "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "license": "MIT" - }, - "node_modules/cytoscape": { - "version": "3.31.0", - "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.31.0.tgz", - "integrity": "sha512-zDGn1K/tfZwEnoGOcHc0H4XazqAAXAuDpcYw9mUnUjATjqljyCNGJv8uEvbvxGaGHaVshxMecyl6oc6uKzRfbw==", + "node_modules/es-iterator-helpers": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.2.2.tgz", + "integrity": "sha512-BrUQ0cPTB/IwXj23HtwHjS9n7O4h9FX94b4xc5zlTHxeLgTAdzYUDyy6KdExAl9lbN5rtfe44xpjpmj9grxs5w==", + "dev": true, "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.24.1", + "es-errors": "^1.3.0", + "es-set-tostringtag": "^2.1.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.3.0", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "internal-slot": "^1.1.0", + "iterator.prototype": "^1.1.5", + "safe-array-concat": "^1.1.3" + }, "engines": { - "node": ">=0.10" + "node": ">= 0.4" } }, - "node_modules/cytoscape-cose-bilkent": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz", - "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==", + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", "license": "MIT", "dependencies": { - "cose-base": "^1.0.0" + "es-errors": "^1.3.0" }, - "peerDependencies": { - "cytoscape": "^3.2.0" + "engines": { + "node": ">= 0.4" } }, - "node_modules/cytoscape-fcose": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz", - "integrity": "sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==", + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": 
"sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", "license": "MIT", "dependencies": { - "cose-base": "^2.2.0" + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" }, - "peerDependencies": { - "cytoscape": "^3.2.0" + "engines": { + "node": ">= 0.4" } }, - "node_modules/cytoscape-fcose/node_modules/cose-base": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-2.2.0.tgz", - "integrity": "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==", + "node_modules/es-shim-unscopables": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz", + "integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==", + "dev": true, "license": "MIT", "dependencies": { - "layout-base": "^2.0.0" + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" } }, - "node_modules/cytoscape-fcose/node_modules/layout-base": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-2.0.1.tgz", - "integrity": "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==", - "license": "MIT" - }, - "node_modules/d3": { - "version": "7.9.0", - "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", - "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", - "license": "ISC", + "node_modules/es-to-primitive": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", + "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", + "dev": true, + "license": "MIT", "dependencies": { - "d3-array": "3", - "d3-axis": "3", - "d3-brush": "3", - "d3-chord": "3", - "d3-color": "3", - "d3-contour": "4", - "d3-delaunay": "6", - "d3-dispatch": "3", - "d3-drag": "3", - "d3-dsv": "3", - "d3-ease": "3", - "d3-fetch": "3", - "d3-force": "3", - "d3-format": "3", - "d3-geo": "3", - "d3-hierarchy": "3", - "d3-interpolate": "3", - "d3-path": "3", - "d3-polygon": "3", - "d3-quadtree": "3", - "d3-random": "3", - "d3-scale": "4", - "d3-scale-chromatic": "3", - "d3-selection": "3", - "d3-shape": "3", - "d3-time": "3", - "d3-time-format": "4", - "d3-timer": "3", - "d3-transition": "3", - "d3-zoom": "3" + "is-callable": "^1.2.7", + "is-date-object": "^1.0.5", + "is-symbol": "^1.0.4" }, "engines": { - "node": ">=12" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/d3-array": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", - "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "node_modules/es5-ext": { + "version": "0.10.64", + "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.64.tgz", + "integrity": "sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg==", + "hasInstallScript": true, "license": "ISC", "dependencies": { - "internmap": "1 - 2" + "es6-iterator": "^2.0.3", + "es6-symbol": "^3.1.3", + "esniff": "^2.0.1", + "next-tick": "^1.1.0" }, "engines": { - "node": ">=12" + "node": ">=0.10" } }, - "node_modules/d3-axis": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", - 
"integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", - "license": "ISC", - "engines": { - "node": ">=12" + "node_modules/es6-iterator": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz", + "integrity": "sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==", + "license": "MIT", + "dependencies": { + "d": "1", + "es5-ext": "^0.10.35", + "es6-symbol": "^3.1.1" } }, - "node_modules/d3-brush": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", - "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", + "node_modules/es6-symbol": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.4.tgz", + "integrity": "sha512-U9bFFjX8tFiATgtkJ1zg25+KviIXpgRvRHS8sau3GfhVzThRQrOeksPeT0BWW2MNZs1OEWJ1DPXOQMn0KKRkvg==", "license": "ISC", "dependencies": { - "d3-dispatch": "1 - 3", - "d3-drag": "2 - 3", - "d3-interpolate": "1 - 3", - "d3-selection": "3", - "d3-transition": "3" + "d": "^1.0.2", + "ext": "^1.7.0" }, "engines": { - "node": ">=12" + "node": ">=0.12" } }, - "node_modules/d3-chord": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", - "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", - "license": "ISC", - "dependencies": { - "d3-path": "1 - 3" + "node_modules/esbuild": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.2.tgz", + "integrity": "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" }, "engines": { - "node": ">=12" + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.2", + "@esbuild/android-arm": "0.27.2", + "@esbuild/android-arm64": "0.27.2", + "@esbuild/android-x64": "0.27.2", + "@esbuild/darwin-arm64": "0.27.2", + "@esbuild/darwin-x64": "0.27.2", + "@esbuild/freebsd-arm64": "0.27.2", + "@esbuild/freebsd-x64": "0.27.2", + "@esbuild/linux-arm": "0.27.2", + "@esbuild/linux-arm64": "0.27.2", + "@esbuild/linux-ia32": "0.27.2", + "@esbuild/linux-loong64": "0.27.2", + "@esbuild/linux-mips64el": "0.27.2", + "@esbuild/linux-ppc64": "0.27.2", + "@esbuild/linux-riscv64": "0.27.2", + "@esbuild/linux-s390x": "0.27.2", + "@esbuild/linux-x64": "0.27.2", + "@esbuild/netbsd-arm64": "0.27.2", + "@esbuild/netbsd-x64": "0.27.2", + "@esbuild/openbsd-arm64": "0.27.2", + "@esbuild/openbsd-x64": "0.27.2", + "@esbuild/openharmony-arm64": "0.27.2", + "@esbuild/sunos-x64": "0.27.2", + "@esbuild/win32-arm64": "0.27.2", + "@esbuild/win32-ia32": "0.27.2", + "@esbuild/win32-x64": "0.27.2" } }, - "node_modules/d3-color": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", - "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", - "license": "ISC", + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "license": "MIT", "engines": { - "node": ">=12" + "node": ">=6" } }, - "node_modules/d3-contour": { - "version": "4.0.2", - 
"resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", - "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", - "license": "ISC", - "dependencies": { - "d3-array": "^3.2.0" - }, + "node_modules/escape-carriage": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/escape-carriage/-/escape-carriage-1.3.1.tgz", + "integrity": "sha512-GwBr6yViW3ttx1kb7/Oh+gKQ1/TrhYwxKqVmg5gS+BK+Qe2KrOa/Vh7w3HPBvgGf0LfcDGoY9I6NHKoA5Hozhw==", + "license": "MIT" + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "license": "MIT", "engines": { - "node": ">=12" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/d3-delaunay": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", - "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", - "license": "ISC", + "node_modules/eslint": { + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.2.tgz", + "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", + "dev": true, + "license": "MIT", "dependencies": { - "delaunator": "5" + "@eslint-community/eslint-utils": "^4.8.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.1", + "@eslint/config-helpers": "^0.4.2", + "@eslint/core": "^0.17.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.39.2", + "@eslint/plugin-kit": "^0.4.1", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" }, "engines": { - "node": ">=12" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } } }, - "node_modules/d3-dispatch": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", - "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", - "license": "ISC", + "node_modules/eslint-plugin-react": { + "version": "7.37.5", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.37.5.tgz", + "integrity": "sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-includes": "^3.1.8", + "array.prototype.findlast": "^1.2.5", + 
"array.prototype.flatmap": "^1.3.3", + "array.prototype.tosorted": "^1.1.4", + "doctrine": "^2.1.0", + "es-iterator-helpers": "^1.2.1", + "estraverse": "^5.3.0", + "hasown": "^2.0.2", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.9", + "object.fromentries": "^2.0.8", + "object.values": "^1.2.1", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.5", + "semver": "^6.3.1", + "string.prototype.matchall": "^4.0.12", + "string.prototype.repeat": "^1.0.0" + }, "engines": { - "node": ">=12" + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" } }, - "node_modules/d3-drag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", - "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", - "license": "ISC", + "node_modules/eslint-plugin-react-hooks": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-7.0.0.tgz", + "integrity": "sha512-fNXaOwvKwq2+pXiRpXc825Vd63+KM4DLL40Rtlycb8m7fYpp6efrTp1sa6ZbP/Ap58K2bEKFXRmhURE+CJAQWw==", + "dev": true, + "license": "MIT", "dependencies": { - "d3-dispatch": "1 - 3", - "d3-selection": "3" + "@babel/core": "^7.24.4", + "@babel/parser": "^7.24.4", + "hermes-parser": "^0.25.1", + "zod": "^3.22.4 || ^4.0.0", + "zod-validation-error": "^3.0.3 || ^4.0.0" }, "engines": { - "node": ">=12" + "node": ">=18" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0" } }, - "node_modules/d3-dsv": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", - "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", - "license": "ISC", + "node_modules/eslint-plugin-react/node_modules/resolve": { + "version": "2.0.0-next.5", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", + "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", + "dev": true, + "license": "MIT", "dependencies": { - "commander": "7", - "iconv-lite": "0.6", - "rw": "1" + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { - "csv2json": "bin/dsv2json.js", - "csv2tsv": "bin/dsv2dsv.js", - "dsv2dsv": "bin/dsv2dsv.js", - "dsv2json": "bin/dsv2json.js", - "json2csv": "bin/json2dsv.js", - "json2dsv": "bin/json2dsv.js", - "json2tsv": "bin/json2dsv.js", - "tsv2csv": "bin/dsv2dsv.js", - "tsv2json": "bin/dsv2json.js" + "resolve": "bin/resolve" }, - "engines": { - "node": ">=12" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/d3-ease": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", - "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=12" + "node_modules/eslint-plugin-react/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" } }, - "node_modules/d3-fetch": { + "node_modules/eslint-plugin-security": { "version": "3.0.1", - "resolved": 
"https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", - "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", - "license": "ISC", + "resolved": "https://registry.npmjs.org/eslint-plugin-security/-/eslint-plugin-security-3.0.1.tgz", + "integrity": "sha512-XjVGBhtDZJfyuhIxnQ/WMm385RbX3DBu7H1J7HNNhmB2tnGxMeqVSnYv79oAj992ayvIBZghsymwkYFS6cGH4Q==", + "dev": true, + "license": "Apache-2.0", "dependencies": { - "d3-dsv": "1 - 3" + "safe-regex": "^2.1.1" }, "engines": { - "node": ">=12" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/d3-force": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", - "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", - "license": "ISC", + "node_modules/eslint-scope": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", "dependencies": { - "d3-dispatch": "1 - 3", - "d3-quadtree": "1 - 3", - "d3-timer": "1 - 3" + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" }, "engines": { - "node": ">=12" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/d3-format": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", - "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", - "license": "ISC", + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", "engines": { - "node": ">=12" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/d3-geo": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", - "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", - "license": "ISC", - "dependencies": { - "d3-array": "2.5.0 - 3" - }, + "node_modules/eslint/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", "engines": { - "node": ">=12" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/d3-hierarchy": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", - "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", + "node_modules/esniff": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/esniff/-/esniff-2.0.1.tgz", + "integrity": "sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==", "license": "ISC", + "dependencies": { + "d": "^1.0.1", + 
"es5-ext": "^0.10.62", + "event-emitter": "^0.3.5", + "type": "^2.7.2" + }, "engines": { - "node": ">=12" + "node": ">=0.10" } }, - "node_modules/d3-interpolate": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", - "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", - "license": "ISC", + "node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", "dependencies": { - "d3-color": "1 - 3" + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" }, "engines": { - "node": ">=12" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/d3-path": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", - "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", - "license": "ISC", + "node_modules/espree/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", "engines": { - "node": ">=12" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/d3-polygon": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", - "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", - "license": "ISC", + "node_modules/esquery": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz", + "integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, "engines": { - "node": ">=12" + "node": ">=0.10" } }, - "node_modules/d3-quadtree": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", - "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", - "license": "ISC", + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, "engines": { - "node": ">=12" + "node": ">=4.0" } }, - "node_modules/d3-random": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", - "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", - "license": "ISC", + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", 
"engines": { - "node": ">=12" + "node": ">=4.0" } }, - "node_modules/d3-sankey": { - "version": "0.12.3", - "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz", - "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==", - "license": "BSD-3-Clause", - "dependencies": { - "d3-array": "1 - 2", - "d3-shape": "^1.2.0" + "node_modules/estree-util-is-identifier-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", + "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/d3-sankey/node_modules/d3-array": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", - "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==", - "license": "BSD-3-Clause", + "node_modules/estree-util-visit": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/estree-util-visit/-/estree-util-visit-2.0.0.tgz", + "integrity": "sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww==", + "license": "MIT", "dependencies": { - "internmap": "^1.0.0" + "@types/estree-jsx": "^1.0.0", + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/d3-sankey/node_modules/d3-path": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz", - "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==", - "license": "BSD-3-Clause" - }, - "node_modules/d3-sankey/node_modules/d3-shape": { - "version": "1.3.7", - "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz", - "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==", - "license": "BSD-3-Clause", - "dependencies": { - "d3-path": "1" + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" } }, - "node_modules/d3-sankey/node_modules/internmap": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz", - "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==", - "license": "ISC" - }, - "node_modules/d3-scale": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", - "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", - "license": "ISC", + "node_modules/event-emitter": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz", + "integrity": "sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==", + "license": "MIT", "dependencies": { - "d3-array": "2.10.0 - 3", - "d3-format": "1 - 3", - "d3-interpolate": "1.2.0 - 3", - "d3-time": "2.1.1 - 3", - "d3-time-format": "2 - 4" - }, - "engines": { - "node": ">=12" + "d": 
"1", + "es5-ext": "~0.10.14" } }, - "node_modules/d3-scale-chromatic": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", - "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", + "node_modules/eventemitter3": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.4.tgz", + "integrity": "sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==", + "license": "MIT" + }, + "node_modules/ext": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/ext/-/ext-1.7.0.tgz", + "integrity": "sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw==", "license": "ISC", "dependencies": { - "d3-color": "1 - 3", - "d3-interpolate": "1 - 3" - }, - "engines": { - "node": ">=12" + "type": "^2.7.2" } }, - "node_modules/d3-selection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", - "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fastq": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", + "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "dev": true, "license": "ISC", - "engines": { - "node": ">=12" + "dependencies": { + "reusify": "^1.0.4" } }, - "node_modules/d3-shape": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", - "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", - "license": "ISC", + "node_modules/fault": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fault/-/fault-2.0.1.tgz", + "integrity": "sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==", + "license": "MIT", "dependencies": { - "d3-path": "^3.1.0" + "format": "^0.2.0" }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", "engines": { - "node": ">=12" + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + 
"picomatch": { + "optional": true + } } }, - "node_modules/d3-time": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", - "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", - "license": "ISC", + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", "dependencies": { - "d3-array": "2 - 3" + "flat-cache": "^4.0.0" }, "engines": { - "node": ">=12" + "node": ">=16.0.0" } }, - "node_modules/d3-time-format": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", - "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", - "license": "ISC", + "node_modules/file-selector": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/file-selector/-/file-selector-2.1.2.tgz", + "integrity": "sha512-QgXo+mXTe8ljeqUFaX3QVHc5osSItJ/Km+xpocx0aSqWGMSCf6qYs/VnzZgS864Pjn5iceMRFigeAV7AfTlaig==", + "license": "MIT", "dependencies": { - "d3-time": "1 - 3" + "tslib": "^2.7.0" }, "engines": { - "node": ">=12" + "node": ">= 12" } }, - "node_modules/d3-timer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", - "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", - "license": "ISC", + "node_modules/filter-obj": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-5.1.0.tgz", + "integrity": "sha512-qWeTREPoT7I0bifpPUXtxkZJ1XJzxWtfoWWkdVGqa+eCr3SHW/Ocp89o8vLvbUuQnadybJpjOKu4V+RwO6sGng==", + "license": "MIT", "engines": { - "node": ">=12" + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/d3-transition": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", - "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", - "license": "ISC", + "node_modules/find-root": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz", + "integrity": "sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==", + "license": "MIT" + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", "dependencies": { - "d3-color": "1 - 3", - "d3-dispatch": "1 - 3", - "d3-ease": "1 - 3", - "d3-interpolate": "1 - 3", - "d3-timer": "1 - 3" + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" }, "engines": { - "node": ">=12" + "node": ">=10" }, - "peerDependencies": { - "d3-selection": "2 - 3" + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/d3-zoom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", - "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", - "license": "ISC", + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": 
"https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", "dependencies": { - "d3-dispatch": "1 - 3", - "d3-drag": "2 - 3", - "d3-interpolate": "1 - 3", - "d3-selection": "2 - 3", - "d3-transition": "2 - 3" + "flatted": "^3.2.9", + "keyv": "^4.5.4" }, "engines": { - "node": ">=12" + "node": ">=16" } }, - "node_modules/dagre-d3-es": { - "version": "7.0.11", - "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.11.tgz", - "integrity": "sha512-tvlJLyQf834SylNKax8Wkzco/1ias1OPw8DcUMDE7oUIoSEW25riQVuiu/0OWEFqT0cxHT3Pa9/D82Jr47IONw==", + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], "license": "MIT", - "dependencies": { - "d3": "^7.9.0", - "lodash-es": "^4.17.21" + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } } }, - "node_modules/data-view-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", - "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", + "node_modules/for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", "dev": true, "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", - "es-errors": "^1.3.0", - "is-data-view": "^1.0.2" + "is-callable": "^1.2.7" }, "engines": { "node": ">= 0.4" @@ -4548,34 +7404,74 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/data-view-byte-length": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", - "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", + "node_modules/form-data": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", + "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/format": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/format/-/format-0.2.2.tgz", + "integrity": "sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": 
"sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "es-errors": "^1.3.0", - "is-data-view": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, "funding": { - "url": "https://github.com/sponsors/inspect-js" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/data-view-byte-offset": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", - "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", + "node_modules/function.prototype.name": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", + "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", "dev": true, "license": "MIT", "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "is-data-view": "^1.0.1" + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "functions-have-names": "^1.2.3", + "hasown": "^2.0.2", + "is-callable": "^1.2.7" }, "engines": { "node": ">= 0.4" @@ -4584,81 +7480,61 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/date-fns": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-3.6.0.tgz", - "integrity": "sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==", + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "dev": true, "license": "MIT", "funding": { - "type": "github", - "url": "https://github.com/sponsors/kossnocorp" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/dayjs": { - "version": "1.11.13", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.13.tgz", - "integrity": "sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==", - "license": "MIT" - }, - "node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "node_modules/generator-function": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/generator-function/-/generator-function-2.0.1.tgz", + "integrity": "sha512-SFdFmIJi+ybC0vjlHN0ZGVGHc3lgE0DxPAT0djjVg+kjOnSqclqmj0KQ7ykTOLP6YxoqOvuAODGdcHJn+43q3g==", + "dev": true, "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, 
"engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } + "node": ">= 0.4" } }, - "node_modules/decode-uri-component": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.4.1.tgz", - "integrity": "sha512-+8VxcR21HhTy8nOt6jf20w0c9CADrw1O8d+VZ/YzzCt4bJ3uBjw+D1q2osAB8RnpwwaeYBxy0HyKQxD5JBMuuQ==", + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, "license": "MIT", "engines": { - "node": ">=14.16" + "node": ">=6.9.0" } }, - "node_modules/deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/define-data-property": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", - "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", - "license": "MIT", - "dependencies": { - "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "gopd": "^1.0.1" - }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "license": "ISC", "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": "6.* || 8.* || >= 10.*" } }, - "node_modules/define-properties": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", - "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", "license": "MIT", "dependencies": { - "define-data-property": "^1.0.1", - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -4667,264 +7543,179 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/delaunator": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", - "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", - "license": "ISC", - "dependencies": { - "robust-predicates": "^3.0.2" - } - }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "node_modules/get-nonce": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", + "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", "license": "MIT", "engines": { - "node": ">=0.4.0" + "node": ">=6" } }, - "node_modules/doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dev": true, - "license": "Apache-2.0", + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", "dependencies": { - "esutils": "^2.0.2" + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/dom-helpers": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", - "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.8.7", - "csstype": "^3.0.2" - } - }, - "node_modules/dompurify": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.3.tgz", - "integrity": "sha512-U1U5Hzc2MO0oW3DF+G9qYN0aT7atAou4AgI0XjWz061nyBPbdxkfdhfy5uMgGn6+oLFCfn44ZGbdDqCzVmlOWA==", - "license": "(MPL-2.0 OR Apache-2.0)", - "optionalDependencies": { - "@types/trusted-types": "^2.0.7" + "node": ">= 0.4" } }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "node_modules/get-symbol-description": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", + "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", + "dev": true, "license": "MIT", "dependencies": { - "call-bind-apply-helpers": "^1.0.1", + "call-bound": "^1.0.3", "es-errors": "^1.3.0", - "gopd": "^1.2.0" + "get-intrinsic": "^1.2.6" }, "engines": { "node": ">= 0.4" - } - }, - "node_modules/electron-to-chromium": { - "version": "1.5.80", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.80.tgz", - "integrity": "sha512-LTrKpW0AqIuHwmlVNV+cjFYTnXtM9K37OGhpe0ZI10ScPSxqVSryZHIY3WnCS5NSYbBODRTZyhRMS2h5FAEqAw==", - "dev": true, - "license": "ISC" - }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "license": "MIT" - }, - "node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" }, "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": 
"sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "license": "MIT", - "dependencies": { - "is-arrayish": "^0.2.1" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/es-abstract": { - "version": "1.23.9", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.9.tgz", - "integrity": "sha512-py07lI0wjxAC/DcfK1S6G7iANonniZwTISvdPzk9hzeH0IZIshbuuFxLIU96OyF89Yb9hiqWn8M/bY83KY5vzA==", + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", "dev": true, - "license": "MIT", + "license": "ISC", "dependencies": { - "array-buffer-byte-length": "^1.0.2", - "arraybuffer.prototype.slice": "^1.0.4", - "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "data-view-buffer": "^1.0.2", - "data-view-byte-length": "^1.0.2", - "data-view-byte-offset": "^1.0.1", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "es-set-tostringtag": "^2.1.0", - "es-to-primitive": "^1.3.0", - "function.prototype.name": "^1.1.8", - "get-intrinsic": "^1.2.7", - "get-proto": "^1.0.0", - "get-symbol-description": "^1.1.0", - "globalthis": "^1.0.4", - "gopd": "^1.2.0", - "has-property-descriptors": "^1.0.2", - "has-proto": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "internal-slot": "^1.1.0", - "is-array-buffer": "^3.0.5", - "is-callable": "^1.2.7", - "is-data-view": "^1.0.2", - "is-regex": "^1.2.1", - "is-shared-array-buffer": "^1.0.4", - "is-string": "^1.1.1", - "is-typed-array": "^1.1.15", - "is-weakref": "^1.1.0", - "math-intrinsics": "^1.1.0", - "object-inspect": "^1.13.3", - "object-keys": "^1.1.1", - "object.assign": "^4.1.7", - "own-keys": "^1.0.1", - "regexp.prototype.flags": "^1.5.3", - "safe-array-concat": "^1.1.3", - "safe-push-apply": "^1.0.0", - "safe-regex-test": "^1.1.0", - "set-proto": "^1.0.0", - "string.prototype.trim": "^1.2.10", - "string.prototype.trimend": "^1.0.9", - "string.prototype.trimstart": "^1.0.8", - "typed-array-buffer": "^1.0.3", - "typed-array-byte-length": "^1.0.3", - "typed-array-byte-offset": "^1.0.4", - "typed-array-length": "^1.0.7", - "unbox-primitive": "^1.1.0", - "which-typed-array": "^1.1.18" + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" }, "engines": { - "node": ">= 0.4" + "node": "*" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/es-define-property": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "license": "MIT", + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": 
"^4.0.3" + }, "engines": { - "node": ">= 0.4" + "node": ">=10.13.0" } }, - "node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "node_modules/globals": { + "version": "17.3.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-17.3.0.tgz", + "integrity": "sha512-yMqGUQVVCkD4tqjOJf3TnrvaaHDMYp4VlUSObbkIiuCPe/ofdMBFIAcBbCSRFWOnos6qRiTVStDwqPLUclaxIw==", + "dev": true, "license": "MIT", "engines": { - "node": ">= 0.4" + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/es-iterator-helpers": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.2.1.tgz", - "integrity": "sha512-uDn+FE1yrDzyC0pCo961B2IHbdM8y/ACZsKD4dG6WqrjV53BADjwa7D+1aom2rsNVfLyDgU/eigvlJGJ08OQ4w==", + "node_modules/globalthis": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", "dev": true, "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", "define-properties": "^1.2.1", - "es-abstract": "^1.23.6", - "es-errors": "^1.3.0", - "es-set-tostringtag": "^2.0.3", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.6", - "globalthis": "^1.0.4", - "gopd": "^1.2.0", - "has-property-descriptors": "^1.0.2", - "has-proto": "^1.2.0", - "has-symbols": "^1.1.0", - "internal-slot": "^1.1.0", - "iterator.prototype": "^1.1.4", - "safe-array-concat": "^1.1.3" + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/es-object-atoms": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.1.tgz", - "integrity": "sha512-BPOBuyUF9QIVhuNLhbToCLHP6+0MHwZ7xLBkPPCZqK4JmpJgGnv10035STzzQwFpqdzNFMB3irvDI63IagvDwA==", + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0" - }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/es-set-tostringtag": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", - "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true, + "license": "MIT" + }, + "node_modules/hachure-fill": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/hachure-fill/-/hachure-fill-0.5.2.tgz", + "integrity": "sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==", + "license": "MIT" + }, + "node_modules/has-bigints": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz", + "integrity": 
"sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==", "dev": true, "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" - }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/es-shim-unscopables": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", - "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "license": "MIT", "dependencies": { - "hasown": "^2.0.0" + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/es-to-primitive": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", - "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", + "node_modules/has-proto": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", + "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", "dev": true, "license": "MIT", "dependencies": { - "is-callable": "^1.2.7", - "is-date-object": "^1.0.5", - "is-symbol": "^1.0.4" + "dunder-proto": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -4933,586 +7724,574 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/esbuild": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.2.tgz", - "integrity": "sha512-+9egpBW8I3CD5XPe0n6BfT5fxLzxrlDzqydF3aviG+9ni1lDC/OvMHcxqEFV0+LANZG5R1bFMWfUrjVsdwxJvA==", - "dev": true, - "hasInstallScript": true, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" }, "engines": { - "node": ">=18" + "node": ">= 0.4" }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.24.2", - "@esbuild/android-arm": "0.24.2", - "@esbuild/android-arm64": "0.24.2", - "@esbuild/android-x64": "0.24.2", - "@esbuild/darwin-arm64": "0.24.2", - "@esbuild/darwin-x64": "0.24.2", - "@esbuild/freebsd-arm64": "0.24.2", - "@esbuild/freebsd-x64": "0.24.2", 
- "@esbuild/linux-arm": "0.24.2", - "@esbuild/linux-arm64": "0.24.2", - "@esbuild/linux-ia32": "0.24.2", - "@esbuild/linux-loong64": "0.24.2", - "@esbuild/linux-mips64el": "0.24.2", - "@esbuild/linux-ppc64": "0.24.2", - "@esbuild/linux-riscv64": "0.24.2", - "@esbuild/linux-s390x": "0.24.2", - "@esbuild/linux-x64": "0.24.2", - "@esbuild/netbsd-arm64": "0.24.2", - "@esbuild/netbsd-x64": "0.24.2", - "@esbuild/openbsd-arm64": "0.24.2", - "@esbuild/openbsd-x64": "0.24.2", - "@esbuild/sunos-x64": "0.24.2", - "@esbuild/win32-arm64": "0.24.2", - "@esbuild/win32-ia32": "0.24.2", - "@esbuild/win32-x64": "0.24.2" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, "engines": { - "node": ">=6" + "node": ">= 0.4" } }, - "node_modules/escape-string-regexp": { + "node_modules/hast-util-parse-selector": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", + "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", "license": "MIT", - "engines": { - "node": ">=10" + "dependencies": { + "@types/hast": "^3.0.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/eslint": { - "version": "9.18.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.18.0.tgz", - "integrity": "sha512-+waTfRWQlSbpt3KWE+CjrPPYnbq9kfZIYUqapc0uBXyjTp8aYXZDsUH16m39Ryq3NjAVP4tjuF7KaukeqoCoaA==", - "dev": true, + "node_modules/hastscript": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-9.0.1.tgz", + "integrity": "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==", "license": "MIT", "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.19.0", - "@eslint/core": "^0.10.0", - "@eslint/eslintrc": "^3.2.0", - "@eslint/js": "9.18.0", - "@eslint/plugin-kit": "^0.2.5", - "@humanfs/node": "^0.16.6", - "@humanwhocodes/module-importer": "^1.0.1", - "@humanwhocodes/retry": "^0.4.1", - "@types/estree": "^1.0.6", - "@types/json-schema": "^7.0.15", - "ajv": "^6.12.4", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.6", - "debug": "^4.3.2", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.2.0", - "eslint-visitor-keys": "^4.2.0", - "espree": "^10.3.0", - "esquery": "^1.5.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^8.0.0", - "find-up": "^5.0.0", - "glob-parent": "^6.0.2", - "ignore": "^5.2.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.1.2", 
- "natural-compare": "^1.4.0", - "optionator": "^0.9.3" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0" }, "funding": { - "url": "https://eslint.org/donate" - }, - "peerDependencies": { - "jiti": "*" - }, - "peerDependenciesMeta": { - "jiti": { - "optional": true - } + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/eslint-plugin-react": { - "version": "7.37.4", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.37.4.tgz", - "integrity": "sha512-BGP0jRmfYyvOyvMoRX/uoUeW+GqNj9y16bPQzqAHf3AYII/tDs+jMN0dBVkl88/OZwNGwrVFxE7riHsXVfy/LQ==", + "node_modules/hermes-estree": { + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/hermes-estree/-/hermes-estree-0.25.1.tgz", + "integrity": "sha512-0wUoCcLp+5Ev5pDW2OriHC2MJCbwLwuRx+gAqMTOkGKJJiBCLjtrvy4PWUGn6MIVefecRpzoOZ/UV6iGdOr+Cw==", + "dev": true, + "license": "MIT" + }, + "node_modules/hermes-parser": { + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/hermes-parser/-/hermes-parser-0.25.1.tgz", + "integrity": "sha512-6pEjquH3rqaI6cYAXYPcz9MS4rY6R4ngRgrgfDshRptUZIc3lw0MCIJIGDj9++mfySOuPTHB4nrSW99BCvOPIA==", "dev": true, "license": "MIT", "dependencies": { - "array-includes": "^3.1.8", - "array.prototype.findlast": "^1.2.5", - "array.prototype.flatmap": "^1.3.3", - "array.prototype.tosorted": "^1.1.4", - "doctrine": "^2.1.0", - "es-iterator-helpers": "^1.2.1", - "estraverse": "^5.3.0", - "hasown": "^2.0.2", - "jsx-ast-utils": "^2.4.1 || ^3.0.0", - "minimatch": "^3.1.2", - "object.entries": "^1.1.8", - "object.fromentries": "^2.0.8", - "object.values": "^1.2.1", - "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.5", - "semver": "^6.3.1", - "string.prototype.matchall": "^4.0.12", - "string.prototype.repeat": "^1.0.0" - }, + "hermes-estree": "0.25.1" + } + }, + "node_modules/highlight.js": { + "version": "10.7.3", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz", + "integrity": "sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==", + "license": "BSD-3-Clause", "engines": { - "node": ">=4" - }, - "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" + "node": "*" + } + }, + "node_modules/highlightjs-vue": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/highlightjs-vue/-/highlightjs-vue-1.0.0.tgz", + "integrity": "sha512-PDEfEF102G23vHmPhLyPboFCD+BkMGu+GuJe2d9/eH4FsCwvgBpnc9n0pGE+ffKdph38s6foEZiEjdgHdzp+IA==", + "license": "CC0-1.0" + }, + "node_modules/hoist-non-react-statics": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", + "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", + "license": "BSD-3-Clause", + "dependencies": { + "react-is": "^16.7.0" } }, - "node_modules/eslint-plugin-react-hooks": { - "version": "5.1.0-rc.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.1.0-rc.1.tgz", - "integrity": "sha512-nAD017D/00XFwjP4F7cXaIbCxQ9A4pGaqjLs5347px37w/WclOtPqz8bBiTQFoj+teVQei6Ahr1h1aZiuaXMSw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "eslint": 
"^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0" + "node_modules/hoist-non-react-statics/node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "license": "MIT" + }, + "node_modules/humanize-duration": { + "version": "3.33.2", + "resolved": "https://registry.npmjs.org/humanize-duration/-/humanize-duration-3.33.2.tgz", + "integrity": "sha512-K7Ny/ULO1hDm2nnhvAY+SJV1skxFb61fd073SG1IWJl+D44ULrruCuTyjHKjBVVcSuTlnY99DKtgEG39CM5QOQ==", + "license": "Unlicense", + "funding": { + "url": "https://github.com/sponsors/EvanHahn" } }, - "node_modules/eslint-plugin-react/node_modules/resolve": { - "version": "2.0.0-next.5", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", - "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", - "dev": true, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", "license": "MIT", "dependencies": { - "is-core-module": "^2.13.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" + "safer-buffer": ">= 2.1.2 < 3.0.0" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "engines": { + "node": ">=0.10.0" } }, - "node_modules/eslint-plugin-react/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" + "license": "MIT", + "engines": { + "node": ">= 4" } }, - "node_modules/eslint-plugin-security": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-security/-/eslint-plugin-security-3.0.1.tgz", - "integrity": "sha512-XjVGBhtDZJfyuhIxnQ/WMm385RbX3DBu7H1J7HNNhmB2tnGxMeqVSnYv79oAj992ayvIBZghsymwkYFS6cGH4Q==", - "dev": true, - "license": "Apache-2.0", + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "license": "MIT", "dependencies": { - "safe-regex": "^2.1.1" + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + "node": ">=6" }, "funding": { - "url": 
"https://opencollective.com/eslint" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/eslint-scope": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.2.0.tgz", - "integrity": "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==", + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, + "license": "MIT", "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" + "node": ">=0.8.19" } }, - "node_modules/eslint-visitor-keys": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", - "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", - "dev": true, - "license": "Apache-2.0", + "node_modules/inflection": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/inflection/-/inflection-3.0.2.tgz", + "integrity": "sha512-+Bg3+kg+J6JUWn8J6bzFmOWkTQ6L/NHfDRSYU+EVvuKHDxUDHAXgqixHfVlzuBQaPOTac8hn43aPhMNk6rMe3g==", + "license": "MIT", "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" + "node": ">=18.0.0" } }, - "node_modules/eslint/node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" } }, - "node_modules/espree": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", - "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "dev": true, - "license": "BSD-2-Clause", + "license": "ISC" + }, + "node_modules/internal-slot": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", + "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", + "dev": true, + "license": "MIT", "dependencies": { - "acorn": "^8.14.0", - "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^4.2.0" + "es-errors": "^1.3.0", + "hasown": "^2.0.2", + "side-channel": "^1.1.0" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" + "node": ">= 0.4" } }, - "node_modules/espree/node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", - "dev": true, - "license": "Apache-2.0", + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "license": "ISC", "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, + "node": ">=12" + } + }, + "node_modules/intersection-observer": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/intersection-observer/-/intersection-observer-0.10.0.tgz", + "integrity": "sha512-fn4bQ0Xq8FTej09YC/jqKZwtijpvARlRp6wxL5WTA6yPe2YWSJ5RJh7Nm79rK2qB0wr6iDQzH60XGq5V/7u8YQ==", + "deprecated": "The Intersection Observer polyfill is no longer needed and can safely be removed. 
Intersection Observer has been Baseline since 2019.", + "license": "W3C-20150513" + }, + "node_modules/is-alphabetical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", + "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", + "license": "MIT", "funding": { - "url": "https://opencollective.com/eslint" + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/esquery": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", - "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", - "dev": true, - "license": "BSD-3-Clause", + "node_modules/is-alphanumerical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", + "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", + "license": "MIT", "dependencies": { - "estraverse": "^5.1.0" + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" }, - "engines": { - "node": ">=0.10" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "node_modules/is-array-buffer": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", + "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", "dev": true, - "license": "BSD-2-Clause", + "license": "MIT", "dependencies": { - "estraverse": "^5.2.0" + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" }, "engines": { - "node": ">=4.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=4.0" - } + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "license": "MIT" }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "node_modules/is-async-function": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", + "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", "dev": true, - "license": "BSD-2-Clause", + "license": "MIT", + "dependencies": { + "async-function": "^1.0.0", + "call-bound": "^1.0.3", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - 
"node_modules/eventemitter3": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", - "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", - "license": "MIT" - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "license": "MIT" - }, - "node_modules/fast-glob": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", - "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "node_modules/is-bigint": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", + "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", "dev": true, "license": "MIT", "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" + "has-bigints": "^1.0.2" }, "engines": { - "node": ">=8.6.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fast-glob/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "node_modules/is-boolean-object": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", + "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", "dev": true, - "license": "ISC", + "license": "MIT", "dependencies": { - "is-glob": "^4.0.1" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { - "node": ">= 6" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", "dev": true, - "license": "MIT" + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, - "node_modules/fastq": { - "version": "1.18.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.18.0.tgz", - "integrity": "sha512-QKHXPW0hD8g4UET03SdOdunzSouc9N4AuHdsX8XNcTsuz+yYFILVNIX4l9yHABMhiEI9Db0JTTIpu0wB+Y1QQw==", - "dev": true, - "license": "ISC", + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": 
"https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "license": "MIT", "dependencies": { - "reusify": "^1.0.4" + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/file-entry-cache": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", - "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "node_modules/is-data-view": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", + "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", "dev": true, "license": "MIT", "dependencies": { - "flat-cache": "^4.0.0" + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "is-typed-array": "^1.1.13" }, "engines": { - "node": ">=16.0.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/file-selector": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/file-selector/-/file-selector-2.1.2.tgz", - "integrity": "sha512-QgXo+mXTe8ljeqUFaX3QVHc5osSItJ/Km+xpocx0aSqWGMSCf6qYs/VnzZgS864Pjn5iceMRFigeAV7AfTlaig==", + "node_modules/is-date-object": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", + "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", + "dev": true, "license": "MIT", "dependencies": { - "tslib": "^2.7.0" + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" }, "engines": { - "node": ">= 12" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-decimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", + "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "dev": true, "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, "engines": { - "node": ">=8" + "node": ">=0.10.0" } }, - "node_modules/filter-obj": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-5.1.0.tgz", - "integrity": "sha512-qWeTREPoT7I0bifpPUXtxkZJ1XJzxWtfoWWkdVGqa+eCr3SHW/Ocp89o8vLvbUuQnadybJpjOKu4V+RwO6sGng==", + "node_modules/is-finalizationregistry": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", + "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", + "dev": true, "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, "engines": { - 
"node": ">=14.16" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/find-root": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz", - "integrity": "sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==", - "license": "MIT" + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "license": "MIT", + "engines": { + "node": ">=8" + } }, - "node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "node_modules/is-generator-function": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.2.tgz", + "integrity": "sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA==", "dev": true, "license": "MIT", "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" + "call-bound": "^1.0.4", + "generator-function": "^2.0.0", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" }, "engines": { - "node": ">=10" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/flat-cache": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", - "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, "license": "MIT", "dependencies": { - "flatted": "^3.2.9", - "keyv": "^4.5.4" + "is-extglob": "^2.1.1" }, "engines": { - "node": ">=16" + "node": ">=0.10.0" } }, - "node_modules/flatted": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.2.tgz", - "integrity": "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==", - "dev": true, - "license": "ISC" + "node_modules/is-hexadecimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", + "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } }, - "node_modules/follow-redirects": { - "version": "1.15.9", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", - "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], + "node_modules/is-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": 
"sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", + "dev": true, "license": "MIT", "engines": { - "node": ">=4.0" + "node": ">= 0.4" }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/for-each": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", - "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "node_modules/is-negative-zero": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", "dev": true, "license": "MIT", - "dependencies": { - "is-callable": "^1.1.3" + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/form-data": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz", - "integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==", + "node_modules/is-number-object": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", + "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", + "dev": true, "license": "MIT", "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { - "node": ">= 6" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true, - "license": "ISC" - }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "dev": true, - "hasInstallScript": true, "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=8" } }, - "node_modules/function.prototype.name": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", - "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", + "node_modules/is-regex": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": 
"sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", "dev": true, "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "define-properties": "^1.2.1", - "functions-have-names": "^1.2.3", - "hasown": "^2.0.2", - "is-callable": "^1.2.7" + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -5521,51 +8300,27 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/functions-have-names": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", - "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "node_modules/is-set": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", "dev": true, "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "node_modules/is-shared-array-buffer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", + "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", "dev": true, "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "license": "ISC", - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-intrinsic": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.7.tgz", - "integrity": "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA==", - "license": "MIT", "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "function-bind": "^1.1.2", - "get-proto": "^1.0.0", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" + "call-bound": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -5574,29 +8329,33 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/get-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "node_modules/is-string": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", + "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", + "dev": true, "license": "MIT", "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" } }, - "node_modules/get-symbol-description": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", - "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", + "node_modules/is-symbol": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", + "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", "dev": true, "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6" + "call-bound": "^1.0.2", + "has-symbols": "^1.1.0", + "safe-regex-test": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -5605,62 +8364,43 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", "dev": true, - "license": "ISC", + "license": "MIT", "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "which-typed-array": "^1.1.16" }, "engines": { - "node": "*" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/isaacs" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "node_modules/is-weakmap": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/globals": { - "version": "15.14.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-15.14.0.tgz", - "integrity": "sha512-OkToC372DtlQeje9/zHIo5CT8lRP/FUgEOKBEhU4e0abL7J7CD24fD9ohiLN5hagG/kWCYj4K5oaxxtj2Z0Dig==", "license": "MIT", "engines": { - "node": ">=18" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/globalthis": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", - "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + "node_modules/is-weakref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", + "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", "dev": true, "license": "MIT", "dependencies": { - "define-properties": "^1.2.1", - "gopd": "^1.0.1" + "call-bound": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -5669,11 
+8409,16 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "node_modules/is-weakset": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", + "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", + "dev": true, "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, "engines": { "node": ">= 0.4" }, @@ -5681,1074 +8426,1423 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/graphemer": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", - "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", "dev": true, "license": "MIT" }, - "node_modules/hachure-fill": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/hachure-fill/-/hachure-fill-0.5.2.tgz", - "integrity": "sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==", - "license": "MIT" + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" }, - "node_modules/has-bigints": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz", - "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==", + "node_modules/iterator.prototype": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.5.tgz", + "integrity": "sha512-H0dkQoCa3b2VEeKQBOxFph+JAbcrQdE7KC0UkqwpLmv2EC4P41QXP+rqo9wYodACiG5/WM5s9oDApTU8utwj9g==", "dev": true, "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.6", + "get-proto": "^1.0.0", + "has-symbols": "^1.1.0", + "set-function-name": "^2.0.2" + }, "engines": { "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "node_modules/javascript-natural-sort": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/javascript-natural-sort/-/javascript-natural-sort-0.7.1.tgz", + "integrity": "sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==", "dev": true, + "license": "MIT" + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.1", + "resolved": 
"https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jschardet": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/jschardet/-/jschardet-3.1.4.tgz", + "integrity": "sha512-/kmVISmrwVwtyYU40iQUOp3SUPk2dhNCMsZBQX0R1/jZ8maaXJ/oZIzUOiyOqcgtLnETFKYChbJ5iDC/eWmFHg==", + "dev": true, + "license": "LGPL-2.1+", "engines": { - "node": ">=8" + "node": ">=0.1.90" } }, - "node_modules/has-property-descriptors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", - "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", "license": "MIT", - "dependencies": { - "es-define-property": "^1.0.0" + "bin": { + "jsesc": "bin/jsesc" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "engines": { + "node": ">=6" } }, - "node_modules/has-proto": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", - "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", "dev": true, "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.0" + "bin": { + "json5": "lib/cli.js" }, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=6" } }, - "node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": 
"sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node_modules/jsonexport": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/jsonexport/-/jsonexport-3.2.0.tgz", + "integrity": "sha512-GbO9ugb0YTZatPd/hqCGR0FSwbr82H6OzG04yzdrG7XOe4QZ0jhQ+kOsB29zqkzoYJLmLxbbrFiuwbQu891XnQ==", + "license": "Apache-2.0", + "bin": { + "jsonexport": "bin/jsonexport.js" } }, - "node_modules/has-tostringtag": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", - "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "node_modules/jsx-ast-utils": { + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", + "integrity": "sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==", "dev": true, "license": "MIT", "dependencies": { - "has-symbols": "^1.0.3" + "array-includes": "^3.1.6", + "array.prototype.flat": "^1.3.1", + "object.assign": "^4.1.4", + "object.values": "^1.1.6" }, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=4.0" } }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "node_modules/jwt-decode": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-4.0.0.tgz", + "integrity": "sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA==", "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, "engines": { - "node": ">= 0.4" - } - }, - "node_modules/hoist-non-react-statics": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", - "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", - "license": "BSD-3-Clause", - "dependencies": { - "react-is": "^16.7.0" + "node": ">=18" } }, - "node_modules/hoist-non-react-statics/node_modules/react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", - "license": "MIT" - }, - "node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "node_modules/katex": { + "version": "0.16.28", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.28.tgz", + "integrity": "sha512-YHzO7721WbmAL6Ov1uzN/l5mY5WWWhJBSW+jq4tkfZfsxmo1hu6frS0EOswvjBUnWE6NtjEs48SFn5CQESRLZg==", + "funding": [ + "https://opencollective.com/katex", + "https://github.com/sponsors/katex" + ], "license": "MIT", "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" + "commander": "^8.3.0" }, - "engines": { - "node": ">=0.10.0" + "bin": { + "katex": "cli.js" } }, - "node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": 
"sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "dev": true, + "node_modules/katex/node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", "license": "MIT", "engines": { - "node": ">= 4" + "node": ">= 12" } }, - "node_modules/import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, "license": "MIT", "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "json-buffer": "3.0.1" } }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8.19" - } + "node_modules/khroma": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz", + "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==" }, - "node_modules/inflection": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/inflection/-/inflection-3.0.2.tgz", - "integrity": "sha512-+Bg3+kg+J6JUWn8J6bzFmOWkTQ6L/NHfDRSYU+EVvuKHDxUDHAXgqixHfVlzuBQaPOTac8hn43aPhMNk6rMe3g==", + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", "license": "MIT", "engines": { - "node": ">=18.0.0" + "node": ">=6" } }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dev": true, - "license": "ISC", + "node_modules/langium": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/langium/-/langium-4.2.1.tgz", + "integrity": "sha512-zu9QWmjpzJcomzdJQAHgDVhLGq5bLosVak1KVa40NzQHXfqr4eAHupvnPOVXEoLkg6Ocefvf/93d//SB7du4YQ==", + "license": "MIT", "dependencies": { - "once": "^1.3.0", - "wrappy": "1" + "chevrotain": "~11.1.1", + "chevrotain-allstar": "~0.3.1", + "vscode-languageserver": "~9.0.1", + "vscode-languageserver-textdocument": "~1.0.11", + "vscode-uri": "~3.1.0" + }, + "engines": { + "node": ">=20.10.0", + "npm": ">=10.2.3" } }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true, - "license": "ISC" + "node_modules/layout-base": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz", + "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==", + "license": "MIT" }, - "node_modules/internal-slot": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", - "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "dev": true, "license": "MIT", "dependencies": { - "es-errors": "^1.3.0", - "hasown": "^2.0.2", - "side-channel": "^1.1.0" + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" }, "engines": { - "node": ">= 0.4" + "node": ">= 0.8.0" } }, - "node_modules/internmap": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", - "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", - "license": "ISC", - "engines": { - "node": ">=12" - } + "node_modules/lexical": { + "version": "0.35.0", + "resolved": "https://registry.npmjs.org/lexical/-/lexical-0.35.0.tgz", + "integrity": "sha512-3VuV8xXhh5xJA6tzvfDvE0YBCMkIZUmxtRilJQDDdCgJCc+eut6qAv2qbN+pbqvarqcQqPN1UF+8YvsjmyOZpw==", + "license": "MIT" }, - "node_modules/is-array-buffer": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", - "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "license": "MIT" + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dev": true, "license": "MIT", "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "get-intrinsic": "^1.2.6" + "p-locate": "^5.0.0" }, "engines": { - "node": ">= 0.4" + "node": ">=10" }, 
"funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "node_modules/lodash": { + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", "license": "MIT" }, - "node_modules/is-async-function": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.0.tgz", - "integrity": "sha512-GExz9MtyhlZyXYLxzlJRj5WUCE661zhDa1Yna52CN57AJsymh+DvXXjyveSioqSRdxvUrdKdvqB1b5cVKsNpWQ==", + "node_modules/lodash-es": { + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.23.tgz", + "integrity": "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==", + "license": "MIT" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true, + "license": "MIT" + }, + "node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", - "get-proto": "^1.0.1", - "has-tostringtag": "^1.0.2", - "safe-regex-test": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" + "js-tokens": "^3.0.0 || ^4.0.0" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "bin": { + "loose-envify": "cli.js" } }, - "node_modules/is-bigint": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", - "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", - "dev": true, + "node_modules/lowlight": { + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/lowlight/-/lowlight-1.20.0.tgz", + "integrity": "sha512-8Ktj+prEb1RoCPkEOrPMYUN/nCggB7qAWe3a7OpMjWQkh3l2RD5wKRQ+o8Q8YuI9RG/xs95waaI/E6ym/7NsTw==", "license": "MIT", "dependencies": { - "has-bigints": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" + "fault": "^1.0.0", + "highlight.js": "~10.7.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/is-boolean-object": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.1.tgz", - "integrity": "sha512-l9qO6eFlUETHtuihLcYOaLKByJ1f+N4kthcU9YjHy3N+B3hWv0y/2Nd0mu/7lTFnRQHTrSdXF50HQ3bl5fEnng==", - "dev": true, + "node_modules/lowlight/node_modules/fault": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/fault/-/fault-1.0.4.tgz", + "integrity": "sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.2", - "has-tostringtag": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" + "format": "^0.2.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/lz-string": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", + "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", + "license": "MIT", + "bin": { + "lz-string": "bin/bin.js" + } + }, + "node_modules/markdown-table": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz", + "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==", "license": "MIT", - "engines": { - "node": ">= 0.4" - }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "node_modules/markdown-to-jsx": { + "version": "9.7.4", + "resolved": "https://registry.npmjs.org/markdown-to-jsx/-/markdown-to-jsx-9.7.4.tgz", + "integrity": "sha512-W4ERh57uGFDT4Z0HDlk8fR8/vYwj+9cMMVwhLdVtvVLqGatGjroZIMk5IY/PMbV0ig0sos2FJVwUMQj/7l2VDg==", "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, "engines": { - "node": ">= 0.4" + "node": ">= 18" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "peerDependencies": { + "react": ">= 16.0.0", + "solid-js": ">=1.0.0", + "vue": ">=3.0.0" + }, + "peerDependenciesMeta": { + "react": { + "optional": true + }, + "react-native": { + "optional": true + }, + "solid-js": { + "optional": true + }, + "vue": { + "optional": true + } } }, - "node_modules/is-data-view": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", - "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", - "dev": true, + "node_modules/marked": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/marked/-/marked-17.0.2.tgz", + "integrity": "sha512-s5HZGFQea7Huv5zZcAGhJLT3qLpAfnY7v7GWkICUr0+Wd5TFEtdlRR2XUL5Gg+RH7u2Df595ifrxR03mBaw7gA==", "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "get-intrinsic": "^1.2.6", - "is-typed-array": "^1.1.13" + "bin": { + "marked": "bin/marked.js" }, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">= 20" } }, - 
"node_modules/is-date-object": { + "node_modules/math-intrinsics": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", - "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", - "dev": true, + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "has-tostringtag": "^1.0.2" - }, "engines": { "node": ">= 0.4" + } + }, + "node_modules/mdast-util-directive": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-directive/-/mdast-util-directive-3.1.0.tgz", + "integrity": "sha512-I3fNFt+DHmpWCYAT7quoM6lHf9wuqtI+oCOfvILnoicNIqjh5E3dEJWiXuYME2gNe8vl1iMQwyUHa7bgFmak6Q==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-visit-parents": "^6.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, + "node_modules/mdast-util-from-markdown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz", + "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==", "license": "MIT", - "engines": { - "node": ">=0.10.0" + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark": "^4.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-finalizationregistry": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", - "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", - "dev": true, + "node_modules/mdast-util-frontmatter": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-2.0.1.tgz", + "integrity": "sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "escape-string-regexp": "^5.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "micromark-extension-frontmatter": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": 
"https://opencollective.com/unified" } }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "node_modules/mdast-util-frontmatter/node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", "license": "MIT", "engines": { - "node": ">=8" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is-generator-function": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz", - "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==", - "dev": true, + "node_modules/mdast-util-gfm-strikethrough": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", + "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", - "get-proto": "^1.0.0", - "has-tostringtag": "^1.0.2", - "safe-regex-test": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, + "node_modules/mdast-util-gfm-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", + "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", "license": "MIT", "dependencies": { - "is-extglob": "^2.1.1" + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "markdown-table": "^3.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-map": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", - "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", - "dev": true, + "node_modules/mdast-util-gfm-task-list-item": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", + "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", "license": "MIT", - "engines": { - "node": ">= 0.4" + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": 
"https://opencollective.com/unified" } }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, + "node_modules/mdast-util-highlight-mark": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/mdast-util-highlight-mark/-/mdast-util-highlight-mark-1.2.2.tgz", + "integrity": "sha512-OYumVoytj+B9YgwzBhBcYUCLYHIPvJtAvwnMyKhUXbfUFuER5S+FDZyu9fadUxm2TCT5fRYK3jQXh2ioWAxrMw==", "license": "MIT", - "engines": { - "node": ">=0.12.0" + "dependencies": { + "micromark-extension-highlight-mark": "1.2.0" } }, - "node_modules/is-number-object": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", - "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", - "dev": true, + "node_modules/mdast-util-mdx": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-mdx/-/mdast-util-mdx-3.0.0.tgz", + "integrity": "sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", - "has-tostringtag": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-mdx-expression": "^2.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-mdxjs-esm": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "dev": true, + "node_modules/mdast-util-mdx-expression": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz", + "integrity": "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==", "license": "MIT", - "engines": { - "node": ">=8" + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-regex": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", - "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", - "dev": true, + "node_modules/mdast-util-mdx-jsx": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.2.0.tgz", + "integrity": "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.2", - "gopd": "^1.2.0", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": 
"^2.0.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-set": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", - "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", - "dev": true, + "node_modules/mdast-util-mdxjs-esm": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", + "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", "license": "MIT", - "engines": { - "node": ">= 0.4" + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-shared-array-buffer": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", - "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", - "dev": true, + "node_modules/mdast-util-phrasing": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", + "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" + "@types/mdast": "^4.0.0", + "unist-util-is": "^6.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-string": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", - "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", - "dev": true, + "node_modules/mdast-util-to-markdown": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz", + "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", - "has-tostringtag": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^4.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-symbol": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", - "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", - "dev": true, + "node_modules/mdast-util-to-string": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", + "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.2", - "has-symbols": "^1.1.0", - "safe-regex-test": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" + "@types/mdast": "^4.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-typed-array": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", - "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", - "dev": true, + "node_modules/mermaid": { + "version": "11.12.3", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.12.3.tgz", + "integrity": "sha512-wN5ZSgJQIC+CHJut9xaKWsknLxaFBwCPwPkGTSUYrTiHORWvpT8RxGk849HPnpUAQ+/9BPRqYb80jTpearrHzQ==", "license": "MIT", "dependencies": { - "which-typed-array": "^1.1.16" + "@braintree/sanitize-url": "^7.1.1", + "@iconify/utils": "^3.0.1", + "@mermaid-js/parser": "^1.0.0", + "@types/d3": "^7.4.3", + "cytoscape": "^3.29.3", + "cytoscape-cose-bilkent": "^4.1.0", + "cytoscape-fcose": "^2.2.0", + "d3": "^7.9.0", + "d3-sankey": "^0.12.3", + "dagre-d3-es": "7.0.13", + "dayjs": "^1.11.18", + "dompurify": "^3.2.5", + "katex": "^0.16.22", + "khroma": "^2.1.0", + "lodash-es": "^4.17.23", + "marked": "^16.2.1", + "roughjs": "^4.6.6", + "stylis": "^4.3.6", + "ts-dedent": "^2.2.0", + "uuid": "^11.1.0" + } + }, + "node_modules/mermaid/node_modules/marked": { + "version": "16.4.2", + "resolved": "https://registry.npmjs.org/marked/-/marked-16.4.2.tgz", + "integrity": "sha512-TI3V8YYWvkVf3KJe1dRkpnjs68JUPyEa5vjKrp1XEEJUAOaQc+Qj+L1qWbPd0SJuAdQkFU0h73sXXqwDYxsiDA==", + "license": "MIT", + "bin": { + "marked": "bin/marked.js" }, "engines": { - "node": ">= 0.4" + "node": ">= 20" + } + }, + "node_modules/mermaid/node_modules/stylis": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.6.tgz", + "integrity": "sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==", + "license": "MIT" + }, + "node_modules/micromark": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz", + "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz", + "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-destination": "^2.0.0", + "micromark-factory-label": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-title": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-html-tag-name": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-extension-directive": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/micromark-extension-directive/-/micromark-extension-directive-3.0.2.tgz", + "integrity": "sha512-wjcXHgk+PPdmvR58Le9d7zQYWy+vKEU9Se44p2CrCDPiLr2FMyiT4Fyb5UFKFC66wGB3kPlgD7q3TnoqPS7SZA==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "parse-entities": "^4.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-weakmap": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", - "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", - "dev": true, + "node_modules/micromark-extension-frontmatter": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-frontmatter/-/micromark-extension-frontmatter-2.0.0.tgz", + "integrity": "sha512-C4AkuM3dA58cgZha7zVnuVxBhDsbttIMiytjgsM2XbHAB2faRVaHRle40558FBN+DJcrLNCoqG5mlrpdU4cRtg==", "license": "MIT", - "engines": { - "node": ">= 0.4" + "dependencies": { + "fault": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-weakref": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.0.tgz", - "integrity": "sha512-SXM8Nwyys6nT5WP6pltOwKytLV7FqQ4UiibxVmW+EIosHcmCqkkjViTb5SNssDlkCiEYRP1/pdWUKVvZBmsR2Q==", - "dev": true, + "node_modules/micromark-extension-gfm-strikethrough": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz", + "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + 
"micromark-util-resolve-all": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/is-weakset": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", - "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", - "dev": true, + "node_modules/micromark-extension-gfm-table": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz", + "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==", "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", - "get-intrinsic": "^1.2.6" - }, - "engines": { - "node": ">= 0.4" + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/isarray": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", - "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", - "dev": true, - "license": "MIT" - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true, - "license": "ISC" - }, - "node_modules/iterator.prototype": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.5.tgz", - "integrity": "sha512-H0dkQoCa3b2VEeKQBOxFph+JAbcrQdE7KC0UkqwpLmv2EC4P41QXP+rqo9wYodACiG5/WM5s9oDApTU8utwj9g==", - "dev": true, + "node_modules/micromark-extension-gfm-task-list-item": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz", + "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==", "license": "MIT", "dependencies": { - "define-data-property": "^1.1.4", - "es-object-atoms": "^1.0.0", - "get-intrinsic": "^1.2.6", - "get-proto": "^1.0.0", - "has-symbols": "^1.1.0", - "set-function-name": "^2.0.2" + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" }, - "engines": { - "node": ">= 0.4" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/javascript-natural-sort": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/javascript-natural-sort/-/javascript-natural-sort-0.7.1.tgz", - "integrity": "sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==", - "dev": true, - "license": "MIT" - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "license": "MIT" - }, - "node_modules/js-yaml": { - 
"version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, + "node_modules/micromark-extension-highlight-mark": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-extension-highlight-mark/-/micromark-extension-highlight-mark-1.2.0.tgz", + "integrity": "sha512-huGtbd/9kQsMk8u7nrVMaS5qH/47yDG6ZADggo5Owz5JoY8wdfQjfuy118/QiYNCvdFuFDbzT0A7K7Hp2cBsXA==", "license": "MIT", "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "uvu": "^0.5.6" } }, - "node_modules/jschardet": { - "version": "3.1.4", - "resolved": "https://registry.npmjs.org/jschardet/-/jschardet-3.1.4.tgz", - "integrity": "sha512-/kmVISmrwVwtyYU40iQUOp3SUPk2dhNCMsZBQX0R1/jZ8maaXJ/oZIzUOiyOqcgtLnETFKYChbJ5iDC/eWmFHg==", - "dev": true, - "license": "LGPL-2.1+", - "engines": { - "node": ">=0.1.90" + "node_modules/micromark-extension-mdx-expression": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-3.0.1.tgz", + "integrity": "sha512-dD/ADLJ1AeMvSAKBwO22zG22N4ybhe7kFIZ3LsDI0GlsNr2A3KYxb0LdC1u5rj4Nw+CHKY0RVdnHX8vj8ejm4Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-mdx-expression": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-events-to-acorn": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "node_modules/micromark-extension-mdx-jsx": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-3.0.2.tgz", + "integrity": "sha512-e5+q1DjMh62LZAJOnDraSSbDMvGJ8x3cbjygy2qFEi7HCeUT4BDKCvMozPozcD6WmOt6sVvYDNBKhFSz3kjOVQ==", "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" + "dependencies": { + "@types/estree": "^1.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "micromark-factory-mdx-expression": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-events-to-acorn": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "vfile-message": "^4.0.0" }, - "engines": { - "node": ">=6" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/json-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": 
"https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", - "license": "MIT" - }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, - "license": "MIT" - }, - "node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", - "dev": true, - "license": "MIT" - }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true, + "node_modules/micromark-extension-mdx-md": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-md/-/micromark-extension-mdx-md-2.0.0.tgz", + "integrity": "sha512-EpAiszsB3blw4Rpba7xTOUptcFeBFi+6PY8VnJ2hhimH+vCQDirWgsMpz7w1XcZE7LVrSAUGb9VJpG9ghlYvYQ==", "license": "MIT", - "bin": { - "json5": "lib/cli.js" + "dependencies": { + "micromark-util-types": "^2.0.0" }, - "engines": { - "node": ">=6" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/jsonexport": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/jsonexport/-/jsonexport-3.2.0.tgz", - "integrity": "sha512-GbO9ugb0YTZatPd/hqCGR0FSwbr82H6OzG04yzdrG7XOe4QZ0jhQ+kOsB29zqkzoYJLmLxbbrFiuwbQu891XnQ==", - "license": "Apache-2.0", - "bin": { - "jsonexport": "bin/jsonexport.js" + "node_modules/micromark-extension-mdxjs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs/-/micromark-extension-mdxjs-3.0.0.tgz", + "integrity": "sha512-A873fJfhnJ2siZyUrJ31l34Uqwy4xIFmvPY1oj+Ean5PHcPBYzEsvqvWGaWcfEIr11O5Dlw3p2y0tZWpKHDejQ==", + "license": "MIT", + "dependencies": { + "acorn": "^8.0.0", + "acorn-jsx": "^5.0.0", + "micromark-extension-mdx-expression": "^3.0.0", + "micromark-extension-mdx-jsx": "^3.0.0", + "micromark-extension-mdx-md": "^2.0.0", + "micromark-extension-mdxjs-esm": "^3.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/jsx-ast-utils": { - "version": "3.3.5", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", - "integrity": "sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==", - "dev": true, + "node_modules/micromark-extension-mdxjs-esm": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-3.0.0.tgz", + "integrity": "sha512-DJFl4ZqkErRpq/dAPyeWp15tGrcrrJho1hKK5uBS70BCtfrIFg81sqcTVu3Ta+KD1Tk5vAtBNElWxtAa+m8K9A==", "license": "MIT", "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flat": "^1.3.1", - "object.assign": "^4.1.4", - "object.values": "^1.1.6" + "@types/estree": "^1.0.0", + "devlop": "^1.0.0", + 
"micromark-core-commonmark": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-events-to-acorn": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-position-from-estree": "^2.0.0", + "vfile-message": "^4.0.0" }, - "engines": { - "node": ">=4.0" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/jwt-decode": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-4.0.0.tgz", - "integrity": "sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA==", + "node_modules/micromark-factory-destination": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz", + "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", - "engines": { - "node": ">=18" + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/katex": { - "version": "0.16.20", - "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.20.tgz", - "integrity": "sha512-jjuLaMGD/7P8jUTpdKhA9IoqnH+yMFB3sdAFtq5QdAqeP2PjiSbnC3EaguKPNtv6dXXanHxp1ckwvF4a86LBig==", + "node_modules/micromark-factory-label": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz", + "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==", "funding": [ - "https://opencollective.com/katex", - "https://github.com/sponsors/katex" + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } ], "license": "MIT", "dependencies": { - "commander": "^8.3.0" - }, - "bin": { - "katex": "cli.js" + "devlop": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/katex/node_modules/commander": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", - "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "node_modules/micromark-factory-mdx-expression": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-2.0.3.tgz", + "integrity": "sha512-kQnEtA3vzucU2BkrIa8/VaSAsP+EJ3CKOvhMuJgOEGg9KDC6OAY6nSnNDVRiVNRqj7Y4SlSzcStaH/5jge8JdQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", - "engines": { - "node": ">= 12" + "dependencies": { + "@types/estree": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-events-to-acorn": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-position-from-estree": "^2.0.0", + "vfile-message": "^4.0.0" } }, - "node_modules/keyv": { - "version": "4.5.4", - 
"resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", - "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", - "dev": true, + "node_modules/micromark-factory-space": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", + "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", "dependencies": { - "json-buffer": "3.0.1" + "micromark-util-character": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/khroma": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz", - "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==" - }, - "node_modules/kolorist": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/kolorist/-/kolorist-1.8.0.tgz", - "integrity": "sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ==", - "license": "MIT" - }, - "node_modules/langium": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/langium/-/langium-3.0.0.tgz", - "integrity": "sha512-+Ez9EoiByeoTu/2BXmEaZ06iPNXM6thWJp02KfBO/raSMyCJ4jw7AkWWa+zBCTm0+Tw1Fj9FOxdqSskyN5nAwg==", + "node_modules/micromark-factory-title": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz", + "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", "dependencies": { - "chevrotain": "~11.0.3", - "chevrotain-allstar": "~0.3.0", - "vscode-languageserver": "~9.0.1", - "vscode-languageserver-textdocument": "~1.0.11", - "vscode-uri": "~3.0.8" - }, - "engines": { - "node": ">=16.0.0" + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/layout-base": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz", - "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==", - "license": "MIT" - }, - "node_modules/levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, + "node_modules/micromark-factory-whitespace": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz", + "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", "dependencies": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - }, - "engines": { - "node": ">= 0.8.0" + 
"micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", - "license": "MIT" - }, - "node_modules/linkify-it": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", - "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "node_modules/micromark-util-character": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", + "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", "dependencies": { - "uc.micro": "^2.0.0" + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/linkifyjs": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/linkifyjs/-/linkifyjs-4.2.0.tgz", - "integrity": "sha512-pCj3PrQyATaoTYKHrgWRF3SJwsm61udVh+vuls/Rl6SptiDhgE7ziUIudAedRY9QEfynmM7/RmLEfPUyw1HPCw==", - "license": "MIT" - }, - "node_modules/local-pkg": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.5.1.tgz", - "integrity": "sha512-9rrA30MRRP3gBD3HTGnC6cDFpaE1kVDWxWgqWJUN0RvDNAo+Nz/9GxB+nHOH0ifbVFy0hSA1V6vFDvnx54lTEQ==", + "node_modules/micromark-util-chunked": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz", + "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", "dependencies": { - "mlly": "^1.7.3", - "pkg-types": "^1.2.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/antfu" + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, + "node_modules/micromark-util-classify-character": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz", + "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - 
"node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "license": "MIT" - }, - "node_modules/lodash-es": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", - "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", - "license": "MIT" - }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "node_modules/micromark-util-combine-extensions": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz", + "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" + "micromark-util-chunked": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dev": true, - "license": "ISC", + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz", + "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "yallist": "^3.0.2" + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/markdown-it": { - "version": "14.1.0", - "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", - "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "node_modules/micromark-util-decode-string": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz", + "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", "dependencies": { - "argparse": "^2.0.1", - "entities": "^4.4.0", - "linkify-it": "^5.0.0", - "mdurl": 
"^2.0.0", - "punycode.js": "^2.3.1", - "uc.micro": "^2.1.0" - }, - "bin": { - "markdown-it": "bin/markdown-it.mjs" + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/markdown-to-jsx": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/markdown-to-jsx/-/markdown-to-jsx-7.7.3.tgz", - "integrity": "sha512-o35IhJDFP6Fv60zPy+hbvZSQMmgvSGdK5j8NRZ7FeZMY+Bgqw+dSg7SC1ZEzC26++CiOUCqkbq96/c3j/FfTEQ==", + "node_modules/micromark-util-encode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz", + "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-events-to-acorn": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-2.0.3.tgz", + "integrity": "sha512-jmsiEIiZ1n7X1Rr5k8wVExBQCg5jy4UXVADItHmNk1zkwEVhBuIUKRu3fqv+hs4nxLISi2DQGlqIOGiFxgbfHg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", - "engines": { - "node": ">= 10" - }, - "peerDependencies": { - "react": ">= 0.14.0" + "dependencies": { + "@types/estree": "^1.0.0", + "@types/unist": "^3.0.0", + "devlop": "^1.0.0", + "estree-util-visit": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "vfile-message": "^4.0.0" } }, - "node_modules/marked": { - "version": "13.0.3", - "resolved": "https://registry.npmjs.org/marked/-/marked-13.0.3.tgz", - "integrity": "sha512-rqRix3/TWzE9rIoFGIn8JmsVfhiuC8VIQ8IdX5TfzmeBucdY05/0UlzKaw0eVtpcN/OdVFpBk7CjKGo9iHJ/zA==", + "node_modules/micromark-util-html-tag-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz", + "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz", + "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", - "bin": { - "marked": "bin/marked.js" - }, - "engines": { - "node": ">= 18" + "dependencies": { + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": 
"sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "node_modules/micromark-util-resolve-all": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz", + "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", - "engines": { - "node": ">= 0.4" + "dependencies": { + "micromark-util-types": "^2.0.0" } }, - "node_modules/mdurl": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", - "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", - "license": "MIT" - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, + "node_modules/micromark-util-sanitize-uri": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz", + "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", - "engines": { - "node": ">= 8" + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/mermaid": { - "version": "11.4.1", - "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.4.1.tgz", - "integrity": "sha512-Mb01JT/x6CKDWaxigwfZYuYmDZ6xtrNwNlidKZwkSrDaY9n90tdrJTV5Umk+wP1fZscGptmKFXHsXMDEVZ+Q6A==", + "node_modules/micromark-util-subtokenize": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz", + "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", "dependencies": { - "@braintree/sanitize-url": "^7.0.1", - "@iconify/utils": "^2.1.32", - "@mermaid-js/parser": "^0.3.0", - "@types/d3": "^7.4.3", - "cytoscape": "^3.29.2", - "cytoscape-cose-bilkent": "^4.1.0", - "cytoscape-fcose": "^2.2.0", - "d3": "^7.9.0", - "d3-sankey": "^0.12.3", - "dagre-d3-es": "7.0.11", - "dayjs": "^1.11.10", - "dompurify": "^3.2.1", - "katex": "^0.16.9", - "khroma": "^2.1.0", - "lodash-es": "^4.17.21", - "marked": "^13.0.2", - "roughjs": "^4.6.6", - "stylis": "^4.3.1", - "ts-dedent": "^2.2.0", - "uuid": "^9.0.1" + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/mermaid/node_modules/stylis": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.5.tgz", - "integrity": "sha512-K7npNOKGRYuhAFFzkzMGfxFDpN6gDwf8hcMiE+uveTVbBgm93HrNP3ZDUpKqzZ4pG7TP6fmb+EMAQPjq9FqqvA==", + 
"node_modules/micromark-util-symbol": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", + "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT" }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } + "node_modules/micromark-util-types": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", + "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" }, "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", "license": "MIT", "engines": { "node": ">= 0.6" @@ -6766,6 +9860,15 @@ "node": ">= 0.6" } }, + "node_modules/mime-types/node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -6780,15 +9883,24 @@ } }, "node_modules/mlly": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.4.tgz", - "integrity": "sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.8.0.tgz", + "integrity": "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==", "license": "MIT", "dependencies": { - "acorn": "^8.14.0", - "pathe": "^2.0.1", - "pkg-types": "^1.3.0", - "ufo": "^1.5.4" + "acorn": "^8.15.0", + "pathe": "^2.0.3", + "pkg-types": "^1.3.1", + "ufo": "^1.6.1" + } + }, + "node_modules/mri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "license": "MIT", + "engines": { + "node": ">=4" } }, "node_modules/ms": { @@ -6798,9 +9910,9 @@ "license": "MIT" }, "node_modules/nanoid": { - "version": "3.3.8", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", - "integrity": 
"sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", "dev": true, "funding": [ { @@ -6823,6 +9935,12 @@ "dev": true, "license": "MIT" }, + "node_modules/next-tick": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz", + "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==", + "license": "ISC" + }, "node_modules/node-polyglot": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/node-polyglot/-/node-polyglot-2.6.0.tgz", @@ -6838,9 +9956,9 @@ } }, "node_modules/node-releases": { - "version": "2.0.19", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", - "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", "dev": true, "license": "MIT" }, @@ -6854,9 +9972,9 @@ } }, "node_modules/object-inspect": { - "version": "1.13.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz", - "integrity": "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==", + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", "dev": true, "license": "MIT", "engines": { @@ -6897,14 +10015,15 @@ } }, "node_modules/object.entries": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.8.tgz", - "integrity": "sha512-cmopxi8VwRIAw/fkijJohSfpef5PdN0pMQJN6VC/ZKvn0LIknWD8KtgY6KlQdEc4tIjcQ3HxSMmnvtzIscdaYQ==", + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.9.tgz", + "integrity": "sha512-8u/hfXFRBD1O0hPUjioLhoWFHRmt6tKA4/vZPyckBr18l1KE9uHrFaFaUi8MDRTpi4uak2goyPTSNJLXX2k2Hw==", "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0" + "es-object-atoms": "^1.1.1" }, "engines": { "node": ">= 0.4" @@ -6949,9 +10068,9 @@ } }, "node_modules/oidc-client-ts": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/oidc-client-ts/-/oidc-client-ts-3.1.0.tgz", - "integrity": "sha512-IDopEXjiwjkmJLYZo6BTlvwOtnlSniWZkKZoXforC/oLZHC9wkIxd25Kwtmo5yKFMMVcsp3JY6bhcNJqdYk8+g==", + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/oidc-client-ts/-/oidc-client-ts-3.4.1.tgz", + "integrity": "sha512-jNdst/U28Iasukx/L5MP6b274Vr7ftQs6qAhPBCvz6Wt5rPCA+Q/tUmCzfCHHWweWw5szeMy2Gfrm1rITwUKrw==", "license": "Apache-2.0", "dependencies": { "jwt-decode": "^4.0.0" @@ -6988,10 +10107,10 @@ "node": ">= 0.8.0" } }, - "node_modules/orderedmap": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/orderedmap/-/orderedmap-2.1.1.tgz", - "integrity": "sha512-TvAWxi0nDe1j/rtMcWcIj94+Ffe6n7zhow33h40SKxmsmozs6dz/e+EajymfoFcHd7sxNn8yHM8839uixMOV6g==", + "node_modules/outvariant": { + "version": "1.4.0", + "resolved": 
"https://registry.npmjs.org/outvariant/-/outvariant-1.4.0.tgz", + "integrity": "sha512-AlWY719RF02ujitly7Kk/0QlV+pXGFDHrHf9O2OKqyqgBieaPOIeuSkL8sRK6j2WK+/ZAURq2kZsY0d8JapUiw==", "license": "MIT" }, "node_modules/own-keys": { @@ -7045,9 +10164,15 @@ } }, "node_modules/package-manager-detector": { - "version": "0.2.8", - "resolved": "https://registry.npmjs.org/package-manager-detector/-/package-manager-detector-0.2.8.tgz", - "integrity": "sha512-ts9KSdroZisdvKMWVAVCXiKqnqNfXz4+IbrBG8/BWx/TR5le+jfenvoBuIZ6UWM9nz47W7AbD9qYfAwfWMIwzA==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/package-manager-detector/-/package-manager-detector-1.6.0.tgz", + "integrity": "sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA==", + "license": "MIT" + }, + "node_modules/packageurl-js": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/packageurl-js/-/packageurl-js-2.0.1.tgz", + "integrity": "sha512-N5ixXjzTy4QDQH0Q9YFjqIWd6zH6936Djpl2m9QNFmDv5Fum8q8BjkpAcHNMzOFE0IwQrFhJWex3AN6kS0OSwg==", "license": "MIT" }, "node_modules/parent-module": { @@ -7062,6 +10187,41 @@ "node": ">=6" } }, + "node_modules/parse-entities": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.2.tgz", + "integrity": "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "character-entities-legacy": "^3.0.0", + "character-reference-invalid": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + "is-hexadecimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/parse-entities/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", + "license": "MIT" + }, + "node_modules/parse-imports-exports": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/parse-imports-exports/-/parse-imports-exports-0.2.4.tgz", + "integrity": "sha512-4s6vd6dx1AotCx/RCI2m7t7GCh5bDRUtGNvRfHSP2wbBQdMi67pPe7mtzmgwcaQ8VKK/6IB7Glfyu3qdZJPybQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parse-statements": "1.0.11" + } + }, "node_modules/parse-json": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", @@ -7080,6 +10240,13 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/parse-statements": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/parse-statements/-/parse-statements-1.0.11.tgz", + "integrity": "sha512-HlsyYdMBnbPQ9Jr/VgJ1YF4scnldvJpJxCVx6KgqPL4dxppsWrJHCIIxQXMJrqGnsRkNPATbeMJ8Yxu7JMsYcA==", + "dev": true, + "license": "MIT" + }, "node_modules/path-data-parser": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/path-data-parser/-/path-data-parser-0.1.0.tgz", @@ -7132,9 +10299,9 @@ } }, "node_modules/pathe": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.1.tgz", - "integrity": "sha512-6jpjMpOth5S9ITVu5clZ7NOgHNsv5vRQdheL9ztp2vZmM6fRbLvyua1tiBIL4lk8SAe3ARzeXEly6siXCjDHDw==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": 
"sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", "license": "MIT" }, "node_modules/picocolors": { @@ -7144,35 +10311,39 @@ "license": "ISC" }, "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", "engines": { - "node": ">=8.6" + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, "node_modules/pkg-types": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.0.tgz", - "integrity": "sha512-kS7yWjVFCkIw9hqdJBoMxDdzEngmkr5FXeWZZfQ6GoYacjVnsW6l2CcYW/0ThD0vF4LPJgVYnrg4d0uuhwYQbg==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", + "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", "license": "MIT", "dependencies": { "confbox": "^0.1.8", - "mlly": "^1.7.3", - "pathe": "^1.1.2" + "mlly": "^1.7.4", + "pathe": "^2.0.1" } }, - "node_modules/pkg-types/node_modules/pathe": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", - "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", - "license": "MIT" - }, "node_modules/points-on-curve": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/points-on-curve/-/points-on-curve-0.2.0.tgz", @@ -7190,9 +10361,9 @@ } }, "node_modules/possible-typed-array-names": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", - "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", "dev": true, "license": "MIT", "engines": { @@ -7200,9 +10371,9 @@ } }, "node_modules/postcss": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.0.tgz", - "integrity": "sha512-27VKOqrYfPncKA2NrFOVhP5MGAfHKLYn/Q0mz9cNQyRAKYi3VNHwYU2qKKqPCqgBmeeJ0uAFB56NumXZ5ZReXg==", + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "dev": true, "funding": [ { @@ -7220,7 +10391,7 @@ ], "license": "MIT", "dependencies": { - "nanoid": "^3.3.8", + "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" }, @@ -7239,9 +10410,9 @@ } }, "node_modules/prettier": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.4.2.tgz", - "integrity": 
"sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ==", + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.8.1.tgz", + "integrity": "sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==", "dev": true, "license": "MIT", "bin": { @@ -7254,6 +10425,15 @@ "url": "https://github.com/prettier/prettier?sponsor=1" } }, + "node_modules/prismjs": { + "version": "1.30.0", + "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.30.0.tgz", + "integrity": "sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/prop-types": { "version": "15.8.1", "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", @@ -7271,199 +10451,14 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", "license": "MIT" }, - "node_modules/prosemirror-changeset": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/prosemirror-changeset/-/prosemirror-changeset-2.2.1.tgz", - "integrity": "sha512-J7msc6wbxB4ekDFj+n9gTW/jav/p53kdlivvuppHsrZXCaQdVgRghoZbSS3kwrRyAstRVQ4/+u5k7YfLgkkQvQ==", - "license": "MIT", - "dependencies": { - "prosemirror-transform": "^1.0.0" - } - }, - "node_modules/prosemirror-collab": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/prosemirror-collab/-/prosemirror-collab-1.3.1.tgz", - "integrity": "sha512-4SnynYR9TTYaQVXd/ieUvsVV4PDMBzrq2xPUWutHivDuOshZXqQ5rGbZM84HEaXKbLdItse7weMGOUdDVcLKEQ==", - "license": "MIT", - "dependencies": { - "prosemirror-state": "^1.0.0" - } - }, - "node_modules/prosemirror-commands": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/prosemirror-commands/-/prosemirror-commands-1.6.2.tgz", - "integrity": "sha512-0nDHH++qcf/BuPLYvmqZTUUsPJUCPBUXt0J1ErTcDIS369CTp773itzLGIgIXG4LJXOlwYCr44+Mh4ii6MP1QA==", - "license": "MIT", - "dependencies": { - "prosemirror-model": "^1.0.0", - "prosemirror-state": "^1.0.0", - "prosemirror-transform": "^1.10.2" - } - }, - "node_modules/prosemirror-dropcursor": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/prosemirror-dropcursor/-/prosemirror-dropcursor-1.8.1.tgz", - "integrity": "sha512-M30WJdJZLyXHi3N8vxN6Zh5O8ZBbQCz0gURTfPmTIBNQ5pxrdU7A58QkNqfa98YEjSAL1HUyyU34f6Pm5xBSGw==", - "license": "MIT", - "dependencies": { - "prosemirror-state": "^1.0.0", - "prosemirror-transform": "^1.1.0", - "prosemirror-view": "^1.1.0" - } - }, - "node_modules/prosemirror-gapcursor": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/prosemirror-gapcursor/-/prosemirror-gapcursor-1.3.2.tgz", - "integrity": "sha512-wtjswVBd2vaQRrnYZaBCbyDqr232Ed4p2QPtRIUK5FuqHYKGWkEwl08oQM4Tw7DOR0FsasARV5uJFvMZWxdNxQ==", - "license": "MIT", - "dependencies": { - "prosemirror-keymap": "^1.0.0", - "prosemirror-model": "^1.0.0", - "prosemirror-state": "^1.0.0", - "prosemirror-view": "^1.0.0" - } - }, - "node_modules/prosemirror-history": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/prosemirror-history/-/prosemirror-history-1.4.1.tgz", - "integrity": "sha512-2JZD8z2JviJrboD9cPuX/Sv/1ChFng+xh2tChQ2X4bB2HeK+rra/bmJ3xGntCcjhOqIzSDG6Id7e8RJ9QPXLEQ==", - "license": "MIT", - "dependencies": { - "prosemirror-state": "^1.2.2", - "prosemirror-transform": "^1.0.0", - "prosemirror-view": "^1.31.0", - "rope-sequence": "^1.3.0" - } - }, - "node_modules/prosemirror-inputrules": { - "version": 
"1.4.0", - "resolved": "https://registry.npmjs.org/prosemirror-inputrules/-/prosemirror-inputrules-1.4.0.tgz", - "integrity": "sha512-6ygpPRuTJ2lcOXs9JkefieMst63wVJBgHZGl5QOytN7oSZs3Co/BYbc3Yx9zm9H37Bxw8kVzCnDsihsVsL4yEg==", - "license": "MIT", - "dependencies": { - "prosemirror-state": "^1.0.0", - "prosemirror-transform": "^1.0.0" - } - }, - "node_modules/prosemirror-keymap": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/prosemirror-keymap/-/prosemirror-keymap-1.2.2.tgz", - "integrity": "sha512-EAlXoksqC6Vbocqc0GtzCruZEzYgrn+iiGnNjsJsH4mrnIGex4qbLdWWNza3AW5W36ZRrlBID0eM6bdKH4OStQ==", - "license": "MIT", - "dependencies": { - "prosemirror-state": "^1.0.0", - "w3c-keyname": "^2.2.0" - } - }, - "node_modules/prosemirror-markdown": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/prosemirror-markdown/-/prosemirror-markdown-1.13.1.tgz", - "integrity": "sha512-Sl+oMfMtAjWtlcZoj/5L/Q39MpEnVZ840Xo330WJWUvgyhNmLBLN7MsHn07s53nG/KImevWHSE6fEj4q/GihHw==", - "license": "MIT", - "dependencies": { - "@types/markdown-it": "^14.0.0", - "markdown-it": "^14.0.0", - "prosemirror-model": "^1.20.0" - } - }, - "node_modules/prosemirror-menu": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/prosemirror-menu/-/prosemirror-menu-1.2.4.tgz", - "integrity": "sha512-S/bXlc0ODQup6aiBbWVsX/eM+xJgCTAfMq/nLqaO5ID/am4wS0tTCIkzwytmao7ypEtjj39i7YbJjAgO20mIqA==", - "license": "MIT", - "dependencies": { - "crelt": "^1.0.0", - "prosemirror-commands": "^1.0.0", - "prosemirror-history": "^1.0.0", - "prosemirror-state": "^1.0.0" - } - }, - "node_modules/prosemirror-model": { - "version": "1.24.1", - "resolved": "https://registry.npmjs.org/prosemirror-model/-/prosemirror-model-1.24.1.tgz", - "integrity": "sha512-YM053N+vTThzlWJ/AtPtF1j0ebO36nvbmDy4U7qA2XQB8JVaQp1FmB9Jhrps8s+z+uxhhVTny4m20ptUvhk0Mg==", - "license": "MIT", - "dependencies": { - "orderedmap": "^2.0.0" - } - }, - "node_modules/prosemirror-schema-basic": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/prosemirror-schema-basic/-/prosemirror-schema-basic-1.2.3.tgz", - "integrity": "sha512-h+H0OQwZVqMon1PNn0AG9cTfx513zgIG2DY00eJ00Yvgb3UD+GQ/VlWW5rcaxacpCGT1Yx8nuhwXk4+QbXUfJA==", - "license": "MIT", - "dependencies": { - "prosemirror-model": "^1.19.0" - } - }, - "node_modules/prosemirror-schema-list": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/prosemirror-schema-list/-/prosemirror-schema-list-1.5.0.tgz", - "integrity": "sha512-gg1tAfH1sqpECdhIHOA/aLg2VH3ROKBWQ4m8Qp9mBKrOxQRW61zc+gMCI8nh22gnBzd1t2u1/NPLmO3nAa3ssg==", - "license": "MIT", - "dependencies": { - "prosemirror-model": "^1.0.0", - "prosemirror-state": "^1.0.0", - "prosemirror-transform": "^1.7.3" - } - }, - "node_modules/prosemirror-state": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/prosemirror-state/-/prosemirror-state-1.4.3.tgz", - "integrity": "sha512-goFKORVbvPuAQaXhpbemJFRKJ2aixr+AZMGiquiqKxaucC6hlpHNZHWgz5R7dS4roHiwq9vDctE//CZ++o0W1Q==", - "license": "MIT", - "dependencies": { - "prosemirror-model": "^1.0.0", - "prosemirror-transform": "^1.0.0", - "prosemirror-view": "^1.27.0" - } - }, - "node_modules/prosemirror-tables": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/prosemirror-tables/-/prosemirror-tables-1.6.2.tgz", - "integrity": "sha512-97dKocVLrEVTQjZ4GBLdrrMw7Gv3no8H8yMwf5IRM9OoHrzbWpcH5jJxYgNQIRCtdIqwDctT1HdMHrGTiwp1dQ==", - "license": "MIT", - "dependencies": { - "prosemirror-keymap": "^1.2.2", - "prosemirror-model": "^1.24.1", - "prosemirror-state": "^1.4.3", - 
"prosemirror-transform": "^1.10.2", - "prosemirror-view": "^1.37.1" - } - }, - "node_modules/prosemirror-trailing-node": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/prosemirror-trailing-node/-/prosemirror-trailing-node-3.0.0.tgz", - "integrity": "sha512-xiun5/3q0w5eRnGYfNlW1uU9W6x5MoFKWwq/0TIRgt09lv7Hcser2QYV8t4muXbEr+Fwo0geYn79Xs4GKywrRQ==", - "license": "MIT", - "dependencies": { - "@remirror/core-constants": "3.0.0", - "escape-string-regexp": "^4.0.0" - }, - "peerDependencies": { - "prosemirror-model": "^1.22.1", - "prosemirror-state": "^1.4.2", - "prosemirror-view": "^1.33.8" - } - }, - "node_modules/prosemirror-transform": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/prosemirror-transform/-/prosemirror-transform-1.10.2.tgz", - "integrity": "sha512-2iUq0wv2iRoJO/zj5mv8uDUriOHWzXRnOTVgCzSXnktS/2iQRa3UUQwVlkBlYZFtygw6Nh1+X4mGqoYBINn5KQ==", - "license": "MIT", - "dependencies": { - "prosemirror-model": "^1.21.0" - } - }, - "node_modules/prosemirror-view": { - "version": "1.37.1", - "resolved": "https://registry.npmjs.org/prosemirror-view/-/prosemirror-view-1.37.1.tgz", - "integrity": "sha512-MEAnjOdXU1InxEmhjgmEzQAikaS6lF3hD64MveTPpjOGNTl87iRLA1HupC/DEV6YuK7m4Q9DHFNTjwIVtqz5NA==", + "node_modules/property-information": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", + "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", "license": "MIT", - "dependencies": { - "prosemirror-model": "^1.20.0", - "prosemirror-state": "^1.0.0", - "prosemirror-transform": "^1.1.0" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, "node_modules/proxy-from-env": { @@ -7482,19 +10477,10 @@ "node": ">=6" } }, - "node_modules/punycode.js": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", - "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/query-string": { - "version": "9.1.1", - "resolved": "https://registry.npmjs.org/query-string/-/query-string-9.1.1.tgz", - "integrity": "sha512-MWkCOVIcJP9QSKU52Ngow6bsAWAPlPK2MludXvcrS2bGZSl+T1qX9MZvRIkqUIkGLJquMJHWfsT6eRqUpp4aWg==", + "version": "9.3.1", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-9.3.1.tgz", + "integrity": "sha512-5fBfMOcDi5SA9qj5jZhWAcTtDfKF5WFdd2uD9nVNlbxVv1baq65aALy6qofpNEGELHvisjjasxQp7BlM9gvMzw==", "license": "MIT", "dependencies": { "decode-uri-component": "^0.4.1", @@ -7530,28 +10516,27 @@ "license": "MIT" }, "node_modules/ra-core": { - "version": "5.4.4", - "resolved": "https://registry.npmjs.org/ra-core/-/ra-core-5.4.4.tgz", - "integrity": "sha512-hP1pxNtuVWbsV4e0OWfGUrtbf0H9RpO2K1K1ffmbxqBaX3ZYlEP8sz6KFXyAEcMjnY9wfcGt/bab/P5sX0gD2A==", + "version": "5.14.2", + "resolved": "https://registry.npmjs.org/ra-core/-/ra-core-5.14.2.tgz", + "integrity": "sha512-Bri5hM+EVKeom3iRYeBwayp7AnBANhfL6eheiARhdFaz63DK+GjSjLwV9IWqJPkl6fP+5xk7hiPspg8wArHq0A==", "license": "MIT", "dependencies": { - "@tanstack/react-query": "^5.21.7", - "clsx": "^2.1.1", "date-fns": "^3.6.0", "eventemitter3": "^5.0.1", "inflection": "^3.0.0", "jsonexport": "^3.2.0", - "lodash": "~4.17.5", + "lodash": "^4.17.21", "query-string": "^7.1.3", "react-error-boundary": "^4.0.13", - "react-is": "^18.2.0" + "react-is": "^18.2.0 || ^19.0.0" }, "peerDependencies": { + 
"@tanstack/react-query": "^5.83.0", "react": "^18.0.0 || ^19.0.0", "react-dom": "^18.0.0 || ^19.0.0", - "react-hook-form": "^7.53.0", - "react-router": "^6.22.0", - "react-router-dom": "^6.22.0" + "react-hook-form": "^7.65.0", + "react-router": "^6.28.1 || ^7.1.1", + "react-router-dom": "^6.28.1 || ^7.1.1" } }, "node_modules/ra-core/node_modules/decode-uri-component": { @@ -7590,6 +10575,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/ra-core/node_modules/react-error-boundary": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-4.1.2.tgz", + "integrity": "sha512-GQDxZ5Jd+Aq/qUxbCm1UtzmL/s++V7zKgE8yMktJiCQXCCFZnMZh9ng+6/Ne6PjNSXH0L9CjeOEREfRnq6Duag==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.12.5" + }, + "peerDependencies": { + "react": ">=16.13.1" + } + }, "node_modules/ra-core/node_modules/split-on-first": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz", @@ -7600,83 +10597,58 @@ } }, "node_modules/ra-i18n-polyglot": { - "version": "5.4.4", - "resolved": "https://registry.npmjs.org/ra-i18n-polyglot/-/ra-i18n-polyglot-5.4.4.tgz", - "integrity": "sha512-C7ZNhw+65ZOofSYViHkITBYKHTPGYiasaMqBppbOlfDyqpwIqYC2PIG/Mtbmgxgtb9AJhOFwdUskd3ZrrxC+ag==", + "version": "5.14.2", + "resolved": "https://registry.npmjs.org/ra-i18n-polyglot/-/ra-i18n-polyglot-5.14.2.tgz", + "integrity": "sha512-iV9muILwbG8y4azS7mNrACPssPa+vFoN3ftEXAEPKmcSiZDhLqcil4d+Igsc/CoVMOS5DELwsGko/+frxCoANg==", "license": "MIT", "dependencies": { "node-polyglot": "^2.2.2", - "ra-core": "^5.4.4" - } - }, - "node_modules/ra-input-rich-text": { - "version": "5.4.4", - "resolved": "https://registry.npmjs.org/ra-input-rich-text/-/ra-input-rich-text-5.4.4.tgz", - "integrity": "sha512-i2shpSTt0Ho2rZrdOVQfFfI/ep4VwcuKGUct9rOd17HJM4djl/00jY7qEccyH4mDuARRQ4Byp002nLr4zKg1eg==", - "license": "MIT", - "dependencies": { - "@tiptap/core": "^2.0.3", - "@tiptap/extension-color": "^2.0.3", - "@tiptap/extension-highlight": "^2.0.3", - "@tiptap/extension-image": "^2.0.3", - "@tiptap/extension-link": "^2.0.3", - "@tiptap/extension-placeholder": "^2.0.3", - "@tiptap/extension-text-align": "^2.0.3", - "@tiptap/extension-text-style": "^2.0.3", - "@tiptap/extension-underline": "^2.0.3", - "@tiptap/pm": "^2.0.3", - "@tiptap/react": "^2.0.3", - "@tiptap/starter-kit": "^2.0.3", - "clsx": "^2.1.1" - }, - "peerDependencies": { - "@mui/icons-material": "^5.15.20", - "@mui/material": "^5.15.20", - "ra-core": "^5.0.0", - "ra-ui-materialui": "^5.0.0", - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" + "ra-core": "^5.14.2" } }, "node_modules/ra-language-english": { - "version": "5.4.4", - "resolved": "https://registry.npmjs.org/ra-language-english/-/ra-language-english-5.4.4.tgz", - "integrity": "sha512-OhcLrvax/dD//XOIMNRy0UhejUa3YG793tpd8JOaPiW7WJTINdINkTiUToB+s0X/WH4sdM6jrwKTJPLxeGWUSQ==", + "version": "5.14.2", + "resolved": "https://registry.npmjs.org/ra-language-english/-/ra-language-english-5.14.2.tgz", + "integrity": "sha512-eclr/jC9+ad36/YDsGu1xCq4ioyrklfvq/6pJlusS6/HOni8OqLo1LeFIY2FUfAID6Cg/M+iJ1l91i9EqODsvw==", "license": "MIT", "dependencies": { - "ra-core": "^5.4.4" + "ra-core": "^5.14.2" } }, "node_modules/ra-ui-materialui": { - "version": "5.4.4", - "resolved": "https://registry.npmjs.org/ra-ui-materialui/-/ra-ui-materialui-5.4.4.tgz", - "integrity": "sha512-cKMVxYKjnDZ2I1hW0zZOkvF+lUg6h8fHyQx/9kFnKwuwPmARFgM8hoNlJ3z6FZvTju6gWAX8n44wQkmHPurXSQ==", + "version": "5.14.2", 
+ "resolved": "https://registry.npmjs.org/ra-ui-materialui/-/ra-ui-materialui-5.14.2.tgz", + "integrity": "sha512-zNFIUXr6/DldHAzqZ8spfjlzi4Wi53481ZNrWfjfMCZH14U8ln4Zu9gR6AMTQ/F/1oLjadxiXuPPFyOm+2g+hQ==", "license": "MIT", "dependencies": { - "@tanstack/react-query": "^5.21.7", "autosuggest-highlight": "^3.1.1", "clsx": "^2.1.1", "css-mediaquery": "^0.1.2", - "dompurify": "^2.4.3", + "diacritic": "^0.0.2", + "dompurify": "^3.2.4", "inflection": "^3.0.0", "jsonexport": "^3.2.0", "lodash": "~4.17.5", "query-string": "^7.1.3", "react-dropzone": "^14.2.3", "react-error-boundary": "^4.0.13", + "react-hotkeys-hook": "^5.1.0", "react-transition-group": "^4.4.5" }, "peerDependencies": { - "@mui/icons-material": "^5.15.20", - "@mui/material": "^5.15.20", - "@mui/utils": "^5.15.20", + "@mui/icons-material": "^5.16.12 || ^6.0.0 || ^7.0.0", + "@mui/material": "^5.16.12 || ^6.0.0 || ^7.0.0", + "@mui/system": "^5.15.20 || ^6.0.0 || ^7.0.0", + "@mui/utils": "^5.15.20 || ^6.0.0 || ^7.0.0", + "@tanstack/react-query": "^5.83.0", + "csstype": "^3.1.3", "ra-core": "^5.0.0", "react": "^18.0.0 || ^19.0.0", "react-dom": "^18.0.0 || ^19.0.0", "react-hook-form": "*", - "react-is": "^18.0.0", - "react-router": "^6.22.0", - "react-router-dom": "^6.22.0" + "react-is": "^18.0.0 || ^19.0.0", + "react-router": "^6.28.1 || ^7.1.1", + "react-router-dom": "^6.28.1 || ^7.1.1" } }, "node_modules/ra-ui-materialui/node_modules/decode-uri-component": { @@ -7688,12 +10660,6 @@ "node": ">=0.10" } }, - "node_modules/ra-ui-materialui/node_modules/dompurify": { - "version": "2.5.8", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.5.8.tgz", - "integrity": "sha512-o1vSNgrmYMQObbSSvF/1brBYEQPHhV1+gsmrusO7/GXtp1T9rCS8cXFqVxK/9crT1jA6Ccv+5MTSjBNqr7Sovw==", - "license": "(MPL-2.0 OR Apache-2.0)" - }, "node_modules/ra-ui-materialui/node_modules/filter-obj": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz", @@ -7721,6 +10687,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/ra-ui-materialui/node_modules/react-error-boundary": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-4.1.2.tgz", + "integrity": "sha512-GQDxZ5Jd+Aq/qUxbCm1UtzmL/s++V7zKgE8yMktJiCQXCCFZnMZh9ng+6/Ne6PjNSXH0L9CjeOEREfRnq6Duag==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.12.5" + }, + "peerDependencies": { + "react": ">=16.13.1" + } + }, "node_modules/ra-ui-materialui/node_modules/split-on-first": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz", @@ -7731,34 +10709,32 @@ } }, "node_modules/react": { - "version": "18.3.1", - "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", - "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "version": "19.0.0", + "resolved": "https://registry.npmjs.org/react/-/react-19.0.0.tgz", + "integrity": "sha512-V8AVnmPIICiWpGfm6GLzCR/W5FXLchHop40W4nXBmdlEceh16rCN8O8LNWm5bh5XUX91fh7KpA+W0TgMKmgTpQ==", "license": "MIT", - "dependencies": { - "loose-envify": "^1.1.0" - }, "engines": { "node": ">=0.10.0" - } - }, - "node_modules/react-admin": { - "version": "5.4.4", - "resolved": "https://registry.npmjs.org/react-admin/-/react-admin-5.4.4.tgz", - "integrity": "sha512-hPRARv8Pza+6p+YGwXpyL5rx95n3oLUzqa8XiXXg49Ea3Lww/OCUKGsL7W9ES2ATx0VAwUy9PWg55xOGJh5Esw==", - "license": "MIT", - "dependencies": { - "@emotion/react": "^11.4.1", - "@emotion/styled": 
"^11.3.0", - "@mui/icons-material": "^5.15.20", - "@mui/material": "^5.15.20", - "ra-core": "^5.4.4", - "ra-i18n-polyglot": "^5.4.4", - "ra-language-english": "^5.4.4", - "ra-ui-materialui": "^5.4.4", - "react-hook-form": "^7.53.0", - "react-router": "^6.22.0", - "react-router-dom": "^6.22.0" + } + }, + "node_modules/react-admin": { + "version": "5.14.2", + "resolved": "https://registry.npmjs.org/react-admin/-/react-admin-5.14.2.tgz", + "integrity": "sha512-tEr/QvMVTOfeb/is5x4WZNV98vRXhufgT/bNexgree5aa6JimVTi4Z4kumYM3hQ15+BxhqaOaUQqxkZC7oy5qw==", + "license": "MIT", + "dependencies": { + "@emotion/react": "^11.14.0", + "@emotion/styled": "^11.14.0", + "@mui/icons-material": "^5.16.12 || ^6.0.0 || ^7.0.0", + "@mui/material": "^5.16.12 || ^6.0.0 || ^7.0.0", + "@tanstack/react-query": "^5.83.0", + "ra-core": "^5.14.2", + "ra-i18n-polyglot": "^5.14.2", + "ra-language-english": "^5.14.2", + "ra-ui-materialui": "^5.14.2", + "react-hook-form": "^7.65.0", + "react-router": "^6.28.1 || ^7.1.1", + "react-router-dom": "^6.28.1 || ^7.1.1" }, "peerDependencies": { "react": "^18.0.0 || ^19.0.0", @@ -7766,32 +10742,40 @@ } }, "node_modules/react-chartjs-2": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/react-chartjs-2/-/react-chartjs-2-5.3.0.tgz", - "integrity": "sha512-UfZZFnDsERI3c3CZGxzvNJd02SHjaSJ8kgW1djn65H1KK8rehwTjyrRKOG3VTMG8wtHZ5rgAO5oTHtHi9GCCmw==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/react-chartjs-2/-/react-chartjs-2-5.3.1.tgz", + "integrity": "sha512-h5IPXKg9EXpjoBzUfyWJvllMjG2mQ4EiuHQFhms/AjUm0XSZHhyRy2xVmLXHKrtcdrPO4mnGqRtYoD0vp95A0A==", "license": "MIT", "peerDependencies": { "chart.js": "^4.1.1", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, + "node_modules/react-devtools-inline": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/react-devtools-inline/-/react-devtools-inline-4.4.0.tgz", + "integrity": "sha512-ES0GolSrKO8wsKbsEkVeiR/ZAaHQTY4zDh1UW8DImVmm8oaGLl3ijJDvSGe+qDRKPZdPRnDtWWnSvvrgxXdThQ==", + "license": "MIT", + "dependencies": { + "es6-symbol": "^3" + } + }, "node_modules/react-dom": { - "version": "18.3.1", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", - "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "version": "19.0.0", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.0.0.tgz", + "integrity": "sha512-4GV5sHFG0e/0AD4X+ySy6UJd3jVl1iNsNHdpad0qhABJ11twS3TTBnseqsKurKcsNqCEFeGL3uLpVChpIO3QfQ==", "license": "MIT", "dependencies": { - "loose-envify": "^1.1.0", - "scheduler": "^0.23.2" + "scheduler": "^0.25.0" }, "peerDependencies": { - "react": "^18.3.1" + "react": "^19.0.0" } }, "node_modules/react-dropzone": { - "version": "14.3.5", - "resolved": "https://registry.npmjs.org/react-dropzone/-/react-dropzone-14.3.5.tgz", - "integrity": "sha512-9nDUaEEpqZLOz5v5SUcFA0CjM4vq8YbqO0WRls+EYT7+DvxUdzDPKNCPLqGfj3YL9MsniCLCD4RFA6M95V6KMQ==", + "version": "14.4.0", + "resolved": "https://registry.npmjs.org/react-dropzone/-/react-dropzone-14.4.0.tgz", + "integrity": "sha512-8VvsHqg9WGAr+wAnP0oVErK5HOwAoTOzRsxLPzbBXrtXtFfukkxMyuvdI/lJ+5OxtsrzmvWE5Eoo3Y4hMsaxpA==", "license": "MIT", "dependencies": { "attr-accept": "^2.2.4", @@ -7806,21 +10790,25 @@ } }, "node_modules/react-error-boundary": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-4.1.2.tgz", - "integrity": 
"sha512-GQDxZ5Jd+Aq/qUxbCm1UtzmL/s++V7zKgE8yMktJiCQXCCFZnMZh9ng+6/Ne6PjNSXH0L9CjeOEREfRnq6Duag==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-3.1.4.tgz", + "integrity": "sha512-uM9uPzZJTF6wRQORmSrvOIgt4lJ9MC1sNgEOj2XGsDTRE4kmpWxg7ENK9EWNKJRMAOY9z0MuF4yIfl6gp4sotA==", "license": "MIT", "dependencies": { "@babel/runtime": "^7.12.5" }, + "engines": { + "node": ">=10", + "npm": ">=6" + }, "peerDependencies": { "react": ">=16.13.1" } }, "node_modules/react-hook-form": { - "version": "7.54.2", - "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.54.2.tgz", - "integrity": "sha512-eHpAUgUjWbZocoQYUHposymRb4ZP6d0uwUnooL2uOybA9/3tPUvoAKqEWK1WaSiTxxOfTpffNZP7QwlnM3/gEg==", + "version": "7.71.1", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.71.1.tgz", + "integrity": "sha512-9SUJKCGKo8HUSsCO+y0CtqkqI5nNuaDqTxyqPsZPqIwudpj4rCrAz/jZV+jn57bx5gtZKOh3neQu94DXMc+w5w==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -7833,65 +10821,170 @@ "react": "^16.8.0 || ^17 || ^18 || ^19" } }, + "node_modules/react-hotkeys-hook": { + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/react-hotkeys-hook/-/react-hotkeys-hook-5.2.4.tgz", + "integrity": "sha512-BgKg+A1+TawkYluh5Bo4cTmcgMN5L29uhJbDUQdHwPX+qgXRjIPYU5kIDHyxnAwCkCBiu9V5OpB2mpyeluVF2A==", + "license": "MIT", + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, "node_modules/react-is": { - "version": "18.3.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", - "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "version": "19.0.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-19.0.0.tgz", + "integrity": "sha512-H91OHcwjZsbq3ClIDHMzBShc1rotbfACdWENsmEf0IFvZ3FgGPtdHMcsv45bQ1hAbgdfiA8SnxTKfDS+x/8m2g==", "license": "MIT" }, "node_modules/react-oidc-context": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/react-oidc-context/-/react-oidc-context-3.2.0.tgz", - "integrity": "sha512-ZLaCRLWV84Cn9pFdsatmblqxLMv0np69GWVXq9RWGqAjppdOGXNIbIxWMByIio0oSCVUwdeqwYRnJme0tjqd8A==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/react-oidc-context/-/react-oidc-context-3.3.0.tgz", + "integrity": "sha512-302T/ma4AOVAxrHdYctDSKXjCq9KNHT564XEO2yOPxRfxEP58xa4nz+GQinNl8x7CnEXECSM5JEjQJk3Cr5BvA==", "license": "MIT", "engines": { "node": ">=18" }, "peerDependencies": { "oidc-client-ts": "^3.1.0", - "react": ">=16.8.0" + "react": ">=16.14.0" } }, "node_modules/react-refresh": { - "version": "0.14.2", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.14.2.tgz", - "integrity": "sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA==", + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.18.0.tgz", + "integrity": "sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw==", "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" } }, + "node_modules/react-remove-scroll": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.7.2.tgz", + "integrity": "sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q==", + "license": "MIT", + "dependencies": { + "react-remove-scroll-bar": "^2.3.7", + "react-style-singleton": "^2.2.3", + "tslib": "^2.1.0", + 
"use-callback-ref": "^1.3.3", + "use-sidecar": "^1.1.3" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-remove-scroll-bar": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.8.tgz", + "integrity": "sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==", + "license": "MIT", + "dependencies": { + "react-style-singleton": "^2.2.2", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/react-router": { - "version": "6.28.1", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.28.1.tgz", - "integrity": "sha512-2omQTA3rkMljmrvvo6WtewGdVh45SpL9hGiCI9uUrwGGfNFDIvGK4gYJsKlJoNVi6AQZcopSCballL+QGOm7fA==", + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.13.0.tgz", + "integrity": "sha512-PZgus8ETambRT17BUm/LL8lX3Of+oiLaPuVTRH3l1eLvSPpKO3AvhAEb5N7ihAFZQrYDqkvvWfFh9p0z9VsjLw==", "license": "MIT", "dependencies": { - "@remix-run/router": "1.21.0" + "cookie": "^1.0.1", + "set-cookie-parser": "^2.6.0" }, "engines": { - "node": ">=14.0.0" + "node": ">=20.0.0" }, "peerDependencies": { - "react": ">=16.8" + "react": ">=18", + "react-dom": ">=18" + }, + "peerDependenciesMeta": { + "react-dom": { + "optional": true + } } }, "node_modules/react-router-dom": { - "version": "6.28.1", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.28.1.tgz", - "integrity": "sha512-YraE27C/RdjcZwl5UCqF/ffXnZDxpJdk9Q6jw38SZHjXs7NNdpViq2l2c7fO7+4uWaEfcwfGCv3RSg4e1By/fQ==", + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.13.0.tgz", + "integrity": "sha512-5CO/l5Yahi2SKC6rGZ+HDEjpjkGaG/ncEP7eWFTvFxbHP8yeeI0PxTDjimtpXYlR3b3i9/WIL4VJttPrESIf2g==", "license": "MIT", "dependencies": { - "@remix-run/router": "1.21.0", - "react-router": "6.28.1" + "react-router": "7.13.0" }, "engines": { - "node": ">=14.0.0" + "node": ">=20.0.0" }, "peerDependencies": { - "react": ">=16.8", - "react-dom": ">=16.8" + "react": ">=18", + "react-dom": ">=18" + } + }, + "node_modules/react-style-singleton": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.3.tgz", + "integrity": "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==", + "license": "MIT", + "dependencies": { + "get-nonce": "^1.0.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-syntax-highlighter": { + "version": "16.1.0", + "resolved": "https://registry.npmjs.org/react-syntax-highlighter/-/react-syntax-highlighter-16.1.0.tgz", + "integrity": "sha512-E40/hBiP5rCNwkeBN1vRP+xow1X0pndinO+z3h7HLsHyjztbyjfzNWNKuAsJj+7DLam9iT4AaaOZnueCU+Nplg==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.28.4", + "highlight.js": "^10.4.1", + "highlightjs-vue": "^1.0.0", + "lowlight": 
"^1.17.0", + "prismjs": "^1.30.0", + "refractor": "^5.0.0" + }, + "engines": { + "node": ">= 16.20.2" + }, + "peerDependencies": { + "react": ">= 0.14.0" } }, "node_modules/react-transition-group": { @@ -7933,11 +11026,21 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/regenerator-runtime": { - "version": "0.14.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", - "license": "MIT" + "node_modules/refractor": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/refractor/-/refractor-5.0.0.tgz", + "integrity": "sha512-QXOrHQF5jOpjjLfiNk5GFnWhRXvxjUVnlFxkeDmewR5sXkr3iM46Zo+CnRR8B+MDVqkULW4EcLVcRBNOPXHosw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/prismjs": "^1.0.0", + "hastscript": "^9.0.0", + "parse-entities": "^4.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } }, "node_modules/regexp-tree": { "version": "0.1.27", @@ -7980,242 +11083,60 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", - "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rewire": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/rewire/-/rewire-7.0.0.tgz", - "integrity": "sha512-DyyNyzwMtGYgu0Zl/ya0PR/oaunM+VuCuBxCuhYJHHaV0V+YvYa3bBGxb5OZ71vndgmp1pYY8F4YOwQo1siRGw==", - "dev": true, - "license": "MIT", - "dependencies": { - "eslint": "^8.47.0" - } - }, - "node_modules/rewire/node_modules/@eslint/eslintrc": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", - "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^9.6.0", - "globals": "^13.19.0", - "ignore": "^5.2.0", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.1.2", - "strip-json-comments": "^3.1.1" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - 
"funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/rewire/node_modules/@eslint/js": { - "version": "8.57.1", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", - "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/rewire/node_modules/doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/rewire/node_modules/eslint": { - "version": "8.57.1", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", - "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", - "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", - "dev": true, - "license": "MIT", - "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.4", - "@eslint/js": "8.57.1", - "@humanwhocodes/config-array": "^0.13.0", - "@humanwhocodes/module-importer": "^1.0.1", - "@nodelib/fs.walk": "^1.2.8", - "@ungap/structured-clone": "^1.2.0", - "ajv": "^6.12.4", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.3.2", - "doctrine": "^3.0.0", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.2.2", - "eslint-visitor-keys": "^3.4.3", - "espree": "^9.6.1", - "esquery": "^1.4.2", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "find-up": "^5.0.0", - "glob-parent": "^6.0.2", - "globals": "^13.19.0", - "graphemer": "^1.4.0", - "ignore": "^5.2.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "is-path-inside": "^3.0.3", - "js-yaml": "^4.1.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.1.2", - "natural-compare": "^1.4.0", - "optionator": "^0.9.3", - "strip-ansi": "^6.0.1", - "text-table": "^0.2.0" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/rewire/node_modules/eslint-scope": { - "version": "7.2.2", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", - "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" + "license": "MIT", + "engines": { + "node": ">=0.10.0" } }, - "node_modules/rewire/node_modules/espree": { - "version": "9.6.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", - "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", - "dev": true, - "license": "BSD-2-Clause", + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + 
"integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "license": "MIT", "dependencies": { - "acorn": "^8.9.0", - "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^3.4.1" + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": ">= 0.4" }, "funding": { - "url": "https://opencollective.com/eslint" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/rewire/node_modules/file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", - "dev": true, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "license": "MIT", - "dependencies": { - "flat-cache": "^3.0.4" - }, "engines": { - "node": "^10.12.0 || >=12.0.0" + "node": ">=4" } }, - "node_modules/rewire/node_modules/flat-cache": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", - "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", "dev": true, "license": "MIT", - "dependencies": { - "flatted": "^3.2.9", - "keyv": "^4.5.3", - "rimraf": "^3.0.2" - }, "engines": { - "node": "^10.12.0 || >=12.0.0" + "iojs": ">=1.0.0", + "node": ">=0.10.0" } }, - "node_modules/rewire/node_modules/globals": { - "version": "13.24.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", - "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "node_modules/rewire": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/rewire/-/rewire-9.0.1.tgz", + "integrity": "sha512-dnbLeTwHpXvWJjswC6CshXUUnnpE5AVhlayVRvDJhJx5ejbO4nbj1IXqN2urErgB7TpHUAMpf6iPDhQIxeSQOQ==", "dev": true, "license": "MIT", "dependencies": { - "type-fest": "^0.20.2" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "eslint": "^9.30", + "pirates": "^4.0.7" } }, "node_modules/rimraf": { @@ -8242,13 +11163,13 @@ "license": "Unlicense" }, "node_modules/rollup": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.30.1.tgz", - "integrity": "sha512-mlJ4glW020fPuLi7DkM/lN97mYEZGWeqBnrljzN0gs7GLctqX3lNWxKQ7Gl712UAX+6fog/L3jh4gb7R6aVi3w==", + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.57.1.tgz", + "integrity": "sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==", "dev": true, "license": "MIT", "dependencies": { - "@types/estree": "1.0.6" + "@types/estree": "1.0.8" }, "bin": { "rollup": "dist/bin/rollup" @@ -8258,34 +11179,34 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.30.1", - "@rollup/rollup-android-arm64": "4.30.1", - "@rollup/rollup-darwin-arm64": "4.30.1", 
- "@rollup/rollup-darwin-x64": "4.30.1", - "@rollup/rollup-freebsd-arm64": "4.30.1", - "@rollup/rollup-freebsd-x64": "4.30.1", - "@rollup/rollup-linux-arm-gnueabihf": "4.30.1", - "@rollup/rollup-linux-arm-musleabihf": "4.30.1", - "@rollup/rollup-linux-arm64-gnu": "4.30.1", - "@rollup/rollup-linux-arm64-musl": "4.30.1", - "@rollup/rollup-linux-loongarch64-gnu": "4.30.1", - "@rollup/rollup-linux-powerpc64le-gnu": "4.30.1", - "@rollup/rollup-linux-riscv64-gnu": "4.30.1", - "@rollup/rollup-linux-s390x-gnu": "4.30.1", - "@rollup/rollup-linux-x64-gnu": "4.30.1", - "@rollup/rollup-linux-x64-musl": "4.30.1", - "@rollup/rollup-win32-arm64-msvc": "4.30.1", - "@rollup/rollup-win32-ia32-msvc": "4.30.1", - "@rollup/rollup-win32-x64-msvc": "4.30.1", + "@rollup/rollup-android-arm-eabi": "4.57.1", + "@rollup/rollup-android-arm64": "4.57.1", + "@rollup/rollup-darwin-arm64": "4.57.1", + "@rollup/rollup-darwin-x64": "4.57.1", + "@rollup/rollup-freebsd-arm64": "4.57.1", + "@rollup/rollup-freebsd-x64": "4.57.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.57.1", + "@rollup/rollup-linux-arm-musleabihf": "4.57.1", + "@rollup/rollup-linux-arm64-gnu": "4.57.1", + "@rollup/rollup-linux-arm64-musl": "4.57.1", + "@rollup/rollup-linux-loong64-gnu": "4.57.1", + "@rollup/rollup-linux-loong64-musl": "4.57.1", + "@rollup/rollup-linux-ppc64-gnu": "4.57.1", + "@rollup/rollup-linux-ppc64-musl": "4.57.1", + "@rollup/rollup-linux-riscv64-gnu": "4.57.1", + "@rollup/rollup-linux-riscv64-musl": "4.57.1", + "@rollup/rollup-linux-s390x-gnu": "4.57.1", + "@rollup/rollup-linux-x64-gnu": "4.57.1", + "@rollup/rollup-linux-x64-musl": "4.57.1", + "@rollup/rollup-openbsd-x64": "4.57.1", + "@rollup/rollup-openharmony-arm64": "4.57.1", + "@rollup/rollup-win32-arm64-msvc": "4.57.1", + "@rollup/rollup-win32-ia32-msvc": "4.57.1", + "@rollup/rollup-win32-x64-gnu": "4.57.1", + "@rollup/rollup-win32-x64-msvc": "4.57.1", "fsevents": "~2.3.2" } }, - "node_modules/rope-sequence": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/rope-sequence/-/rope-sequence-1.3.4.tgz", - "integrity": "sha512-UT5EDe2cu2E/6O4igUr5PSFs23nvvukicWHx6GnOPlHAiiYbzNuCRQCuiUdHJQcqKalLKlrYJnjY0ySGsXNQXQ==", - "license": "MIT" - }, "node_modules/roughjs": { "version": "4.6.6", "resolved": "https://registry.npmjs.org/roughjs/-/roughjs-4.6.6.tgz", @@ -8332,6 +11253,18 @@ "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==", "license": "BSD-3-Clause" }, + "node_modules/sade": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "license": "MIT", + "dependencies": { + "mri": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/safe-array-concat": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", @@ -8404,18 +11337,15 @@ "license": "MIT" }, "node_modules/scheduler": { - "version": "0.23.2", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", - "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", - "license": "MIT", - "dependencies": { - "loose-envify": "^1.1.0" - } + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.25.0.tgz", + "integrity": "sha512-xFVuu11jh+xcO7JOAGJNOXld8/TcEHK/4CituBUeUb5hqxJLj9YuemAEuvm9gQ/+pgXYfbQuqAkiYu+u7YEsNA==", + "license": "MIT" }, 
"node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "bin": { @@ -8425,6 +11355,12 @@ "node": ">=10" } }, + "node_modules/set-cookie-parser": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", + "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", + "license": "MIT" + }, "node_modules/set-function-length": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", @@ -8591,6 +11527,16 @@ "node": ">=0.10.0" } }, + "node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/split-on-first": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-3.0.0.tgz", @@ -8603,6 +11549,38 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/static-browser-server": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/static-browser-server/-/static-browser-server-1.0.3.tgz", + "integrity": "sha512-ZUyfgGDdFRbZGGJQ1YhiM930Yczz5VlbJObrQLlk24+qNHVQx4OlLcYswEUo3bIyNAbQUIUR9Yr5/Hqjzqb4zA==", + "license": "Apache-2.0", + "dependencies": { + "@open-draft/deferred-promise": "^2.1.0", + "dotenv": "^16.0.3", + "mime-db": "^1.52.0", + "outvariant": "^1.3.0" + } + }, + "node_modules/stop-iteration-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", + "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "internal-slot": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/strict-event-emitter": { + "version": "0.4.6", + "resolved": "https://registry.npmjs.org/strict-event-emitter/-/strict-event-emitter-0.4.6.tgz", + "integrity": "sha512-12KWeb+wixJohmnwNFerbyiBrAlq5qJLwIt38etRtKtmmHyDSoGlIqFE9wx+4IwG0aDjI7GV8tc8ZccjWZZtTg==", + "license": "MIT" + }, "node_modules/strict-uri-encode": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz", @@ -8724,6 +11702,20 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/stringify-entities": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", + "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", + "license": "MIT", + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/strip-ansi": { "version": 
"6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -8749,6 +11741,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/style-mod": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.3.tgz", + "integrity": "sha512-i/n8VsZydrugj3Iuzll8+x/00GH2vnYsk1eomD8QiRrSAeW6ItbCQDtfXCeJHd0iwiNagqjQkvpvREEPtW3IoQ==", + "license": "MIT" + }, "node_modules/stylis": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.2.0.tgz", @@ -8780,6 +11778,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/tabbable": { + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.4.0.tgz", + "integrity": "sha512-05PUHKSNE8ou2dwIxTngl4EzcnsCDZGJ/iCLtDflR/SHB/ny14rXc+qU5P4mG9JkusiV7EivzY9Mhm55AzAvCg==", + "license": "MIT" + }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -8788,31 +11792,29 @@ "license": "MIT" }, "node_modules/tinyexec": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", - "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", - "license": "MIT" - }, - "node_modules/tippy.js": { - "version": "6.3.7", - "resolved": "https://registry.npmjs.org/tippy.js/-/tippy.js-6.3.7.tgz", - "integrity": "sha512-E1d3oP2emgJ9dRQZdf3Kkn0qJgI6ZLpyS5z6ZkY1DF3kaQaBsGZsndEpHwx+eC+tYM41HaSNvNtLx8tU57FzTQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", + "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", "license": "MIT", - "dependencies": { - "@popperjs/core": "^2.9.0" + "engines": { + "node": ">=18" } }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", "dev": true, "license": "MIT", "dependencies": { - "is-number": "^7.0.0" + "fdir": "^6.5.0", + "picomatch": "^4.0.3" }, "engines": { - "node": ">=8.0" + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" } }, "node_modules/toggle-selection": { @@ -8822,9 +11824,9 @@ "license": "MIT" }, "node_modules/ts-api-utils": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.0.0.tgz", - "integrity": "sha512-xCt/TOAc+EOHS1XPnijD3/yzpH6qg2xppZO1YDqGoVsNXfQfzHpOdNuXwrwOU8u4ITXJyDCTyt8w5g1sZv9ynQ==", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz", + "integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==", "dev": true, "license": "MIT", "engines": { @@ -8850,9 +11852,9 @@ "license": "0BSD" }, "node_modules/tss-react": { - "version": "4.9.14", - "resolved": "https://registry.npmjs.org/tss-react/-/tss-react-4.9.14.tgz", - "integrity": "sha512-nAj4RCQk3ADzrmtxmTcmN1B9EKxPMIxuCfJ3ll964CksndJ2/ZImF6rAMo2Kud5yE3ENXHpPIBHCyuMtgptMvw==", + "version": "4.9.20", + "resolved": 
"https://registry.npmjs.org/tss-react/-/tss-react-4.9.20.tgz", + "integrity": "sha512-+tecs5hEKZmPqNDtiq5Gx2GxjrQXbV5JuOeWkV+eOf99qiIUkE3Vcn07zNLHws06iPfH2H4t5VqoVjIdCMS7hw==", "license": "MIT", "dependencies": { "@emotion/cache": "*", @@ -8862,9 +11864,9 @@ "peerDependencies": { "@emotion/react": "^11.4.1", "@emotion/server": "^11.4.0", - "@mui/material": "^5.0.0 || ^6.0.0", + "@mui/material": "^5.0.0 || ^6.0.0 || ^7.0.0", "@types/react": "^16.8.0 || ^17.0.2 || ^18.0.0 || ^19.0.0", - "react": "^16.8.0 || ^17.0.2 || ^18.0.0 || ^19.0.0" + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" }, "peerDependenciesMeta": { "@emotion/server": { @@ -8875,6 +11877,12 @@ } } }, + "node_modules/type": { + "version": "2.7.3", + "resolved": "https://registry.npmjs.org/type/-/type-2.7.3.tgz", + "integrity": "sha512-8j+1QmAbPvLZow5Qpi6NCaN8FB60p/6x8/vfNqOk/hC+HuvFZhL4+WfekuhQLiqFZXOgQdrs3B+XxEmCc6b3FQ==", + "license": "ISC" + }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -8980,9 +11988,9 @@ } }, "node_modules/typescript": { - "version": "5.7.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz", - "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==", + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "dev": true, "license": "Apache-2.0", "bin": { @@ -8993,16 +12001,10 @@ "node": ">=14.17" } }, - "node_modules/uc.micro": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", - "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", - "license": "MIT" - }, "node_modules/ufo": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.4.tgz", - "integrity": "sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==", + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.3.tgz", + "integrity": "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==", "license": "MIT" }, "node_modules/unbox-primitive": { @@ -9025,16 +12027,93 @@ } }, "node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", "dev": true, "license": "MIT" }, + "node_modules/unidiff": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/unidiff/-/unidiff-1.0.4.tgz", + "integrity": "sha512-ynU0vsAXw0ir8roa+xPCUHmnJ5goc5BTM2Kuc3IJd8UwgaeRs7VSD5+eeaQL+xp1JtB92hu/Zy/Lgy7RZcr1pQ==", + "license": "MIT", + "dependencies": { + "diff": "^5.1.0" + } + }, + "node_modules/unist-util-is": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz", + "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + 
"type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-position-from-estree": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position-from-estree/-/unist-util-position-from-estree-2.0.0.tgz", + "integrity": "sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.1.0.tgz", + "integrity": "sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit-parents": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz", + "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/update-browserslist-db": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.2.tgz", - "integrity": "sha512-PPypAm5qvlD7XMZC3BujecnaOxwhrtoFR+Dqkk5Aa/6DssiH0ibKoketaj9w8LP7Bont1rYeoV5plxD7RTEPRg==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", "dev": true, "funding": [ { @@ -9072,10 +12151,53 @@ "punycode": "^2.1.0" } }, + "node_modules/use-callback-ref": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.3.tgz", + "integrity": "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/use-sidecar": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.3.tgz", + "integrity": "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==", + "license": "MIT", + "dependencies": { + "detect-node-es": "^1.1.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" 
+ }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/use-sync-external-store": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.4.0.tgz", - "integrity": "sha512-9WXSPC5fMv61vaupRkCKCxsPxBocVnwakBEkMIHHpkTTg6icbJtg6jzgtLDm4bl3cSHAca52rYWih0k4K3PfHw==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz", + "integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==", "license": "MIT", "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" @@ -9089,34 +12211,69 @@ "license": "MIT" }, "node_modules/uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", + "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", "funding": [ "https://github.com/sponsors/broofa", "https://github.com/sponsors/ctavan" ], "license": "MIT", "bin": { - "uuid": "dist/bin/uuid" + "uuid": "dist/esm/bin/uuid" + } + }, + "node_modules/uvu": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/uvu/-/uvu-0.5.6.tgz", + "integrity": "sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==", + "license": "MIT", + "dependencies": { + "dequal": "^2.0.0", + "diff": "^5.0.0", + "kleur": "^4.0.3", + "sade": "^1.7.3" + }, + "bin": { + "uvu": "bin.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/vfile-message": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz", + "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, "node_modules/vite": { - "version": "6.0.7", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.0.7.tgz", - "integrity": "sha512-RDt8r/7qx9940f8FcOIAH9PTViRrghKaK2K1jY3RaAURrEUbm9Du1mJ72G+jlhtG3WwodnfzY8ORQZbBavZEAQ==", + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", "dev": true, "license": "MIT", "dependencies": { - "esbuild": "^0.24.2", - "postcss": "^8.4.49", - "rollup": "^4.23.0" + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" }, "bin": { "vite": "bin/vite.js" }, "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + "node": "^20.19.0 || >=22.12.0" }, "funding": { "url": "https://github.com/vitejs/vite?sponsor=1" @@ -9125,14 +12282,14 @@ "fsevents": "~2.3.3" }, "peerDependencies": { - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", - "less": "*", + "less": "^4.0.0", "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - 
"sugarss": "*", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" @@ -9217,9 +12374,9 @@ "license": "MIT" }, "node_modules/vscode-uri": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.8.tgz", - "integrity": "sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.1.0.tgz", + "integrity": "sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==", "license": "MIT" }, "node_modules/w3c-keyname": { @@ -9321,16 +12478,17 @@ } }, "node_modules/which-typed-array": { - "version": "1.1.18", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.18.tgz", - "integrity": "sha512-qEcY+KJYlWyLH9vNbsr6/5j59AXk5ni5aakf8ldzBvGde6Iz4sxZGkJyWSAueTG7QhOvNRYb1lDdFmL5Td0QKA==", + "version": "1.1.20", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.20.tgz", + "integrity": "sha512-LYfpUkmqwl0h9A2HL09Mms427Q1RZWuOHsukfVcKRq9q95iQxdw0ix1JQrqbcDR9PH1QDwf5Qo8OZb5lksZ8Xg==", "dev": true, "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "for-each": "^0.3.3", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-tostringtag": "^1.0.2" }, @@ -9392,16 +12550,19 @@ "license": "ISC" }, "node_modules/yaml": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz", - "integrity": "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==", + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", + "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", "dev": true, "license": "ISC", "bin": { "yaml": "bin.mjs" }, "engines": { - "node": ">= 14" + "node": ">= 14.6" + }, + "funding": { + "url": "https://github.com/sponsors/eemeli" } }, "node_modules/yargs": { @@ -9444,10 +12605,33 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/zod": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", + "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-validation-error": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/zod-validation-error/-/zod-validation-error-4.0.2.tgz", + "integrity": "sha512-Q6/nZLe6jxuU80qb/4uJ4t5v2VEZ44lzQjPDhYJNztRQ4wyWc6VF3D3Kb/fAuPetZQnhS3hnajCf9CsWesghLQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "zod": "^3.25.0 || ^4.0.0" + } + }, "node_modules/zustand": { - "version": "4.5.6", - "resolved": "https://registry.npmjs.org/zustand/-/zustand-4.5.6.tgz", - "integrity": "sha512-ibr/n1hBzLLj5Y+yUcU7dYw8p6WnIVzdJbnX+1YpaScvZVF2ziugqHs+LAmHw4lWO9c/zRj+K1ncgWDQuthEdQ==", + "version": "4.5.7", + "resolved": "https://registry.npmjs.org/zustand/-/zustand-4.5.7.tgz", + "integrity": "sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw==", "license": "MIT", "dependencies": { "use-sync-external-store": "^1.2.2" @@ 
-9471,6 +12655,16 @@ "optional": true } } + }, + "node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } } } } diff --git a/frontend/package.json b/frontend/package.json index 14426f59e..a7b1f4d04 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,33 +1,43 @@ { "name": "secobserve", - "version": "1.26.0", + "version": "1.48.0", "license": "BSD-3-Clause", "description": "SecObserve is an open source vulnerability management system for software development and cloud environments.", "private": true, "dependencies": { "@emotion/react": "11.14.0", - "@emotion/styled": "11.14.0", - "@fortawesome/fontawesome-svg-core": "6.7.2", - "@fortawesome/free-brands-svg-icons": "6.7.2", - "@fortawesome/free-solid-svg-icons": "6.7.2", - "@fortawesome/react-fontawesome": "0.2.2", - "@textea/json-viewer": "3.5.0", - "axios": "1.7.9", - "chart.js": "4.4.7", - "markdown-to-jsx": "7.7.3", - "mermaid": "11.4.1", - "oidc-client-ts": "3.1.0", + "@emotion/styled": "11.14.1", + "@fortawesome/fontawesome-svg-core": "7.2.0", + "@fortawesome/free-brands-svg-icons": "7.2.0", + "@fortawesome/free-solid-svg-icons": "7.2.0", + "@fortawesome/react-fontawesome": "3.2.0", + "@mdxeditor/editor": "3.52.4", + "@mui/icons-material": "7.3.8", + "@mui/material": "7.3.8", + "@textea/json-viewer": "4.0.1", + "axios": "1.13.5", + "chart.js": "4.5.1", + "cm6-theme-basic-light": "0.2.0", + "cm6-theme-basic-dark": "0.2.0", + "humanize-duration": "3.33.2", + "markdown-to-jsx": "9.7.4", + "marked": "17.0.2", + "mermaid": "11.12.3", + "oidc-client-ts": "3.4.1", + "packageurl-js": "2.0.1", "prop-types": "15.8.1", - "query-string": "9.1.1", - "ra-input-rich-text": "5.4.4", - "react": "18.3.1", - "react-admin": "5.4.4", - "react-chartjs-2": "5.3.0", - "react-dom": "18.3.1", - "react-is": "18.3.1", - "react-oidc-context": "3.2.0", + "query-string": "9.3.1", + "react": "19.0.0", + "react-admin": "5.14.2", + "ra-core": "5.14.2", + "ra-ui-materialui": "5.14.2", + "react-chartjs-2": "5.3.1", + "react-dom": "19.0.0", + "react-is": "19.0.0", + "react-oidc-context": "3.3.0", + "react-syntax-highlighter": "16.1.0", "runtime-env-cra": "file:lib/runtime-env-cra", - "tss-react": "4.9.14" + "tss-react": "4.9.20" }, "scripts": { "start": "NODE_ENV=development ./node_modules/runtime-env-cra/lib/index.js --config-name=./public/runtime-env.js && vite", @@ -43,29 +53,28 @@ "not op_mini all" ], "devDependencies": { - "@eslint/compat": "1.2.5", - "@eslint/eslintrc": "3.2.0", - "@eslint/js": "9.18.0", + "@eslint/eslintrc": "3.3.3", + "@eslint/js": "9.39.2", "@microsoft/eslint-formatter-sarif": "3.1.0", - "@trivago/prettier-plugin-sort-imports": "5.2.1", - "@types/inflection": "1.13.2", - "@types/node": "22.10.6", - "@types/prop-types": "15.7.14", - "@types/react": "18.3.18", - "@types/react-dom": "18.3.5", - "@types/recharts": "1.8.29", - "@typescript-eslint/eslint-plugin": "8.20.0", - "@typescript-eslint/parser": "8.20.0", - "@vitejs/plugin-react": "4.3.4", - "eslint": "9.18.0", - "eslint-plugin-react": "7.37.4", - "eslint-plugin-react-hooks": "rc", + "@trivago/prettier-plugin-sort-imports": "6.0.2", + "@types/humanize-duration": "3.27.4", + "@types/node": "24.10.13", + "@types/prop-types": "15.7.15", + "@types/react": "19.2.0", + "@types/react-dom": "19.2.0", + 
"@types/react-syntax-highlighter": "15.5.13", + "@typescript-eslint/eslint-plugin": "8.56.0", + "@typescript-eslint/parser": "8.56.0", + "@vitejs/plugin-react": "5.1.4", + "eslint": "9.39.2", + "eslint-plugin-react": "7.37.5", + "eslint-plugin-react-hooks": "7.0.0", "eslint-plugin-security": "3.0.1", - "globals": "15.14.0", - "prettier": "3.4.2", - "rewire": "7.0.0", - "typescript": "5.7.3", - "vite": "6.0.7", - "yaml": "2.7.0" + "globals": "17.3.0", + "prettier": "3.8.1", + "rewire": "9.0.1", + "typescript": "5.9.3", + "vite": "7.3.1", + "yaml": "2.8.2" } } diff --git a/frontend/public/background_login.png b/frontend/public/background_login.png index a628fa6da..fe00dc1a4 100644 Binary files a/frontend/public/background_login.png and b/frontend/public/background_login.png differ diff --git a/frontend/public/favicon.ico b/frontend/public/favicon.ico index 5866301af..6f161c43d 100644 Binary files a/frontend/public/favicon.ico and b/frontend/public/favicon.ico differ diff --git a/frontend/public/maibornwolff.svg b/frontend/public/maibornwolff.svg index e3a5ea85a..16b861309 100644 --- a/frontend/public/maibornwolff.svg +++ b/frontend/public/maibornwolff.svg @@ -1,108 +1,27 @@ - - + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/frontend/public/secobserve.svg b/frontend/public/secobserve.svg index e083ccf3d..ac3b2f00b 100644 --- a/frontend/public/secobserve.svg +++ b/frontend/public/secobserve.svg @@ -1,14 +1,14 @@ - + style="font-size:617.931px;line-height:386.207px;font-family:'Bitstream Vera Sans';letter-spacing:0px;word-spacing:0px;fill:#1e194b;fill-opacity:1;stroke-width:15.4483px" + transform="translate(-56.370621,1.3427734e-5)"> + + + + + + + diff --git a/frontend/public/secobserve_white.svg b/frontend/public/secobserve_white.svg index 6a1303fc3..36a3ad0a4 100644 --- a/frontend/public/secobserve_white.svg +++ b/frontend/public/secobserve_white.svg @@ -1,14 +1,14 @@ - + style="font-size:617.931px;line-height:386.207px;font-family:'Bitstream Vera Sans';letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke-width:15.4483px" + transform="translate(-56.370621,1.3427734e-5)"> + + + + + + + diff --git a/frontend/sonar-project.properties b/frontend/sonar-project.properties new file mode 100644 index 000000000..dd94d72a2 --- /dev/null +++ b/frontend/sonar-project.properties @@ -0,0 +1,18 @@ +sonar.projectKey=secobserve_secobserve-frontend +sonar.organization=secobserve + + +# This is the name and version displayed in the SonarCloud UI. +sonar.projectName=SecObserve Frontend +#sonar.projectVersion=1.0 + + +# Path is relative to the sonar-project.properties file. Replace "\" by "/" on Windows. +#sonar.sources=. + +# Encoding of the source code. 
Default is default system encoding +#sonar.sourceEncoding=UTF-8 + +sonar.issue.ignore.multicriteria=e1 +sonar.issue.ignore.multicriteria.e1.ruleKey=typescript:S6671 +sonar.issue.ignore.multicriteria.e1.resourceKey=**/*.ts diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index e72078220..68ac3ba8a 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -1,22 +1,22 @@ -import { Admin, CustomRoutes, Resource, addRefreshAuthToDataProvider } from "react-admin"; +import { Admin, CustomRoutes, Resource, addRefreshAuthToAuthProvider, addRefreshAuthToDataProvider } from "react-admin"; import { AuthProvider } from "react-oidc-context"; import { Route } from "react-router"; import AccessControlAdministration from "./access_control/access_control_administration/AccessControlAdministration"; import authProvider from "./access_control/auth_provider/authProvider"; -import { oidcConfig } from "./access_control/auth_provider/authProvider"; -import { updateRefreshToken } from "./access_control/auth_provider/functions"; +import { oidcConfig, updateRefreshToken } from "./access_control/auth_provider/oidc"; import authorization_groups from "./access_control/authorization_groups"; import { Login } from "./access_control/login"; import users from "./access_control/users"; +import periodic_tasks from "./background_tasks/periodic_tasks"; import { Layout } from "./commons/layout"; import { darkTheme, lightTheme } from "./commons/layout/themes"; -import notifications from "./commons/notifications"; import { queryClient } from "./commons/queryClient"; import drfProvider from "./commons/ra-data-django-rest-framework"; import settings from "./commons/settings"; import UserSettings from "./commons/user_settings/UserSettings"; import { getTheme } from "./commons/user_settings/functions"; +import components from "./core/components"; import evidences from "./core/evidences"; import observation_logs from "./core/observation_logs"; import observations from "./core/observations"; @@ -25,15 +25,18 @@ import products from "./core/products"; import Reviews from "./core/reviews/Reviews"; import { Dashboard } from "./dashboard"; import parsers from "./import_observations/parsers"; +import concluded_licenses from "./licenses/concluded_licenses"; import LicenseAdministration from "./licenses/license_administration/LicenseAdministration"; import license_component_evidences from "./licenses/license_component_evidences"; import license_components from "./licenses/license_components"; import license_groups from "./licenses/license_groups"; import license_policies from "./licenses/license_policies"; import licenses from "./licenses/licenses"; +import notifications from "./notifications"; import general_rules from "./rules/general_rules"; import product_rules from "./rules/product_rules"; import csaf from "./vex/csaf"; +import cyclonedx from "./vex/cyclonedx"; import openvex from "./vex/openvex"; import vex_counters from "./vex/vex_counters"; import vex_documents from "./vex/vex_documents"; @@ -49,7 +52,7 @@ const App = () => { title="" dataProvider={addRefreshAuthToDataProvider(drfProvider(), updateRefreshToken)} queryClient={queryClient} - authProvider={authProvider} + authProvider={addRefreshAuthToAuthProvider(authProvider, updateRefreshToken)} dashboard={Dashboard} loginPage={Login} layout={Layout} @@ -66,6 +69,7 @@ const App = () => { } /> } /> } /> + } /> } /> } /> } /> @@ -75,6 +79,7 @@ const App = () => { name="product_groups" {...product_groups} // nosemgrep: 
typescript.react.best-practice.react-props-spreading.react-props-spreading // nosemgrep because the props are well defined in the import + options={{ label: "Product Groups" }} recordRepresentation={(record) => `${trim_string(record.name)}`} /> { name="observation_logs" {...observation_logs} // nosemgrep: typescript.react.best-practice.react-props-spreading.react-props-spreading // nosemgrep because the props are well defined in the import + options={{ label: "Observation Logs" }} /> { name="general_rules" {...general_rules} // nosemgrep: typescript.react.best-practice.react-props-spreading.react-props-spreading // nosemgrep because the props are well defined in the import + options={{ label: "General Rules" }} recordRepresentation={(record) => `${trim_string(record.name)}`} /> `${trim_string(record.name)}`} /> { name="authorization_groups" {...authorization_groups} // nosemgrep: typescript.react.best-practice.react-props-spreading.react-props-spreading // nosemgrep because the props are well defined in the import + options={{ label: "Authorization Groups" }} recordRepresentation={(record) => `${trim_string(record.name)}`} /> { name="vex/csaf" {...csaf} // nosemgrep: typescript.react.best-practice.react-props-spreading.react-props-spreading // nosemgrep because the props are well defined in the import + options={{ label: "exported CSAF documents" }} + recordRepresentation={(record) => + `${trim_string(record.document_id_prefix + " / " + record.document_base_id)}` + } /> + `${trim_string(record.document_id_prefix + " / " + record.document_base_id)}` + } + /> + + `${trim_string(record.document_id_prefix + " / " + record.document_base_id)}` + } /> `${trim_string(record.document_id_prefix + "_" + record.year)}`} /> `${trim_string(record.document_id)}`} /> + + + + `${trim_string(record.product_name + " / " + record.component_name_version)}` + } + /> + + + + + + diff --git a/frontend/src/access_control/access_control_administration/AccessControlAdministration.tsx b/frontend/src/access_control/access_control_administration/AccessControlAdministration.tsx index 98c355542..b7e2d4bed 100644 --- a/frontend/src/access_control/access_control_administration/AccessControlAdministration.tsx +++ b/frontend/src/access_control/access_control_administration/AccessControlAdministration.tsx @@ -1,12 +1,13 @@ import { Box, Divider, Paper, Tab, Tabs } from "@mui/material"; import { Fragment } from "react"; +import { ReactNode } from "react"; import { Link, matchPath, useLocation } from "react-router-dom"; import administration from "."; import { is_superuser } from "../../commons/functions"; import ListHeader from "../../commons/layout/ListHeader"; import api_tokens from "../api_tokens"; -import ApiTokenEmbeddedList from "../api_tokens/ApiTokenEmbeddedList"; +import AccessControlApiTokenEmbeddedList from "../api_tokens/AccessControlApiTokenEmbeddedList"; import authorization_groups from "../authorization_groups"; import AuthorizationGroupEmbeddedList from "../authorization_groups/AuthorizationGroupEmbeddedList"; import users from "../users"; @@ -25,7 +26,7 @@ function useRouteMatch(patterns: readonly string[]) { } interface TabPanelProps { - children?: React.ReactNode; + children?: ReactNode; index: number; value: number; } @@ -116,7 +117,7 @@ export default function AccessControlAdministration() { - + diff --git a/frontend/src/access_control/api_tokens/AccessControlApiTokenEmbeddedList.tsx b/frontend/src/access_control/api_tokens/AccessControlApiTokenEmbeddedList.tsx new file mode 100644 index 
000000000..4221b1115 --- /dev/null +++ b/frontend/src/access_control/api_tokens/AccessControlApiTokenEmbeddedList.tsx @@ -0,0 +1,112 @@ +import { + Datagrid, + DateField, + FilterForm, + ListContextProvider, + ReferenceField, + ResourceContextProvider, + TextField, + TextInput, + WithRecord, + useListController, +} from "react-admin"; +import { Fragment } from "react/jsx-runtime"; + +import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; +import { getSettingListSize } from "../../commons/user_settings/functions"; + +function listFilters() { + return []; +} + +const AccessControlApiTokenEmbeddedList = () => { + const listContext = useListController({ + filter: {}, + perPage: 25, + resource: "api_tokens", + sort: { field: "username", order: "ASC" }, + filterDefaultValues: {}, + disableSyncWithLocation: false, + storeKey: "api_tokens.embedded", + }); + + if (listContext.isLoading) { + return
<div>Loading...</div>
; + } + + return ( + + +
+ + + ( + + {(api_token.product || api_token.product_group) && } + {!api_token.product && !api_token.product_group && ( + + `../../${reference}/${record.id}/show` + } + sx={{ "& a": { textDecoration: "none" } }} + /> + )} + + )} + /> + ( + + {api_token.product && ( + + `../../${reference}/${record.id}/show/api_token` + } + sx={{ "& a": { textDecoration: "none" } }} + /> + )} + + )} + /> + ( + + {api_token.product_group && ( + + `../../${reference}/${record.id}/show/api_token` + } + sx={{ "& a": { textDecoration: "none" } }} + /> + )} + + )} + /> + + + + +
+
+
+ ); +}; + +export default AccessControlApiTokenEmbeddedList; diff --git a/frontend/src/access_control/product_api_token/ProductApiTokenCreate.tsx b/frontend/src/access_control/api_tokens/ApiTokenCreate.tsx similarity index 53% rename from frontend/src/access_control/product_api_token/ProductApiTokenCreate.tsx rename to frontend/src/access_control/api_tokens/ApiTokenCreate.tsx index 694e89b9e..b8cc190b3 100644 --- a/frontend/src/access_control/product_api_token/ProductApiTokenCreate.tsx +++ b/frontend/src/access_control/api_tokens/ApiTokenCreate.tsx @@ -1,22 +1,23 @@ import AddIcon from "@mui/icons-material/Add"; import { Button, Dialog, DialogActions, DialogContent, DialogTitle, Stack, TextField } from "@mui/material"; import { useState } from "react"; -import { SaveButton, SimpleForm, useNotify, useRefresh } from "react-admin"; +import { DateInput, SimpleForm, useNotify, useRefresh } from "react-admin"; import AddButton from "../../commons/custom_fields/AddButton"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import CopyToClipboardButton from "../../commons/custom_fields/CopyToClipboardButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; -import { validate_required } from "../../commons/custom_validators"; -import { AutocompleteInputWide } from "../../commons/layout/themes"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { validate_required, validate_required_32, validate_required_255 } from "../../commons/custom_validators"; +import { AutocompleteInputWide, PasswordInputWide, TextInputWide } from "../../commons/layout/themes"; import { httpClient } from "../../commons/ra-data-django-rest-framework"; import { ROLE_CHOICES } from "../types"; -type CreateProductApiTokenProps = { - product: any; +type ApiTokenCreateProps = { + type: "user" | "product"; + product?: any; + user?: any; }; -const CreateProductApiToken = (props: CreateProductApiTokenProps) => { +const ApiTokenCreate = ({ type, product, user }: ApiTokenCreateProps) => { const refresh = useRefresh(); const notify = useNotify(); @@ -43,19 +44,30 @@ const CreateProductApiToken = (props: CreateProductApiTokenProps) => { refresh(); }; - const CustomToolbar = () => ( - - - } /> - - ); - const handleApiTokenCreate = async (data: any) => { - const url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/product_api_tokens/"; - const create_data = { - id: props.product.id, - role: data.role, - }; + let url = ""; + let create_data = undefined; + if (type === "product") { + url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/product_api_tokens/"; + create_data = { + product: product.id, + role: data.role, + name: data.api_token_name, + expiration_date: data.expiration_date, + }; + } else if (type === "user") { + url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/authentication/create_user_api_token/"; + create_data = { + username: user.username, + password: data.password, + name: data.api_token_name, + expiration_date: data.expiration_date, + }; + } else { + notify("Type is not product or user", { type: "error" }); + setRoleOpen(false); + return; + } httpClient(url, { method: "POST", @@ -64,32 +76,47 @@ const CreateProductApiToken = (props: CreateProductApiTokenProps) => { .then((result) => { setApiToken(result.json.token); handleApiTokenOpen(); - notify("Product API token created", { + notify("API token created", { type: "success", }); + setRoleOpen(false); }) .catch((error) => { notify(error.message, { type: "warning", }); }); - - setRoleOpen(false); }; return ( <> - 
Create product API token + Create {type} API token - }> - + } + /> + } + > + {type === "product" && ( + + )} + {type === "user" && ( + + )} + + - Create product API token + Create {type} API token { ); }; -export default CreateProductApiToken; +export default ApiTokenCreate; diff --git a/frontend/src/access_control/api_tokens/ApiTokenEmbeddedList.tsx b/frontend/src/access_control/api_tokens/ApiTokenEmbeddedList.tsx index 32ba94e25..e5f403467 100644 --- a/frontend/src/access_control/api_tokens/ApiTokenEmbeddedList.tsx +++ b/frontend/src/access_control/api_tokens/ApiTokenEmbeddedList.tsx @@ -1,31 +1,39 @@ import { Datagrid, - FilterForm, + DateField, ListContextProvider, - ReferenceField, ResourceContextProvider, + SelectField, + SortPayload, TextField, - TextInput, WithRecord, useListController, } from "react-admin"; import { Fragment } from "react/jsx-runtime"; -import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; +import { PERMISSION_PRODUCT_API_TOKEN_REVOKE, ROLE_CHOICES } from "../../access_control/types"; import { getSettingListSize } from "../../commons/user_settings/functions"; +import ApiTokenRevoke from "./ApiTokenRevoke"; -function listFilters() { - return []; -} +type ApiTokenEmbeddedListProps = { + type: "user" | "product"; + product?: any; + user?: any; +}; + +const ApiTokenEmbeddedList = ({ type, product, user }: ApiTokenEmbeddedListProps) => { + const filter = type === "product" ? { product: Number(product.id) } : { user: Number(user.id) }; + const sort: SortPayload = type === "product" ? { field: "role", order: "ASC" } : { field: "user", order: "ASC" }; + const resource = type === "product" ? "product_api_tokens" : "api_tokens"; + const current_user = localStorage.getItem("user"); + const username = current_user ? JSON.parse(current_user).username : ""; -const ApiTokenEmbeddedList = () => { const listContext = useListController({ - filter: {}, + filter: filter, perPage: 25, - resource: "api_tokens", - sort: { field: "name", order: "ASC" }, - filterDefaultValues: {}, - disableSyncWithLocation: false, + resource: resource, + sort: sort, + disableSyncWithLocation: true, storeKey: "api_tokens.embedded", }); @@ -34,55 +42,35 @@ const ApiTokenEmbeddedList = () => { } return ( - +
- + {type === "product" && } + ( - - {api_token.product && ( - - `../../${reference}/${record.id}/show/api_token` - } - sx={{ "& a": { textDecoration: "none" } }} - /> - )} - - )} - /> - ( - {api_token.product_group && ( - - `../../${reference}/${record.id}/show/api_token` - } - sx={{ "& a": { textDecoration: "none" } }} + {((type === "product" && + product?.permissions.includes(PERMISSION_PRODUCT_API_TOKEN_REVOKE)) || + (type === "user" && api_token.username === username)) && ( + )} )} /> -
diff --git a/frontend/src/access_control/api_tokens/ApiTokenRevoke.tsx b/frontend/src/access_control/api_tokens/ApiTokenRevoke.tsx new file mode 100644 index 000000000..456d5fd18 --- /dev/null +++ b/frontend/src/access_control/api_tokens/ApiTokenRevoke.tsx @@ -0,0 +1,101 @@ +import DeleteIcon from "@mui/icons-material/Delete"; +import { Dialog, DialogContent, DialogTitle, Typography } from "@mui/material"; +import { useState } from "react"; +import { SaveButton, SimpleForm, useNotify, useRefresh } from "react-admin"; + +import CancelButton from "../../commons/custom_fields/CancelButton"; +import RemoveButton from "../../commons/custom_fields/RemoveButton"; +import Toolbar from "../../commons/custom_fields/Toolbar"; +import { validate_required_255 } from "../../commons/custom_validators"; +import { PasswordInputWide } from "../../commons/layout/themes"; +import { httpClient } from "../../commons/ra-data-django-rest-framework"; + +type ApiTokenRevokeProps = { + type: "user" | "product"; + api_token_id: any; + user?: any; + name: string; +}; + +const ApiTokenRevoke = ({ type, api_token_id, user, name }: ApiTokenRevokeProps) => { + const refresh = useRefresh(); + const notify = useNotify(); + + const [open, setOpen] = useState(false); + const handleOpen = () => setOpen(true); + const handleClose = () => setOpen(false); + + const handleApiTokenRevoke = async (data: any) => { + let method = ""; + let url = ""; + let revoke_data = undefined; + + if (type === "product") { + method = "DELETE"; + url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/product_api_tokens/" + api_token_id + "/"; + } else if (type === "user") { + method = "POST"; + url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/authentication/revoke_user_api_token/"; + revoke_data = { + username: user.username, + password: data.password, + name: name, + }; + } else { + notify("Type is not product or user", { type: "error" }); + setOpen(false); + return; + } + + httpClient(url, { + method: method, + body: type === "user" ? JSON.stringify(revoke_data) : null, + }) + .then(() => { + notify("API token revoked", { + type: "success", + }); + refresh(); + setOpen(false); + }) + .catch((error) => { + notify(error.message, { + type: "warning", + }); + }); + }; + + return ( + <> + + + Revoke {type} API token + + + + } + alwaysEnable={type === "product"} + /> + + } + > + + Are you sure you want to revoke the {type} API token {name}? 
+ + {type === "user" && ( + + )} + + + + + ); +}; + +export default ApiTokenRevoke; diff --git a/frontend/src/access_control/auth_provider/authProvider.ts b/frontend/src/access_control/auth_provider/authProvider.ts index 4d350c838..9b50a3cf7 100644 --- a/frontend/src/access_control/auth_provider/authProvider.ts +++ b/frontend/src/access_control/auth_provider/authProvider.ts @@ -1,13 +1,12 @@ -import { User, WebStorageStateStore } from "oidc-client-ts"; import { UserManager } from "oidc-client-ts"; import { AuthProvider } from "react-admin"; import { set_settings_in_local_storage } from "../../commons/functions"; import { httpClient } from "../../commons/ra-data-django-rest-framework"; -import { saveSettingListProperties, setListProperties } from "../../commons/user_settings/functions"; +import { oidcConfig, oidcStorageKey, oidc_signed_in } from "./oidc"; const authProvider: AuthProvider = { - login: ({ username, password }) => { + login: async ({ username, password }) => { if (oidc_signed_in()) { return Promise.resolve(); } else { @@ -25,12 +24,10 @@ const authProvider: AuthProvider = { } return response.json(); }) - .then((auth) => { - localStorage.setItem("jwt", auth.jwt); - setListProperties(auth.user.setting_list_properties); - delete auth.user.setting_list_properties; - localStorage.setItem("user", JSON.stringify(auth.user)); - localStorage.setItem("theme", auth.user.setting_theme); + .then((response) => { + localStorage.setItem("jwt", response.jwt); + localStorage.setItem("user", JSON.stringify(response.user)); + localStorage.setItem("theme", response.user.setting_theme); }) .catch((error) => { if (error.message == "Forbidden") { @@ -42,12 +39,9 @@ const authProvider: AuthProvider = { } }, logout: async () => { - if (oidc_signed_in() || jwt_signed_in()) { - await saveSettingListProperties(); - } - localStorage.removeItem("jwt"); localStorage.removeItem("user"); + localStorage.removeItem("notification_count"); if (oidc_signed_in()) { const user_manager = new UserManager(oidcConfig); @@ -58,21 +52,13 @@ const authProvider: AuthProvider = { }, checkError: async (error) => { if (error.status === 401) { + if (location.hash !== "#/login") { + localStorage.setItem("last_location", location.hash); + } if (oidc_signed_in()) { const user_manager = new UserManager(oidcConfig); - localStorage.setItem("last_location", location.hash); - await user_manager - .signinSilent() - .then(() => { - error.message = false; - error.logoutUser = false; - error.redirectTo = location.hash; - throw error; - }) - .catch(() => { - localStorage.removeItem(oidcStorageKey); - return user_manager.signinRedirect(); - }); + localStorage.removeItem(oidcStorageKey); + return user_manager.signinRedirect(); } throw error; } @@ -81,6 +67,11 @@ const authProvider: AuthProvider = { if (oidc_signed_in() || jwt_signed_in()) { return Promise.resolve(); } + + if (location.hash != "" && !location.hash.startsWith("#/login")) { + localStorage.setItem("last_location", location.hash); + } + return Promise.reject({ message: false }); }, getPermissions: () => Promise.reject(), @@ -107,8 +98,6 @@ const authProvider: AuthProvider = { export const setUserInfo = async () => { const userinfo = await getUserInfo(); - setListProperties(userinfo.setting_list_properties); - delete userinfo.setting_list_properties; localStorage.setItem("user", JSON.stringify(userinfo)); localStorage.setItem("theme", userinfo.setting_theme); }; @@ -123,58 +112,4 @@ export function jwt_signed_in(): boolean { return localStorage.getItem("jwt") != null; } -export 
const oidcStorageKey = - "oidc.user:" + window.__RUNTIME_CONFIG__.OIDC_AUTHORITY + ":" + window.__RUNTIME_CONFIG__.OIDC_CLIENT_ID; - -export function oidcStorageUser(): string | null { - return localStorage.getItem(oidcStorageKey); -} - -export function oidc_signed_in(): boolean { - return oidcStorageUser() != null; -} - -// eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/no-invalid-void-type -const onSigninCallback = (_user: User | void): void => { - const user_manager = new UserManager(oidcConfig); - user_manager.clearStaleState(); - const last_location = localStorage.getItem("last_location"); - if (last_location) { - localStorage.removeItem("last_location"); - location.hash = last_location; - window.history.replaceState({}, document.title, "/" + last_location); - } else { - window.history.replaceState({}, document.title, window.location.pathname); - } -}; - -export const oidcConfig = { - userStore: new WebStorageStateStore({ store: window.localStorage }), - authority: window.__RUNTIME_CONFIG__.OIDC_AUTHORITY, - client_id: window.__RUNTIME_CONFIG__.OIDC_CLIENT_ID, - redirect_uri: window.__RUNTIME_CONFIG__.OIDC_REDIRECT_URI, - post_logout_redirect_uri: window.__RUNTIME_CONFIG__.OIDC_POST_LOGOUT_REDIRECT_URI, - scope: - window.__RUNTIME_CONFIG__.OIDC_SCOPE && window.__RUNTIME_CONFIG__.OIDC_SCOPE !== "dummy" - ? window.__RUNTIME_CONFIG__.OIDC_SCOPE - : "openid profile email", - automaticSilentRenew: true, - prompt: "select_account", - onSigninCallback: onSigninCallback, -}; - -export function get_oidc_id_token(): string | null { - if (oidcStorageUser()) { - const user = User.fromStorageString(oidcStorageUser()!); // eslint-disable-line @typescript-eslint/no-non-null-assertion - // We have checked before that user is not null - if (user && user.id_token) { - return user.id_token; - } else { - return null; - } - } else { - return null; - } -} - export default authProvider; diff --git a/frontend/src/access_control/auth_provider/axios_instance.ts b/frontend/src/access_control/auth_provider/axios_instance.ts index d6a4d8866..9a602f04c 100644 --- a/frontend/src/access_control/auth_provider/axios_instance.ts +++ b/frontend/src/access_control/auth_provider/axios_instance.ts @@ -1,6 +1,7 @@ import axios, { InternalAxiosRequestConfig } from "axios"; -import { get_oidc_id_token, jwt_signed_in, oidc_signed_in } from "./authProvider"; +import { jwt_signed_in } from "../../access_control/auth_provider/authProvider"; +import { get_oidc_id_token, oidc_signed_in } from "../../access_control/auth_provider/oidc"; const axios_instance = axios.create({ baseURL: window.__RUNTIME_CONFIG__.API_BASE_URL, @@ -10,12 +11,12 @@ axios_instance.interceptors.request.use( async (config: InternalAxiosRequestConfig) => { if (oidc_signed_in()) { if (config.headers) { - config.headers["Authorization"] = "Bearer " + get_oidc_id_token(); + config.headers.Authorization = "Bearer " + get_oidc_id_token(); } return config; } else if (jwt_signed_in()) { if (config.headers) { - config.headers["Authorization"] = "JWT " + localStorage.getItem("jwt"); + config.headers.Authorization = "JWT " + localStorage.getItem("jwt"); } return config; } else { diff --git a/frontend/src/access_control/auth_provider/functions.ts b/frontend/src/access_control/auth_provider/functions.ts deleted file mode 100644 index cb8aa8c01..000000000 --- a/frontend/src/access_control/auth_provider/functions.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { UserManager } from "oidc-client-ts"; - -import { oidcConfig, oidcStorageUser } from 
"./authProvider"; - -export const updateRefreshToken = () => { - const oidcUser = oidcStorageUser(); - if (oidcUser) { - const expires_at = JSON.parse(oidcUser).expires_at * 1000; - if (expires_at < Date.now()) { - localStorage.setItem("user_action", "refreshing token"); - const user_manager = new UserManager(oidcConfig); - user_manager.signinSilent(); - } - } - return Promise.resolve(); -}; diff --git a/frontend/src/access_control/auth_provider/oidc.ts b/frontend/src/access_control/auth_provider/oidc.ts new file mode 100644 index 000000000..92a0a6b8c --- /dev/null +++ b/frontend/src/access_control/auth_provider/oidc.ts @@ -0,0 +1,80 @@ +import { User, UserManager, WebStorageStateStore } from "oidc-client-ts"; + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +const onSigninCallback = (_user: User | void): void => { + const user_manager = new UserManager(oidcConfig); + user_manager.clearStaleState(); + const last_location = localStorage.getItem("last_location"); + if (last_location) { + localStorage.removeItem("last_location"); + location.hash = last_location; + window.location.replace("/" + last_location); + } else { + window.history.replaceState({}, document.title, window.location.pathname); + } +}; + +export const oidcConfig = { + userStore: new WebStorageStateStore({ store: window.localStorage }), + authority: window.__RUNTIME_CONFIG__.OIDC_AUTHORITY, + client_id: window.__RUNTIME_CONFIG__.OIDC_CLIENT_ID, + redirect_uri: window.__RUNTIME_CONFIG__.OIDC_REDIRECT_URI, + post_logout_redirect_uri: window.__RUNTIME_CONFIG__.OIDC_POST_LOGOUT_REDIRECT_URI, + scope: + window.__RUNTIME_CONFIG__.OIDC_SCOPE && window.__RUNTIME_CONFIG__.OIDC_SCOPE !== "dummy" + ? window.__RUNTIME_CONFIG__.OIDC_SCOPE + : "openid profile email", + automaticSilentRenew: true, + onSigninCallback: onSigninCallback, + ...(window.__RUNTIME_CONFIG__.OIDC_PROMPT && window.__RUNTIME_CONFIG__.OIDC_PROMPT !== "" + ? 
{ prompt: window.__RUNTIME_CONFIG__.OIDC_PROMPT } + : {}), +}; + +export const oidcStorageKey = + "oidc.user:" + window.__RUNTIME_CONFIG__.OIDC_AUTHORITY + ":" + window.__RUNTIME_CONFIG__.OIDC_CLIENT_ID; + +export function oidcStorageUser(): string | null { + return localStorage.getItem(oidcStorageKey); +} + +export function oidc_signed_in(): boolean { + return oidcStorageUser() != null; +} + +export function get_oidc_id_token(): string | null { + if (oidcStorageUser()) { + const user = User.fromStorageString(oidcStorageUser()!); + // We have checked before that user is not null + if (user?.id_token) { + return user.id_token; + } else { + return null; + } + } else { + return null; + } +} + +export const updateRefreshToken = () => { + const oidcUser = oidcStorageUser(); + if (oidcUser) { + const expires_at = JSON.parse(oidcUser).expires_at * 1000; + if (expires_at < Date.now()) { + localStorage.setItem("user_action", "refreshing token"); + const user_manager = new UserManager(oidcConfig); + return user_manager + .signinSilent() + .then(() => { + return Promise.resolve(); + }) + .catch((error: Error) => { + if (location.hash !== "#/login") { + localStorage.setItem("last_location", location.hash); + } + return Promise.reject(error); + }); + } + } + return Promise.resolve(); +}; diff --git a/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberAdd.tsx b/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberAdd.tsx index 537c79955..ba93ca50b 100644 --- a/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberAdd.tsx +++ b/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberAdd.tsx @@ -108,8 +108,9 @@ const AuthorizationGroupMemberAdd = ({ id }: AuthorizationGroupMemberAddProps) = /> setIsManager(e.target.checked)} /> diff --git a/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberEdit.tsx b/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberEdit.tsx index 6f75f381f..e7ef06a2e 100644 --- a/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberEdit.tsx +++ b/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberEdit.tsx @@ -1,10 +1,9 @@ import { Dialog, DialogContent, DialogTitle } from "@mui/material"; import { Fragment, useState } from "react"; -import { BooleanInput, SaveButton, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; +import { BooleanInput, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import EditButton from "../../commons/custom_fields/EditButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { TextInputWide } from "../../commons/layout/themes"; const ProductMemberEdit = () => { @@ -48,19 +47,13 @@ const ProductMemberEdit = () => { setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); return ( Edit user - }> + }> diff --git a/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberEmbeddedList.tsx b/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberEmbeddedList.tsx index d36141c1c..24179bd03 100644 --- a/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberEmbeddedList.tsx +++ 
b/frontend/src/access_control/authorization_group_members/AuthorizationGroupMemberEmbeddedList.tsx @@ -4,7 +4,6 @@ import { BooleanField, Datagrid, FilterForm, - Identifier, ListContextProvider, NullableBooleanInput, ResourceContextProvider, @@ -14,7 +13,7 @@ import { } from "react-admin"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; -import TextUrlField from "../../commons/custom_fields/TextUrlField"; +import { UserFullNameURLField } from "../../commons/custom_fields/UserFullNameURLField"; import { is_superuser } from "../../commons/functions"; import { getSettingListSize } from "../../commons/user_settings/functions"; import AuthorizationGroupMemberAdd from "./AuthorizationGroupMemberAdd"; @@ -29,10 +28,6 @@ function listFilters() { ]; } -const showUser = (id: Identifier) => { - return "#/users/" + id + "/show"; -}; - type AuthorizationGroupMemberEmbeddedListProps = { authorization_group: any; }; @@ -67,26 +62,7 @@ const AuthorizationGroupMemberEmbeddedList = ({ authorization_group }: Authoriza bulkActionButtons={false} resource="users" > - ( - - )} - /> - ( - - )} - /> + {(is_superuser() || authorization_group.is_manager) && ( { const transform = (data: any) => { - if (!data.oidc_group) { - data.oidc_group = ""; - } + data.oidc_group ??= ""; return data; }; return ( - - Authorization Group + + +   Authorization Group diff --git a/frontend/src/access_control/authorization_groups/AuthorizationGroupEdit.tsx b/frontend/src/access_control/authorization_groups/AuthorizationGroupEdit.tsx index c7bfe48c0..da5eebe06 100644 --- a/frontend/src/access_control/authorization_groups/AuthorizationGroupEdit.tsx +++ b/frontend/src/access_control/authorization_groups/AuthorizationGroupEdit.tsx @@ -1,6 +1,7 @@ import { Typography } from "@mui/material"; import { DeleteButton, Edit, SaveButton, SimpleForm, Toolbar } from "react-admin"; +import authorization_groups from "."; import { validate_255, validate_required_255 } from "../../commons/custom_validators"; import { TextInputWide } from "../../commons/layout/themes"; @@ -15,17 +16,16 @@ const CustomToolbar = () => { const AuthorizationGroupEdit = () => { const transform = (data: any) => { - if (!data.oidc_group) { - data.oidc_group = ""; - } + data.oidc_group ??= ""; return data; }; return ( }> - - Authorization Group + + +   Authorization Group diff --git a/frontend/src/access_control/authorization_groups/AuthorizationGroupShow.tsx b/frontend/src/access_control/authorization_groups/AuthorizationGroupShow.tsx index 0bf8dfb2a..f725c1a17 100644 --- a/frontend/src/access_control/authorization_groups/AuthorizationGroupShow.tsx +++ b/frontend/src/access_control/authorization_groups/AuthorizationGroupShow.tsx @@ -1,4 +1,5 @@ -import { Box, Paper, Stack, Typography } from "@mui/material"; +import ExpandMoreIcon from "@mui/icons-material/ExpandMore"; +import { Accordion, AccordionDetails, AccordionSummary, Box, Paper, Stack, Typography } from "@mui/material"; import { Fragment } from "react"; import { EditButton, @@ -11,6 +12,7 @@ import { useRecordContext, } from "react-admin"; +import authorization_groups from "."; import { is_superuser } from "../../commons/functions"; import { useStyles } from "../../commons/layout/themes"; import UserProductAuthorizationGroupMemberEmbeddedList from "../../core/product_authorization_group_members/UserProductAuthorizationGroupMemberEmbeddedList"; @@ -27,7 +29,7 @@ const ShowActions = () => { filterDefaultValues={{ is_active: true }} storeKey="authorization_groups.embedded" /> - 
{((authorization_group && authorization_group.is_manager) || is_superuser()) && } + {(authorization_group?.is_manager || is_superuser()) && }
); @@ -39,10 +41,11 @@ const AuthorizationGroupComponent = () => { return ( ( - + - - Authorization Group + + +   Authorization Group @@ -62,26 +65,30 @@ const AuthorizationGroupComponent = () => { {authorization_group.has_product_group_members && ( - - - Product Groups - - - + + }> + Product Groups + + + + + )} {authorization_group.has_product_members && ( - - - Products - - - + + }> + Products + + + + + )} )} diff --git a/frontend/src/access_control/login/Login.tsx b/frontend/src/access_control/login/Login.tsx index f2b1572b3..89b35671a 100644 --- a/frontend/src/access_control/login/Login.tsx +++ b/frontend/src/access_control/login/Login.tsx @@ -2,24 +2,25 @@ import LockIcon from "@mui/icons-material/Lock"; import PersonIcon from "@mui/icons-material/Person"; import { Avatar, Button, Card, CardActions, CircularProgress, Stack } from "@mui/material"; import Box from "@mui/material/Box"; -import PropTypes from "prop-types"; import { Fragment, useEffect, useState } from "react"; import { Form, TextInput, required, useLogin, useNotify, useTheme } from "react-admin"; import { useAuth } from "react-oidc-context"; import { Navigate, useLocation } from "react-router-dom"; +import { jwt_signed_in } from "../../access_control/auth_provider/authProvider"; +import { oidc_signed_in } from "../../access_control/auth_provider/oidc"; import { getTheme } from "../../commons/user_settings/functions"; import { OIDCSignInButton } from "../auth_provider/OIDCSignInButton"; -import { jwt_signed_in, oidc_signed_in } from "../auth_provider/authProvider"; const Login = () => { const [loading, setLoading] = useState(false); const [, setTheme] = useTheme(); const auth = useAuth(); - const [feature_disable_user_login, setFeatureDisableUserLogin] = useState(false); + const [featureDisableUserLogin, setFeatureDisableUserLogin] = useState(false); const notify = useNotify(); const login = useLogin(); const location = useLocation(); + const [newLocation, setNewLocation] = useState("/"); const isAuthenticated = jwt_signed_in() || oidc_signed_in(); @@ -58,41 +59,58 @@ const Login = () => { }); } - const handleSubmit = (auth: FormValues) => { + interface FormValues { + username?: string; + password?: string; + } + + const handleSubmit = async (auth: FormValues) => { setLoading(true); - login(auth, location.state ? (location.state as any).nextPathname : "/") + login(auth) .then(() => { setTheme(getTheme()); + setNewLocation("/"); + const last_location = localStorage.getItem("last_location"); + if (last_location) { + localStorage.removeItem("last_location"); + if (last_location.startsWith("#")) { + setNewLocation(last_location.substring(1)); + } else { + setNewLocation(last_location); + } + } }) .catch((error: Error) => { setLoading(false); notify( typeof error === "string" ? error - : typeof error === "undefined" || !error.message + : typeof error === "undefined" || !error.message // eslint-disable-line @typescript-eslint/prefer-optional-chain ? "ra.auth.sign_in_error" : error.message, { type: "warning", messageArgs: { - _: typeof error === "string" ? error : error && error.message ? error.message : undefined, + _: typeof error === "string" ? error : error?.message ? error.message : undefined, }, } ); }); + + setLoading(false); }; function show_user_login() { return ( window.__RUNTIME_CONFIG__.OIDC_ENABLE == "false" || - !feature_disable_user_login || + !featureDisableUserLogin || location.hash == "#force_user_login" ); } return ( - {isAuthenticated && } + {isAuthenticated && } {!isAuthenticated && !auth.isLoading && (
{ ); }; -Login.propTypes = { - authProvider: PropTypes.func, - previousRoute: PropTypes.string, -}; - export default Login; - -interface FormValues { - username?: string; - password?: string; -} diff --git a/frontend/src/access_control/product_api_token/ProductApiTokenEmbeddedList.tsx b/frontend/src/access_control/product_api_token/ProductApiTokenEmbeddedList.tsx deleted file mode 100644 index 1f7765b6a..000000000 --- a/frontend/src/access_control/product_api_token/ProductApiTokenEmbeddedList.tsx +++ /dev/null @@ -1,46 +0,0 @@ -import { Datagrid, ListContextProvider, ResourceContextProvider, SelectField, useListController } from "react-admin"; - -import { PERMISSION_PRODUCT_API_TOKEN_REVOKE, ROLE_CHOICES } from "../../access_control/types"; -import { getSettingListSize } from "../../commons/user_settings/functions"; -import RevokeProductApiToken from "./ProductApiTokenRevoke"; - -type ProductApiTokenEmbeddedListProps = { - product: any; -}; - -const ProductApiTokenEmbeddedList = ({ product }: ProductApiTokenEmbeddedListProps) => { - const listContext = useListController({ - filter: { product: Number(product.id) }, - perPage: 25, - resource: "product_api_tokens", - sort: { field: "role", order: "ASC" }, - disableSyncWithLocation: true, - storeKey: "product_api_tokens.embedded", - }); - - if (listContext.isLoading) { - return
<div>Loading...</div>
; - } - - return ( - - -
- - - {product && product.permissions.includes(PERMISSION_PRODUCT_API_TOKEN_REVOKE) && ( - - )} - -
-
-
- ); -}; - -export default ProductApiTokenEmbeddedList; diff --git a/frontend/src/access_control/product_api_token/ProductApiTokenRevoke.tsx b/frontend/src/access_control/product_api_token/ProductApiTokenRevoke.tsx deleted file mode 100644 index 7c9206aab..000000000 --- a/frontend/src/access_control/product_api_token/ProductApiTokenRevoke.tsx +++ /dev/null @@ -1,53 +0,0 @@ -import { useState } from "react"; -import { Confirm, useNotify, useRefresh } from "react-admin"; - -import RemoveButton from "../../commons/custom_fields/RemoveButton"; -import { httpClient } from "../../commons/ra-data-django-rest-framework"; - -type RevokeProductApiTokenProps = { - product: any; -}; - -const RevokeProductApiToken = (props: RevokeProductApiTokenProps) => { - const refresh = useRefresh(); - const notify = useNotify(); - - const [open, setOpen] = useState(false); - const handleOpen = () => setOpen(true); - const handleClose = () => setOpen(false); - - const handleApiTokenRevoke = async () => { - const url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/product_api_tokens/" + props.product.id + "/"; - httpClient(url, { - method: "DELETE", - }) - .then(() => { - notify("Product API token revoked", { - type: "success", - }); - refresh(); - }) - .catch((error) => { - notify(error.message, { - type: "warning", - }); - }); - - setOpen(false); - }; - - return ( - <> - - - - ); -}; - -export default RevokeProductApiToken; diff --git a/frontend/src/access_control/types.tsx b/frontend/src/access_control/types.tsx index 6bc489445..9ebf945d5 100644 --- a/frontend/src/access_control/types.tsx +++ b/frontend/src/access_control/types.tsx @@ -40,7 +40,9 @@ export const PERMISSION_BRANCH_DELETE = 1403; export const PERMISSION_BRANCH_CREATE = 1404; export const PERMISSION_SERVICE_VIEW = 1501; +export const PERMISSION_SERVICE_EDIT = 1502; export const PERMISSION_SERVICE_DELETE = 1503; +export const PERMISSION_SERVICE_CREATE = 1504; export const PERMISSION_OBSERVATION_VIEW = 2001; export const PERMISSION_OBSERVATION_EDIT = 2002; @@ -63,4 +65,10 @@ export const PERMISSION_VEX_EDIT = 5002; export const PERMISSION_VEX_DELETE = 5003; export const PERMISSION_VEX_CREATE = 5004; +export const PERMISSION_COMPONENT_LICENSE_EDIT = 6002; export const PERMISSION_COMPONENT_LICENSE_DELETE = 6003; + +export const PERMISSION_CONCLUDED_LICENSE_VIEW = 7001; +export const PERMISSION_CONCLUDED_LICENSE_EDIT = 7002; +export const PERMISSION_CONCLUDED_LICENSE_DELETE = 7003; +export const PERMISSION_CONCLUDED_LICENSE_CREATE = 7004; diff --git a/frontend/src/access_control/users/UserChangePassword.tsx b/frontend/src/access_control/users/UserChangePassword.tsx index 09b949580..3385ac2ad 100644 --- a/frontend/src/access_control/users/UserChangePassword.tsx +++ b/frontend/src/access_control/users/UserChangePassword.tsx @@ -1,11 +1,10 @@ import PasswordIcon from "@mui/icons-material/Password"; import { Dialog, DialogContent, DialogTitle, Typography } from "@mui/material"; import { Fragment, useEffect, useState } from "react"; -import { SaveButton, SimpleForm, WithRecord, useNotify, useRefresh } from "react-admin"; +import { SimpleForm, WithRecord, useNotify, useRefresh } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import SmallButton from "../../commons/custom_fields/SmallButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_required_255 } from "../../commons/custom_validators"; import { 
PasswordInputWide } from "../../commons/layout/themes"; import { httpClient } from "../../commons/ra-data-django-rest-framework"; @@ -14,7 +13,7 @@ const UserChangePassword = () => { const refresh = useRefresh(); const [open, setOpen] = useState(false); const notify = useNotify(); - const [password_rules, setPasswordRules] = useState(""); + const [passwordRules, setPasswordRules] = useState(""); useEffect(() => { get_password_rules(); @@ -59,15 +58,8 @@ const UserChangePassword = () => { }; const handleCancel = () => setOpen(false); - const handleOpen = () => setOpen(true); - const CustomToolbar = () => ( - - - - - ); return ( } /> @@ -76,16 +68,19 @@ const UserChangePassword = () => { Change password for {user.full_name} - }> + } + > Password rules: - {password_rules} + {passwordRules} diff --git a/frontend/src/access_control/users/UserCreate.tsx b/frontend/src/access_control/users/UserCreate.tsx index 9b21dd49d..bcf9b2d46 100644 --- a/frontend/src/access_control/users/UserCreate.tsx +++ b/frontend/src/access_control/users/UserCreate.tsx @@ -1,31 +1,25 @@ import { Divider, Typography } from "@mui/material"; import { BooleanInput, Create, SimpleForm } from "react-admin"; +import users from "."; import { validate_150, validate_255, validate_required_150 } from "../../commons/custom_validators"; import { TextInputWide } from "../../commons/layout/themes"; const UserCreate = () => { const transform = (data: any) => { - if (!data.full_name) { - data.full_name = ""; - } - if (!data.first_name) { - data.first_name = ""; - } - if (!data.last_name) { - data.last_name = ""; - } - if (!data.email) { - data.email = ""; - } + data.full_name ??= ""; + data.first_name ??= ""; + data.last_name ??= ""; + data.email ??= ""; return data; }; return ( - - User + + +   User diff --git a/frontend/src/access_control/users/UserEdit.tsx b/frontend/src/access_control/users/UserEdit.tsx index b6cd549b3..f742f72bc 100644 --- a/frontend/src/access_control/users/UserEdit.tsx +++ b/frontend/src/access_control/users/UserEdit.tsx @@ -1,6 +1,7 @@ import { Divider, Typography } from "@mui/material"; import { BooleanInput, DeleteButton, Edit, SaveButton, SimpleForm, Toolbar } from "react-admin"; +import users from "."; import { validate_150, validate_255, validate_required_150 } from "../../commons/custom_validators"; import { TextInputWide } from "../../commons/layout/themes"; @@ -15,26 +16,19 @@ const CustomToolbar = () => { const UserEdit = () => { const transform = (data: any) => { - if (!data.full_name) { - data.full_name = ""; - } - if (!data.first_name) { - data.first_name = ""; - } - if (!data.last_name) { - data.last_name = ""; - } - if (!data.email) { - data.email = ""; - } + data.full_name ??= ""; + data.first_name ??= ""; + data.last_name ??= ""; + data.email ??= ""; return data; }; return ( }> - - User + + +   User diff --git a/frontend/src/access_control/users/UserShow.tsx b/frontend/src/access_control/users/UserShow.tsx index 3291d292b..4a80d29f7 100644 --- a/frontend/src/access_control/users/UserShow.tsx +++ b/frontend/src/access_control/users/UserShow.tsx @@ -1,4 +1,5 @@ -import { Box, Paper, Stack, Typography } from "@mui/material"; +import ExpandMoreIcon from "@mui/icons-material/ExpandMore"; +import { Accordion, AccordionDetails, AccordionSummary, Box, Paper, Stack, Typography } from "@mui/material"; import { Fragment } from "react"; import { BooleanField, @@ -13,9 +14,12 @@ import { useRecordContext, } from "react-admin"; +import users from "."; import { is_superuser } from "../../commons/functions"; 
import { useStyles } from "../../commons/layout/themes"; import UserProductMemberEmbeddedList from "../../core/product_members/UserProductMemberEmbeddedList"; +import ApiTokenCreate from "../api_tokens/ApiTokenCreate"; +import ApiTokenEmbeddedList from "../api_tokens/ApiTokenEmbeddedList"; import AuthorizationGroupEmbeddedList from "../authorization_groups/AuthorizationGroupEmbeddedList"; import UserChangePassword from "./UserChangePassword"; @@ -61,9 +65,10 @@ const ShowActions = () => { const UserComponent = () => { const { classes } = useStyles(); const current_user = localStorage.getItem("user"); + const current_user_id = current_user ? JSON.parse(current_user).id : 0; const showFullInformation = (user: any) => { - return is_superuser() || (current_user && JSON.parse(current_user).id == user.id); + return is_superuser() || current_user_id == user.id; }; const userWidth = (user: any) => { @@ -73,12 +78,13 @@ const UserComponent = () => { return ( ( - + - - User + + +   User @@ -122,7 +128,7 @@ const UserComponent = () => { {showFullInformation(user) && ( - + Permissions @@ -153,34 +159,60 @@ const UserComponent = () => { )} + {user.setting_package_info_preference && ( + + + + )} )} + {showFullInformation(user) && (user.has_api_tokens || current_user_id === user.id) && ( + + }> + API Token + + + {current_user_id === user.id && ( + + + + )} + + + + )} {showFullInformation(user) && user.has_authorization_groups && ( - - - Authorization Groups - - - + + }> + Authorization Groups + + + + + )} {showFullInformation(user) && user.has_product_group_members && ( - - - Product Groups - - - + + }> + Product Groups + + + + + )} {showFullInformation(user) && user.has_product_members && ( - - - Products - - - + + }> + Products + + + + + )} )} diff --git a/frontend/src/background_tasks/periodic_tasks/PeriodicTaskList.tsx b/frontend/src/background_tasks/periodic_tasks/PeriodicTaskList.tsx new file mode 100644 index 000000000..ce505a32c --- /dev/null +++ b/frontend/src/background_tasks/periodic_tasks/PeriodicTaskList.tsx @@ -0,0 +1,41 @@ +import humanizeDuration from "humanize-duration"; +import { Fragment } from "react"; +import { AutocompleteInput, Datagrid, DateField, FunctionField, List, TextField, TextInput } from "react-admin"; + +import periodic_tasks from "."; +import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; +import { PeriodicTaskStatusField } from "../../commons/custom_fields/PeriodicTaskStatusField"; +import ListHeader from "../../commons/layout/ListHeader"; +import { getSettingListSize } from "../../commons/user_settings/functions"; +import { PERIODIC_TASKS_STATUS_CHOICES } from "../types"; + +const listFilters = [ + , + , +]; + +const PeriodicTaskList = () => { + return ( + + + } + filters={listFilters} + sort={{ field: "start_time", order: "DESC" }} + actions={false} + disableSyncWithLocation={false} + > + + + + `${humanizeDuration(record.duration)}`} /> + + + + + + ); +}; + +export default PeriodicTaskList; diff --git a/frontend/src/background_tasks/periodic_tasks/index.ts b/frontend/src/background_tasks/periodic_tasks/index.ts new file mode 100644 index 000000000..cf4934820 --- /dev/null +++ b/frontend/src/background_tasks/periodic_tasks/index.ts @@ -0,0 +1,8 @@ +import PeriodicTaskIcon from "@mui/icons-material/Schedule"; + +import PeriodicTaskList from "./PeriodicTaskList"; + +export default { + list: PeriodicTaskList, + icon: PeriodicTaskIcon, +}; diff --git a/frontend/src/background_tasks/types.ts b/frontend/src/background_tasks/types.ts new file mode 
100644 index 000000000..cd2fa8768 --- /dev/null +++ b/frontend/src/background_tasks/types.ts @@ -0,0 +1,5 @@ +export const PERIODIC_TASKS_STATUS_CHOICES = [ + { id: "Success", name: "Success" }, + { id: "Failure", name: "Failure" }, + { id: "Running", name: "Running" }, +]; diff --git a/frontend/src/commons/IntervalHooks.tsx b/frontend/src/commons/IntervalHooks.tsx new file mode 100644 index 000000000..a0ea02c60 --- /dev/null +++ b/frontend/src/commons/IntervalHooks.tsx @@ -0,0 +1,24 @@ +import React from "react"; + +type IntervalCallback = () => void; + +// Taken from https://stackoverflow.com/a/70935119 + +function useDispatch(callback: IntervalCallback, delay: number): void { + const cachedCallback = React.useRef(null); + + React.useEffect(() => { + cachedCallback.current = callback; + }, [callback]); + + React.useEffect(() => { + if (delay !== 0) { + const id = setInterval(() => cachedCallback?.current?.(), delay); + return () => clearInterval(id); + } + }, [delay]); +} + +export const IntervalHooks = { + useDispatch, +}; diff --git a/frontend/src/commons/about/About.tsx b/frontend/src/commons/about/About.tsx index fac47f54f..be9be6c3f 100644 --- a/frontend/src/commons/about/About.tsx +++ b/frontend/src/commons/about/About.tsx @@ -75,37 +75,30 @@ const About = () => { tools and makes them available for assessment and reporting. - + Backend version: - + {backendVersion} - + Frontend version: - + {get_version} - + Copyright: - - - MaibornWolff GmbH - + + Stefan Fleckenstein - + License: - + { BSD 3-Clause - + Source code: - + - https://github.com/MaibornWolff/SecObserve + https://github.com/SecObserve/SecObserve diff --git a/frontend/src/commons/custom_fields/AuthorizationGroupNameURLField.tsx b/frontend/src/commons/custom_fields/AuthorizationGroupNameURLField.tsx new file mode 100644 index 000000000..0b8e23fbd --- /dev/null +++ b/frontend/src/commons/custom_fields/AuthorizationGroupNameURLField.tsx @@ -0,0 +1,17 @@ +import { FieldProps, Identifier, useRecordContext } from "react-admin"; + +import TextUrlField from "./TextUrlField"; + +export const AuthorizationGroupNameURLField = (props: FieldProps) => { + const record = useRecordContext(props); + return record ? 
( + + ) : null; +}; + +const showAuthorizationGroup = (id: Identifier) => { + return "#/authorization_groups/" + id + "/show"; +}; diff --git a/frontend/src/commons/custom_fields/CVEFoundInField.tsx b/frontend/src/commons/custom_fields/CVEFoundInField.tsx new file mode 100644 index 000000000..685daae03 --- /dev/null +++ b/frontend/src/commons/custom_fields/CVEFoundInField.tsx @@ -0,0 +1,77 @@ +import { Stack } from "@mui/material"; +import { ReactNode } from "react"; + +import TextUrlField from "./TextUrlField"; + +interface CVEFoundInFieldProps { + cve_found_in: any; + vulnerability_id: string; +} + +const Divider: ReactNode =   |  ; + +const CVEFoundInField = ({ cve_found_in, vulnerability_id }: CVEFoundInFieldProps) => { + let cisa_url = null; + let exploit_db_url = null; + let metasploit_url = null; + let nuclei_url = null; + let vulncheck_url = null; + let poc_github_url = null; + for (const item of cve_found_in) { + switch (item.source) { + case "CISA KEV": { + cisa_url = + "https://www.cisa.gov/known-exploited-vulnerabilities-catalog?search_api_fulltext=" + + vulnerability_id; + break; + } + case "Exploit-DB": { + exploit_db_url = "https://gitlab.com/exploit-database/exploitdb/-/raw/main/files_exploits.csv"; + break; + } + case "Metasploit": { + metasploit_url = + "https://raw.githubusercontent.com/rapid7/metasploit-framework/master/db/modules_metadata_base.json"; + break; + } + case "Nuclei": { + nuclei_url = "https://raw.githubusercontent.com/projectdiscovery/nuclei-templates/main/cves.json"; + break; + } + case "PoC GitHub": { + const cve_parts = vulnerability_id.split("-"); + if (cve_parts.length === 3) { + const year = cve_parts[1]; + poc_github_url = + "https://github.com/nomi-sec/PoC-in-GitHub/tree/master/" + + year + + "/" + + vulnerability_id + + ".json"; + } + break; + } + case "VulnCheck KEV": { + vulncheck_url = "https://vulncheck.com/cve/" + vulnerability_id; + break; + } + } + } + return ( + + {cisa_url && } + {cisa_url && (exploit_db_url || metasploit_url || nuclei_url || poc_github_url || vulncheck_url) && Divider} + {exploit_db_url && } + {exploit_db_url && (metasploit_url || nuclei_url || poc_github_url || vulncheck_url) && Divider} + {metasploit_url && } + {metasploit_url && (nuclei_url || poc_github_url || vulncheck_url) && Divider} + {nuclei_url && } + {nuclei_url && (poc_github_url || vulncheck_url) && Divider} + {poc_github_url && } + {poc_github_url && vulncheck_url && Divider} + {vulncheck_url && } + + ); +}; + +export default CVEFoundInField; diff --git a/frontend/src/commons/custom_fields/EditButton.tsx b/frontend/src/commons/custom_fields/EditButton.tsx index a5b9cfc2d..becb4821b 100644 --- a/frontend/src/commons/custom_fields/EditButton.tsx +++ b/frontend/src/commons/custom_fields/EditButton.tsx @@ -1,14 +1,19 @@ import EditIcon from "@mui/icons-material/Edit"; +import { ReactNode } from "react"; import SmallButton from "./SmallButton"; interface EditButtonProps { title: string; onClick: () => void; + icon?: ReactNode; } -const EditButton = ({ title, onClick }: EditButtonProps) => { - return SmallButton({ title, onClick, icon: }); +const EditButton = ({ title, onClick, icon }: EditButtonProps) => { + if (icon === undefined) { + icon = ; + } + return SmallButton({ title, onClick, icon: icon }); }; export default EditButton; diff --git a/frontend/src/commons/custom_fields/LabeledTextField.tsx b/frontend/src/commons/custom_fields/LabeledTextField.tsx index 482d72e78..879ce5e27 100644 --- a/frontend/src/commons/custom_fields/LabeledTextField.tsx +++ 
b/frontend/src/commons/custom_fields/LabeledTextField.tsx @@ -1,20 +1,12 @@ +import { Typography } from "@mui/material"; + interface LabeledTextFieldProps { text: string | number; label: string; } const LabeledTextField = (props: LabeledTextFieldProps) => { - return ( -
- {props.text} -
- ); + return {props.text}; }; export default LabeledTextField; diff --git a/frontend/src/commons/custom_fields/MarkdownEdit.css b/frontend/src/commons/custom_fields/MarkdownEdit.css new file mode 100644 index 000000000..8650c5c48 --- /dev/null +++ b/frontend/src/commons/custom_fields/MarkdownEdit.css @@ -0,0 +1,50 @@ +.prose { + font-family: "Roboto", sans-serif; + font-size: 1rem; + font-weight: 400; + line-height: 1.3; + padding-top: 0px; + padding-bottom: 0px; +} + +:root, +.light, +.light-theme { + --border: #c4c4c4; + --border-hover: #000000; + --border-focus-within: #000000; +} + +:root, +.dark, +.dark-theme { + --border: #676767; + --border-hover: #ffffff; + --border-focus-within: #6ed3f1; +} + +.mdxeditor { + border: 1px solid var(--border); + border-radius: 10px; +} + +.mdxeditor:hover { + border: 1px solid var(--border-hover); + border-radius: 10px; +} + +.mdxeditor:focus-within { + border: 2px solid var(--border-focus-within); + border-radius: 10px; +} + +.mdxeditor-toolbar { + border-radius: 10px; +} + +.cm-editor { + border-radius: 10px; +} +.cm-content { + font-size: 1rem; +} diff --git a/frontend/src/commons/custom_fields/MarkdownEdit.tsx b/frontend/src/commons/custom_fields/MarkdownEdit.tsx new file mode 100644 index 000000000..70ca3ec8c --- /dev/null +++ b/frontend/src/commons/custom_fields/MarkdownEdit.tsx @@ -0,0 +1,104 @@ +import { + BlockTypeSelect, + BoldItalicUnderlineToggles, + CodeToggle, + CreateLink, + DiffSourceToggleWrapper, + InsertImage, + InsertTable, + InsertThematicBreak, + ListsToggle, + MDXEditor, + Separator, + diffSourcePlugin, + headingsPlugin, + imagePlugin, + linkDialogPlugin, + linkPlugin, + listsPlugin, + markdownShortcutPlugin, + maxLengthPlugin, + quotePlugin, + tablePlugin, + thematicBreakPlugin, + toolbarPlugin, +} from "@mdxeditor/editor"; +import "@mdxeditor/editor/style.css"; +// @ts-expect-error Types are expected but none could be found +import { basicDark } from "cm6-theme-basic-dark"; +// @ts-expect-error Types are expected but none could be found +import { basicLight } from "cm6-theme-basic-light"; +import { Labeled } from "react-admin"; + +import { getTheme } from "../user_settings/functions"; +import "./MarkdownEdit.css"; + +interface MarkdownEditProps { + label: string; + initialValue: string; + setValue: (value: string) => void; + overlayContainer?: HTMLDivElement | null; + maxLength?: number; +} + +const MarkdownEdit = ({ label, initialValue, setValue, overlayContainer, maxLength }: MarkdownEditProps) => { + const mdxeditor_theme = getTheme() == "dark" ? "dark-theme" : "light-theme"; + const codemirror_theme = getTheme() == "dark" ? basicDark : basicLight; + + if (!maxLength) { + maxLength = Infinity; + } + + const allPlugins = () => [ + toolbarPlugin({ + toolbarContents: () => ( + + + + + + + + + + + + + + + + ), + }), + listsPlugin(), + quotePlugin(), + headingsPlugin(), + imagePlugin(), + linkPlugin(), + linkDialogPlugin(), + tablePlugin(), + thematicBreakPlugin(), + markdownShortcutPlugin(), + diffSourcePlugin({ + diffMarkdown: initialValue, + viewMode: "rich-text", + codeMirrorExtensions: [codemirror_theme], + }), + maxLengthPlugin(maxLength), + ]; + + return ( + + setValue(markdown ?? 
"")} + plugins={allPlugins()} + /> + + ); +}; + +export default MarkdownEdit; diff --git a/frontend/src/commons/custom_fields/MarkdownField.tsx b/frontend/src/commons/custom_fields/MarkdownField.tsx index e637432d1..215b26e94 100644 --- a/frontend/src/commons/custom_fields/MarkdownField.tsx +++ b/frontend/src/commons/custom_fields/MarkdownField.tsx @@ -1,35 +1,77 @@ import Markdown from "markdown-to-jsx"; +import { marked } from "marked"; +import { Fragment } from "react"; import { useLinkStyles } from "../../commons/layout/themes"; -import { getSettingTheme } from "../user_settings/functions"; +import { getResolvedSettingTheme } from "../user_settings/functions"; +import LabeledTextField from "./LabeledTextField"; interface MarkdownProps { content: string; label: string; } +// copied from https://stackoverflow.com/a/77300386 +function isMarkdownValue(value: string): boolean { + const tokenTypes: string[] = []; + + marked(value, { + walkTokens: (token) => { + tokenTypes.push(token.type); + }, + }); + + const isMarkdown = [ + "space", + "code", + "fences", + "heading", + "hr", + "link", + "blockquote", + "list", + "html", + "def", + "table", + "lheading", + "escape", + "tag", + "reflink", + "strong", + "codespan", + "url", + ].some((tokenType) => tokenTypes.includes(tokenType)); + + return isMarkdown; +} + const MarkdownField = (props: MarkdownProps) => { - const { classes } = useLinkStyles({ setting_theme: getSettingTheme() }); + const { classes } = useLinkStyles({ setting_theme: getResolvedSettingTheme() }); return ( - + {isMarkdownValue(props.content) && ( + - {props.content} - + }} + > + {props.content} + + )} + {!isMarkdownValue(props.content) && } +
); }; diff --git a/frontend/src/commons/custom_fields/MenuButton.tsx b/frontend/src/commons/custom_fields/MenuButton.tsx new file mode 100644 index 000000000..376d1a175 --- /dev/null +++ b/frontend/src/commons/custom_fields/MenuButton.tsx @@ -0,0 +1,33 @@ +import { Button } from "@mui/material"; + +import { getIconAndFontColor } from "../../commons/functions"; + +interface MenuButtonProps { + title: string; + onClick: () => void; + icon: any; +} + +const MenuButton = ({ title, onClick, icon }: MenuButtonProps) => { + return ( + + ); +}; + +export default MenuButton; diff --git a/frontend/src/commons/custom_fields/OSVLinuxDistributionField.tsx b/frontend/src/commons/custom_fields/OSVLinuxDistributionField.tsx new file mode 100644 index 000000000..7c56d292f --- /dev/null +++ b/frontend/src/commons/custom_fields/OSVLinuxDistributionField.tsx @@ -0,0 +1,25 @@ +import { Typography } from "@mui/material"; +import { Fragment } from "react"; + +interface OSVLinuxDistributionFieldProps { + osv_linux_distribution: string | undefined; + osv_linux_release: string | undefined; + label: string; +} + +const OSVLinuxDistributionField = (props: OSVLinuxDistributionFieldProps) => { + return ( + + {props.osv_linux_distribution && props.osv_linux_release && ( + + {props.osv_linux_distribution}:{props.osv_linux_release} + + )} + {props.osv_linux_distribution && !props.osv_linux_release && ( + {props.osv_linux_distribution} + )} + + ); +}; + +export default OSVLinuxDistributionField; diff --git a/frontend/src/commons/custom_fields/OSVLinuxDistributionInput.tsx b/frontend/src/commons/custom_fields/OSVLinuxDistributionInput.tsx new file mode 100644 index 000000000..41c64eb97 --- /dev/null +++ b/frontend/src/commons/custom_fields/OSVLinuxDistributionInput.tsx @@ -0,0 +1,40 @@ +import { Fragment } from "react"; +import { TextInput } from "react-admin"; + +import { AutocompleteInputMedium } from "../layout/themes"; +import TextUrlField from "./TextUrlField"; + +const DISTRIBUTION_CHOICES = [ + { id: "AlmaLinux", name: "AlmaLinux" }, + { id: "Alpine", name: "Alpine" }, + { id: "Chainguard", name: "Chainguard" }, + { id: "Debian", name: "Debian" }, + { id: "Mageia", name: "Mageia" }, + { id: "openSUSE", name: "openSUSE" }, + { id: "Photon OS", name: "Photon OS" }, + { id: "Red Hat", name: "Red Hat" }, + { id: "Rocky Linux", name: "Rocky Linux" }, + { id: "SUSE", name: "SUSE" }, + { id: "Ubuntu", name: "Ubuntu" }, + { id: "Wolfi", name: "Wolfi" }, +]; + +const OSVLinuxDistributionInput = () => { + return ( + + + + + + ); +}; + +export default OSVLinuxDistributionInput; diff --git a/frontend/src/commons/custom_fields/ObservationsCountField.tsx b/frontend/src/commons/custom_fields/ObservationsCountField.tsx index 0fd7ddb43..ec9abd165 100644 --- a/frontend/src/commons/custom_fields/ObservationsCountField.tsx +++ b/frontend/src/commons/custom_fields/ObservationsCountField.tsx @@ -44,7 +44,7 @@ const ObservationsCountField = (props: ObservationsProps) => { padding: 8, }} > - {record.open_critical_observation_count} + {record.active_critical_observation_count} { padding: 8, }} > - {record.open_high_observation_count} + {record.active_high_observation_count} { padding: 8, }} > - {record.open_medium_observation_count} + {record.active_medium_observation_count} { padding: 8, }} > - {record.open_low_observation_count} + {record.active_low_observation_count} { padding: 8, }} > - {record.open_none_observation_count} + {record.active_none_observation_count} { padding: 8, }} > - {record.open_unknown_observation_count} + 
{record.active_unknown_observation_count} ) : null; diff --git a/frontend/src/commons/custom_fields/PeriodicTaskStatusField.tsx b/frontend/src/commons/custom_fields/PeriodicTaskStatusField.tsx new file mode 100644 index 000000000..aff22e4e7 --- /dev/null +++ b/frontend/src/commons/custom_fields/PeriodicTaskStatusField.tsx @@ -0,0 +1,43 @@ +import { ChipField, useRecordContext } from "react-admin"; + +interface PeriodicTaskStatusFieldProps { + label: string; +} + +export const PeriodicTaskStatusField = ({ label }: PeriodicTaskStatusFieldProps) => { + const record = useRecordContext(); + + function get_status_color() { + if (record?.status === "Success") { + return "#00aa00"; + } else if (record?.status === "Failed") { + return "#d4333f"; + } else if (record?.status === "Running") { + return "#00B4F0"; + } else { + return "#424242"; + } + } + + function get_text_record() { + if (record?.status) { + return { text: record.status }; + } else { + return { text: "Unknown" }; + } + } + + return record?.status != null ? ( + + ) : null; +}; diff --git a/frontend/src/commons/custom_fields/SecurityGateTextField.tsx b/frontend/src/commons/custom_fields/SecurityGateTextField.tsx index 63b40440b..916f5739e 100644 --- a/frontend/src/commons/custom_fields/SecurityGateTextField.tsx +++ b/frontend/src/commons/custom_fields/SecurityGateTextField.tsx @@ -8,22 +8,22 @@ export const SecurityGateTextField = (props: SecurityGateTextProps) => { const record = useRecordContext(); function get_severity_color() { - if (record && record.security_gate_passed) { - return "#0a0"; + if (record?.security_gate_passed) { + return "#00aa00"; } else { return "#d4333f"; } } function get_text_record() { - if (record && record.security_gate_passed) { + if (record?.security_gate_passed) { return { text: "Passed" }; } else { return { text: "Failed" }; } } - return record && record.security_gate_passed != null ? ( + return record?.security_gate_passed != null ? ( { const record = useRecordContext(); return record ? 
( - + + {get_current_severity(record) && ( + + )} + {!get_current_severity(record) && ( + + )} + ) : null; }; diff --git a/frontend/src/commons/custom_fields/TextUrlField.tsx b/frontend/src/commons/custom_fields/TextUrlField.tsx index a201e35ea..500f8f225 100644 --- a/frontend/src/commons/custom_fields/TextUrlField.tsx +++ b/frontend/src/commons/custom_fields/TextUrlField.tsx @@ -1,13 +1,15 @@ import LaunchIcon from "@mui/icons-material/Launch"; +import { Link, Typography } from "@mui/material"; import { Fragment } from "react"; import { useLinkStyles } from "../../commons/layout/themes"; -import { getSettingTheme } from "../../commons/user_settings/functions"; +import { getResolvedSettingTheme } from "../../commons/user_settings/functions"; interface TextUrlFieldProps { text: string | number; url: string; - label: string; + label?: string | undefined; + new_tab?: boolean | undefined; } function is_valid_url(urlString: string): boolean { @@ -37,52 +39,27 @@ function is_invalid_url(urlString: string): boolean { } const TextUrlField = (props: TextUrlFieldProps) => { - const { classes } = useLinkStyles({ setting_theme: getSettingTheme() }); + const { classes } = useLinkStyles({ setting_theme: getResolvedSettingTheme() }); return ( - {is_valid_url(props.url) && ( - + {props.new_tab === true && is_valid_url(props.url) && ( + {props.text}   - + )} - {is_valid_relative_url(props.url) && ( - + {props.new_tab !== true && is_valid_url(props.url) && ( + {props.text} - + )} - {is_invalid_url(props.url) && ( - + {is_valid_relative_url(props.url) && ( + {props.text} - + )} + {is_invalid_url(props.url) && {props.text}} ); }; diff --git a/frontend/src/commons/custom_fields/ToolbarCancelSave.tsx b/frontend/src/commons/custom_fields/ToolbarCancelSave.tsx new file mode 100644 index 000000000..3bad719ce --- /dev/null +++ b/frontend/src/commons/custom_fields/ToolbarCancelSave.tsx @@ -0,0 +1,32 @@ +import { ReactNode } from "react"; +import { SaveButton } from "react-admin"; + +import CancelButton from "../../commons/custom_fields/CancelButton"; +import Toolbar from "../../commons/custom_fields/Toolbar"; + +interface ToolbarCancelSaveProps { + onClick: () => void; + saveButtonLabel?: string; + saveButtonIcon?: ReactNode; + alwaysEnable?: boolean; +} + +export const ToolbarCancelSave = ({ + onClick, + saveButtonLabel, + saveButtonIcon, + alwaysEnable, +}: ToolbarCancelSaveProps) => ( + + + {saveButtonLabel && !saveButtonIcon && } + {saveButtonLabel && saveButtonIcon && !alwaysEnable && ( + + )} + {saveButtonLabel && saveButtonIcon && alwaysEnable && ( + + )} + {!saveButtonLabel && alwaysEnable && } + {!saveButtonLabel && !alwaysEnable && } + +); diff --git a/frontend/src/commons/custom_fields/UserFullNameURLField.tsx b/frontend/src/commons/custom_fields/UserFullNameURLField.tsx new file mode 100644 index 000000000..8a990365d --- /dev/null +++ b/frontend/src/commons/custom_fields/UserFullNameURLField.tsx @@ -0,0 +1,12 @@ +import { FieldProps, Identifier, useRecordContext } from "react-admin"; + +import TextUrlField from "./TextUrlField"; + +export const UserFullNameURLField = (props: FieldProps) => { + const record = useRecordContext(props); + return record ? 
: null; +}; + +const showUser = (id: Identifier) => { + return "#/users/" + id + "/show"; +}; diff --git a/frontend/src/commons/custom_fields/VulnerabilityIdField.tsx b/frontend/src/commons/custom_fields/VulnerabilityIdField.tsx new file mode 100644 index 000000000..7b70cee6f --- /dev/null +++ b/frontend/src/commons/custom_fields/VulnerabilityIdField.tsx @@ -0,0 +1,41 @@ +import TextUrlField from "./TextUrlField"; + +interface VulnerabilityIdFieldProps { + vulnerability_id: string; +} + +const VULNERABILITY_URLS = { + ASB: "https://osv.dev/vulnerability/", + BIT: "https://osv.dev/vulnerability/", + CGA: "https://images.chainguard.dev/security/", + CVE: "https://nvd.nist.gov/vuln/detail/", + DLA: "https://security-tracker.debian.org/tracker/", + DSA: "https://security-tracker.debian.org/tracker/", + GHSA: "https://github.com/advisories/", + GO: "https://pkg.go.dev/vuln/", + OSV: "https://osv.dev/vulnerability/", + PYSEC: "https://osv.dev/vulnerability/", + RHSA: "https://access.redhat.com/errata/", + SNYK: "https://snyk.io/vuln/", + RUSTSEC: "https://rustsec.org/advisories/", +}; + +export function get_vulnerability_url(vulnerability_id: string): string { + let return_value = ""; + + Object.entries(VULNERABILITY_URLS).forEach((entry) => { + const [key, value] = entry; + if (vulnerability_id.startsWith(key)) { + return_value = value + vulnerability_id; + } + }); + + return return_value; +} + +const VulnerabilityIdField = ({ vulnerability_id }: VulnerabilityIdFieldProps) => { + const url = get_vulnerability_url(vulnerability_id); + return ; +}; + +export default VulnerabilityIdField; diff --git a/frontend/src/commons/custom_validators.ts b/frontend/src/commons/custom_validators.ts index 60e2c15ac..78a3e0608 100644 --- a/frontend/src/commons/custom_validators.ts +++ b/frontend/src/commons/custom_validators.ts @@ -2,6 +2,7 @@ import { maxLength, maxValue, minValue, required } from "react-admin"; export const validate_required = [required()]; +export const validate_required_32 = [required(), maxLength(32)]; export const validate_required_150 = [required(), maxLength(150)]; export const validate_required_255 = [required(), maxLength(255)]; export const validate_required_2048 = [required(), maxLength(2048)]; @@ -17,6 +18,7 @@ export const validate_0_999999 = [minValue(0), maxValue(999999)]; export const validate_0_23 = [minValue(0), maxValue(23)]; export const validate_0_59 = [minValue(0), maxValue(59)]; export const validate_1_4096 = [minValue(1), maxValue(4096)]; +export const validate_1_999999 = [minValue(1), maxValue(999999)]; export const validate_2000_9999 = [minValue(2000), maxValue(9999)]; export function validate_after_today() { diff --git a/frontend/src/commons/functions.tsx b/frontend/src/commons/functions.tsx index 9a7f3b702..c87b9d302 100644 --- a/frontend/src/commons/functions.tsx +++ b/frontend/src/commons/functions.tsx @@ -1,3 +1,6 @@ +import { PackageURL } from "packageurl-js"; +import { SortPayload } from "ra-core"; + import { httpClient } from "../commons/ra-data-django-rest-framework"; import { OBSERVATION_SEVERITY_CRITICAL, @@ -17,10 +20,15 @@ import { EVALUATION_RESULT_REVIEW_REQUIRED, EVALUATION_RESULT_UNKNOWN, } from "../licenses/types"; -import { getSettingTheme } from "./user_settings/functions"; +import { getResolvedSettingTheme, getSettingPackageInfoPreference } from "./user_settings/functions"; + +export function getErrorMessage(error: unknown) { + if (error instanceof Error) return error.message; + return String(error); +} export function getIconAndFontColor() { - if 
(getSettingTheme() == "dark") { + if (getResolvedSettingTheme() == "dark") { return "white"; } else { return "black"; @@ -52,9 +60,9 @@ export function get_severity_color(severity: string): string { return backgroundColor; } -export function get_evaluation_result_color(record: any | null, evaluation_result: string | null): string { +export function get_evaluation_result_color(record: any, evaluation_result: string | null): string { if (!evaluation_result) { - if (record && record.component_license_data) { + if (record?.component_license_data) { evaluation_result = record.component_license_data.evaluation_result; } else { evaluation_result = record.evaluation_result; @@ -102,65 +110,110 @@ export function get_cvss4_url(cvss_vector: string): string { return ""; } -const VULNERABILITY_URLS = { - CVE: "https://nvd.nist.gov/vuln/detail/", - DLA: "https://security-tracker.debian.org/tracker/", - GHSA: "https://github.com/advisories/", - OSV: "https://osv.dev/vulnerability/", - PYSEC: "https://osv.dev/vulnerability/", - SNYK: "https://snyk.io/vuln/", - RUSTSEC: "https://rustsec.org/advisories/", -}; +export function get_component_purl_url(component_purl: string): string | null { + if (component_purl === "") { + return null; + } -export function get_vulnerability_url(vulnerability_id: string): string | null { - let return_value = null; + try { + const purl = PackageURL.fromString(component_purl); - Object.entries(VULNERABILITY_URLS).forEach((entry) => { - const [key, value] = entry; - if (vulnerability_id.startsWith(key)) { - return_value = value + vulnerability_id; + if (purl.type === undefined || purl.name === undefined) { + return null; } - }); - return return_value; -} + let package_info_preference = getSettingPackageInfoPreference(); + package_info_preference ??= "open/source/insights"; -export function get_component_purl_url( - component_name: string, - component_version: string | null, - component_purl_type: string | null, - component_purl_namespace: string | null -): string | null { - if (component_purl_type === null) { + let namespace_separator = "/"; + if (purl.type === "maven") { + namespace_separator = ":"; + } + + let component_purl_url: string | null = null; + + if (package_info_preference === "open/source/insights") { + component_purl_url = get_purl_url_deps_dev( + purl.type, + purl.namespace, + purl.name, + purl.version, + namespace_separator + ); + } + + component_purl_url ??= get_purl_url_ecosyste_ms(purl.type, purl.namespace, purl.name, namespace_separator); + + return component_purl_url; + } catch (e: unknown) { + console.warn("PURL " + component_purl + " is not valid: " + getErrorMessage(e)); return null; } +} + +function get_purl_url_deps_dev( + purl_type: string, + purl_namespace: string | undefined, + purl_name: string, + purl_version: string | undefined, + namespace_separator: string +): string | null { + let purl_url: string | null = null; const typeArray: string[] = ["cargo", "golang", "maven", "npm", "nuget", "pypi"]; - if (!typeArray.includes(component_purl_type)) { - return null; - } + if (purl_type !== null && purl_name !== null && typeArray.includes(purl_type)) { + let deps_dev_type = purl_type; + if (purl_type === "golang") { + deps_dev_type = "go"; + } - let deps_dev_type = component_purl_type; - if (component_purl_type === "golang") { - deps_dev_type = "go"; + purl_url = "https://deps.dev/" + deps_dev_type + "/"; + if (purl_namespace !== undefined) { + purl_url += encodeURIComponent(purl_namespace) + encodeURIComponent(namespace_separator); + } + purl_url += 
encodeURIComponent(purl_name); + if (purl_version !== undefined) { + purl_url += "/" + purl_version; + } + } - let namespace_separator = "/"; - if (component_purl_type === "maven") { - namespace_separator = ":"; - } + return purl_url; +} - let component_purl_url = "https://deps.dev/" + deps_dev_type + "/"; - if (!component_name.includes(":") && component_purl_namespace !== null) { - component_purl_url = - component_purl_url + encodeURIComponent(component_purl_namespace) + encodeURIComponent(namespace_separator); - } - component_purl_url = component_purl_url + encodeURIComponent(component_name); - if (component_version !== null) { - component_purl_url = component_purl_url + "/" + component_version; +function get_purl_url_ecosyste_ms( + purl_type: string, + purl_namespace: string | undefined, + purl_name: string, + namespace_separator: string +): string | null { + let purl_url: string | null = null; + + const types: Record<string, string> = { + npm: "npmjs.org", + golang: "proxy.golang.org", + nuget: "nuget.org", + maven: "repo1.maven.org", + pypi: "pypi.org", + composer: "packagist.org", + gem: "rubygems.org", + cargo: "crates.io", + cocoapods: "cocoapods.org", + cpan: "cpan.org", + cran: "cran.r-project.org", + hackage: "hackage.haskell.org", + }; + + if (Object.keys(types).includes(purl_type)) { + const ecosystems_type = types[purl_type]; // eslint-disable-line security/detect-object-injection + + purl_url = "https://packages.ecosyste.ms/registries/" + ecosystems_type + "/packages/"; + if (purl_namespace !== undefined) { + purl_url += encodeURIComponent(purl_namespace) + encodeURIComponent(namespace_separator); + } + purl_url += encodeURIComponent(purl_name); } - return component_purl_url; + return purl_url; } const rtf = new Intl.RelativeTimeFormat("en", { @@ -187,10 +240,21 @@ export function set_settings_in_local_storage() { }); } +export const feature_email = () => { + try { + const settings = JSON.parse(localStorage.getItem("settings") ?? "{}"); + const features = settings.features ?? []; + const feature_vex_position = features.indexOf("feature_email"); + return feature_vex_position !== -1; + } catch { + return false; + } +}; + export const feature_vex_enabled = () => { try { - const settings = JSON.parse(localStorage.getItem("settings") || "{}"); - const features = settings.features || []; + const settings = JSON.parse(localStorage.getItem("settings") ?? "{}"); + const features = settings.features ?? []; const feature_vex_position = features.indexOf("feature_vex"); return feature_vex_position !== -1; } catch { @@ -200,8 +264,8 @@ export const feature_vex_enabled = () => { export function settings_risk_acceptance_expiry_date(): string | null { try { - const settings = JSON.parse(localStorage.getItem("settings") || "{}"); - const risk_acceptance_expiry_days = settings.risk_acceptance_expiry_days || null; + const settings = JSON.parse(localStorage.getItem("settings") ?? "{}"); + const risk_acceptance_expiry_days = settings.risk_acceptance_expiry_days ?? null; if (risk_acceptance_expiry_days === null) { return null; } @@ -213,10 +277,23 @@ export function settings_risk_acceptance_expiry_date(): string | null { } } +export function settings_vex_justification_style(): string | null { + try { + const settings = JSON.parse(localStorage.getItem("settings") ?? "{}"); + const vex_justification_style = settings.vex_justification_style ??
null; + if (vex_justification_style === null) { + return null; + } + return vex_justification_style; + } catch { + return null; + } +} + export const feature_general_rules_need_approval_enabled = () => { try { - const settings = JSON.parse(localStorage.getItem("settings") || "{}"); - const features = settings.features || []; + const settings = JSON.parse(localStorage.getItem("settings") ?? "{}"); + const features = settings.features ?? []; const feature_general_rules_need_approval = features.indexOf("feature_general_rules_need_approval"); return feature_general_rules_need_approval !== -1; } catch { @@ -226,8 +303,8 @@ export const feature_general_rules_need_approval_enabled = () => { export const feature_license_management = () => { try { - const settings = JSON.parse(localStorage.getItem("settings") || "{}"); - const features = settings.features || []; + const settings = JSON.parse(localStorage.getItem("settings") ?? "{}"); + const features = settings.features ?? []; const feature_vex_position = features.indexOf("feature_license_management"); return feature_vex_position !== -1; } catch { @@ -235,12 +312,46 @@ export const feature_license_management = () => { } }; +export const feature_automatic_api_import = () => { + try { + const settings = JSON.parse(localStorage.getItem("settings") ?? "{}"); + const features = settings.features ?? []; + const feature_vex_position = features.indexOf("feature_automatic_api_import"); + return feature_vex_position !== -1; + } catch { + return false; + } +}; + +export const feature_automatic_osv_scanning = () => { + try { + const settings = JSON.parse(localStorage.getItem("settings") ?? "{}"); + const features = settings.features ?? []; + const feature_vex_position = features.indexOf("feature_automatic_osv_scanning"); + return feature_vex_position !== -1; + } catch { + return false; + } +}; + +export const feature_exploit_information = () => { + try { + const settings = JSON.parse(localStorage.getItem("settings") ?? "{}"); + const features = settings.features ?? 
[]; + const feature_vex_position = features.indexOf("feature_exploit_information"); + return feature_vex_position !== -1; + } catch { + return false; + } +}; + export const justificationIsEnabledForStatus = (status: string) => { const vex_enabled = feature_vex_enabled(); - const justification_recommended_for_status = - [OBSERVATION_STATUS_NOT_AFFECTED, OBSERVATION_STATUS_NOT_SECURITY, OBSERVATION_STATUS_FALSE_POSITIVE].indexOf( - status - ) >= 0; + const justification_recommended_for_status = [ + OBSERVATION_STATUS_NOT_AFFECTED, + OBSERVATION_STATUS_NOT_SECURITY, + OBSERVATION_STATUS_FALSE_POSITIVE, + ].includes(status); return vex_enabled && justification_recommended_for_status; }; @@ -253,3 +364,35 @@ export const is_external = () => { const user = localStorage.getItem("user"); return user && JSON.parse(user).is_external; }; + +export function has_attribute(attribute: string, data: any, sort: SortPayload | undefined) { + if (sort?.field === attribute) { + return true; + } + + if (!data || !Array.isArray(data)) { + return false; + } + + return data.some((entry: any) => { + if (!entry) return false; + + // Handle nested attributes (e.g., "product_data.name") + if (attribute.includes(".")) { + const parts = attribute.split("."); + let value: any = entry; + for (const part of parts) { + if (value && typeof value === "object") { + value = value[part]; // eslint-disable-line security/detect-object-injection + } else { + value = undefined; + break; + } + } + return value !== null && value !== undefined && value !== ""; + } else { + const value = entry[attribute]; // eslint-disable-line security/detect-object-injection + return value !== null && value !== undefined && value !== ""; + } + }); +} diff --git a/frontend/src/commons/layout/AppBar.tsx b/frontend/src/commons/layout/AppBar.tsx index 5d89c0d90..87c87a4b9 100644 --- a/frontend/src/commons/layout/AppBar.tsx +++ b/frontend/src/commons/layout/AppBar.tsx @@ -20,7 +20,7 @@ const DocumentationMenu = forwardRef(() => { return ( { diff --git a/frontend/src/commons/layout/Logo.tsx b/frontend/src/commons/layout/Logo.tsx index 00b1edc63..71df5f1b4 100644 --- a/frontend/src/commons/layout/Logo.tsx +++ b/frontend/src/commons/layout/Logo.tsx @@ -1,10 +1,10 @@ -import { getSettingTheme } from "../user_settings/functions"; +import { getResolvedSettingTheme } from "../user_settings/functions"; const Logo = () => { - if (getSettingTheme() == "dark") { - return ; + if (getResolvedSettingTheme() == "dark") { + return SecObserve logo; } else { - return ; + return SecObserve logo; } }; diff --git a/frontend/src/commons/layout/Menu.tsx b/frontend/src/commons/layout/Menu.tsx index 8f39d03f3..cf3b08f6c 100644 --- a/frontend/src/commons/layout/Menu.tsx +++ b/frontend/src/commons/layout/Menu.tsx @@ -1,24 +1,28 @@ import ChecklistIcon from "@mui/icons-material/Checklist"; import SecurityIcon from "@mui/icons-material/Security"; import SettingsIcon from "@mui/icons-material/Settings"; -import Box from "@mui/material/Box"; -import { Fragment, useState } from "react"; +import { Badge, Box } from "@mui/material"; +import { useEffect, useState } from "react"; import { DashboardMenuItem, MenuItemLink, MenuProps, useSidebarState } from "react-admin"; import administration from "../../access_control/access_control_administration"; +import periodic_tasks from "../../background_tasks/periodic_tasks"; +import components from "../../core/components"; import observations from "../../core/observations"; import product_groups from "../../core/product_groups"; import products from 
"../../core/products"; import parsers from "../../import_observations/parsers"; import licenses from "../../licenses/licenses"; +import notifications from "../../notifications"; +import { get_notification_count, update_notification_count } from "../../notifications/notification_count"; import general_rules from "../../rules/general_rules"; import csaf from "../../vex/csaf"; +import cyclonedx from "../../vex/cyclonedx"; import openvex from "../../vex/openvex"; import vex_counters from "../../vex/vex_counters"; import vex_documents from "../../vex/vex_documents"; -import { feature_license_management, feature_vex_enabled } from "../functions"; -import { is_superuser } from "../functions"; -import notifications from "../notifications"; +import { IntervalHooks } from "../IntervalHooks"; +import { feature_license_management, feature_vex_enabled, is_superuser } from "../functions"; import settings from "../settings"; import SubMenu from "./SubMenu"; @@ -31,162 +35,183 @@ const Menu = ({ dense = false }: MenuProps) => { setState((state) => ({ ...state, [menu]: !state[menu] })); // eslint-disable-line security/detect-object-injection }; + IntervalHooks.useDispatch( + () => { + update_notification_count(); + }, + 5 * 60 * 1000 + ); + + useEffect(() => { + update_notification_count(); + }, []); + return ( - - - theme.transitions.create("width", { - easing: theme.transitions.easing.sharp, - duration: theme.transitions.duration.leavingScreen, - }), - }} + + theme.transitions.create("width", { + easing: theme.transitions.easing.sharp, + duration: theme.transitions.duration.leavingScreen, + }), + }} + > + + } + dense={dense} + /> + } + dense={dense} + /> + } + dense={dense} + /> + } + dense={dense} + /> + } + dense={dense} + /> + } + dense={dense} > - - } - dense={dense} - /> - } - dense={dense} - /> - } - dense={dense} - /> - } - dense={dense} - /> - } - dense={dense} - /> - {feature_vex_enabled() && ( - handleToggle("menuVEX")} - isOpen={state.menuVEX} - name="VEX" - icon={} - dense={dense} - > - } - dense={dense} - /> - } - dense={dense} - /> - {is_superuser() && ( - } - dense={dense} - /> - )} - + Notifications + {get_notification_count() && get_notification_count() != "0" && ( + )} + + {feature_vex_enabled() && ( handleToggle("menuSettings")} - isOpen={state.menuSettings} - name="Administration" - icon={} + handleToggle={() => handleToggle("menuVEX")} + isOpen={state.menuVEX} + name="VEX" + icon={} dense={dense} > - {feature_license_management() && ( - } - dense={dense} - /> - )} } + primaryText="CSAF" + leftIcon={} dense={dense} /> - {is_superuser() && ( - } - dense={dense} - /> - )} } + primaryText="CycloneDX" + leftIcon={} dense={dense} /> } + primaryText="OpenVEX" + leftIcon={} dense={dense} /> - {feature_vex_enabled() && ( + {is_superuser() && ( } + primaryText="VEX documents" + leftIcon={} dense={dense} /> )} - - handleToggle("menuSettings")} + isOpen={state.menuSettings} + name="Administration" + icon={} + dense={dense} > - - - - - + {feature_license_management() && ( + } + dense={dense} + /> + )} + } + dense={dense} + /> + {is_superuser() && ( + } + dense={dense} + /> + )} + {is_superuser() && ( + } + dense={dense} + /> + )} + } + dense={dense} + /> + } + dense={dense} + /> + {feature_vex_enabled() && ( + } + dense={dense} + /> + )} + +
); }; diff --git a/frontend/src/commons/layout/ToggleThemeButton.tsx b/frontend/src/commons/layout/ToggleThemeButton.tsx index bcf2801f9..ba1085a22 100644 --- a/frontend/src/commons/layout/ToggleThemeButton.tsx +++ b/frontend/src/commons/layout/ToggleThemeButton.tsx @@ -1,43 +1,77 @@ import DarkModeIcon from "@mui/icons-material/DarkMode"; import LightModeIcon from "@mui/icons-material/LightMode"; +import SettingsBrightnessIcon from "@mui/icons-material/SettingsBrightness"; import { IconButton, Tooltip } from "@mui/material"; +import { useEffect, useState } from "react"; import { useTheme } from "react-admin"; -import { getSettingTheme, saveSettingTheme } from "../../commons/user_settings/functions"; - -const tooltipTitle = () => { - const theme = getSettingTheme(); - return theme === "dark" ? "Light mode" : "Dark mode"; -}; - -const ThemeIcon = () => { - const theme = getSettingTheme(); - return theme === "dark" ? ( - - ) : ( - - ); -}; +import { + ThemePreference, + getNextTheme, + getSettingTheme, + resolveTheme, + saveSettingTheme, +} from "../../commons/user_settings/functions"; const ToggleThemeButton = () => { const [, setTheme] = useTheme(); + const [preference, setPreference] = useState(getSettingTheme() as ThemePreference); + + useEffect(() => { + const mediaQuery = window.matchMedia("(prefers-color-scheme: dark)"); + const handleChange = () => { + if (preference === "system") { + setTheme(resolveTheme("system")); + } + }; + + mediaQuery.addEventListener("change", handleChange); + return () => mediaQuery.removeEventListener("change", handleChange); + }, [preference, setTheme]); + + const toggleTheme = () => { + const nextTheme = getNextTheme(preference); + const resolvedTheme = resolveTheme(nextTheme); + + localStorage.setItem("theme", nextTheme); + saveSettingTheme(nextTheme); + setPreference(nextTheme); + setTheme(resolvedTheme); + }; + + const tooltipTitle = () => { + switch (preference) { + case "light": + return "Light mode"; + case "dark": + return "Dark mode"; + case "system": + return "System mode"; + default: + return "Light mode"; + } + }; + + const resolvedTheme = resolveTheme(preference); + const isDark = resolvedTheme === "dark"; + const iconColor = isDark ? 
"rgba(255, 255, 255, 0.7)" : "rgba(0, 0, 0, 0.54)"; - const toogleTheme = () => { - const theme = getSettingTheme(); - if (theme === "dark") { - setTheme("light"); - localStorage.setItem("theme", "light"); - saveSettingTheme("light"); - } else { - setTheme("dark"); - localStorage.setItem("theme", "dark"); - saveSettingTheme("dark"); + const ThemeIcon = () => { + switch (preference) { + case "light": + return ; + case "dark": + return ; + case "system": + return ; + default: + return ; } }; return ( - + diff --git a/frontend/src/commons/layout/themes.ts b/frontend/src/commons/layout/themes.ts index e9e4c2722..d58287a4b 100644 --- a/frontend/src/commons/layout/themes.ts +++ b/frontend/src/commons/layout/themes.ts @@ -53,6 +53,12 @@ export const useStyles = tss.create({ fontWeight: "bold", fontSize: "1rem", }, + tabFont: { + fontSize: "0.875rem", + fontWeight: 500, + lineHeight: 1.25, + letterSpacing: "0.02857em", + }, }); export const darkTheme = { @@ -86,6 +92,13 @@ export const darkTheme = { variant: "outlined" as const, }, }, + RaToolbar: { + styleOverrides: { + root: { + backgroundColor: "#ffffff1f", + }, + }, + }, }, }; diff --git a/frontend/src/commons/notifications/NotificationList.tsx b/frontend/src/commons/notifications/NotificationList.tsx deleted file mode 100644 index d87f5e4a6..000000000 --- a/frontend/src/commons/notifications/NotificationList.tsx +++ /dev/null @@ -1,85 +0,0 @@ -import { Fragment } from "react"; -import { - AutocompleteInput, - DatagridConfigurable, - DateField, - List, - ReferenceInput, - SelectColumnsButton, - TextField, - TextInput, - TopToolbar, -} from "react-admin"; - -import notifications from "."; -import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; -import ListHeader from "../../commons/layout/ListHeader"; -import { AutocompleteInputMedium } from "../layout/themes"; -import { TYPE_CHOICES } from "../types"; -import { getSettingListSize } from "../user_settings/functions"; -import NotificationBulkDeleteButton from "./NotificationBulkDeleteButton"; - -const listFilters = [ - , - , - , - , - - - , - - - , -]; - -const BulkActionButtons = () => ( - - - -); - -const ListActions = () => ( - - - -); - -const NotificationList = () => { - return ( - - - } - filters={listFilters} - sort={{ field: "created", order: "DESC" }} - disableSyncWithLocation={false} - storeKey="notifications.list" - actions={} - > - } - > - - - - - - - - - - - - ); -}; - -export default NotificationList; diff --git a/frontend/src/commons/notifications/NotificationShow.tsx b/frontend/src/commons/notifications/NotificationShow.tsx deleted file mode 100644 index cb5bb89fd..000000000 --- a/frontend/src/commons/notifications/NotificationShow.tsx +++ /dev/null @@ -1,67 +0,0 @@ -import { Stack, Typography } from "@mui/material"; -import { - DateField, - DeleteWithConfirmButton, - PrevNextButtons, - ReferenceField, - Show, - SimpleShowLayout, - TextField, - TopToolbar, - WithRecord, -} from "react-admin"; - -const ShowActions = () => { - return ( - - - - - - - ); -}; - -const NotificationShow = () => { - return ( - }> - ( - - Notification - - - - {notification && notification.message && } - {notification && notification.function && } - {notification && notification.arguments && } - {notification && notification.product && ( - - )} - {notification && notification.observation && ( - - )} - - - )} - /> - - ); -}; - -export default NotificationShow; diff --git a/frontend/src/commons/ra-data-django-rest-framework/index.ts 
b/frontend/src/commons/ra-data-django-rest-framework/index.ts index 1b7493208..2983e8862 100644 --- a/frontend/src/commons/ra-data-django-rest-framework/index.ts +++ b/frontend/src/commons/ra-data-django-rest-framework/index.ts @@ -1,12 +1,13 @@ /** * Copyright (c) 2020 Bojan Mihelac under the MIT License * See https://github.com/bmihelac/ra-data-django-rest-framework - * Copied to make it compatible with React-Admin 4 and DefectDojo + * Copied to make it compatible with React-Admin 4 and SecObserve */ import queryString from "query-string"; import { DataProvider, Identifier, fetchUtils } from "react-admin"; -import { get_oidc_id_token, jwt_signed_in, oidc_signed_in } from "../../access_control/auth_provider/authProvider"; +import { jwt_signed_in } from "../../access_control/auth_provider/authProvider"; +import { get_oidc_id_token, oidc_signed_in, updateRefreshToken } from "../../access_control/auth_provider/oidc"; const base_url = window.__RUNTIME_CONFIG__.API_BASE_URL; @@ -59,13 +60,19 @@ function createOptionsFromTokenOIDC() { } } -export function httpClient(url: string, options?: fetchUtils.Options | undefined) { +export async function httpClient(url: string, options?: fetchUtils.Options | undefined) { if (oidc_signed_in()) { - return fetchUtils.fetchJson(url, Object.assign(createOptionsFromTokenOIDC(), options)); + return updateRefreshToken() + .then(() => { + return fetchUtils.fetchJson(url, Object.assign(createOptionsFromTokenOIDC(), options)); + }) + .catch((error: Error) => { + throw error; + }); } else if (jwt_signed_in()) { return fetchUtils.fetchJson(url, Object.assign(createOptionsFromTokenJWT(), options)); } else { - return Promise.reject(); + throw new Error("Not authenticated"); } } @@ -79,7 +86,7 @@ export default (): DataProvider => { return { getList: async (resource, params) => { let api_resource = resource; - if (params.meta && params.meta.api_resource) { + if (params?.meta?.api_resource) { api_resource = params.meta.api_resource; } @@ -108,7 +115,7 @@ export default (): DataProvider => { getMany: (resource, params) => { let api_resource = resource; - if (params.meta && params.meta.api_resource) { + if (params?.meta?.api_resource) { api_resource = params.meta.api_resource; } diff --git a/frontend/src/commons/settings/SettingsEdit.tsx b/frontend/src/commons/settings/SettingsEdit.tsx index b10e1523c..1ee395041 100644 --- a/frontend/src/commons/settings/SettingsEdit.tsx +++ b/frontend/src/commons/settings/SettingsEdit.tsx @@ -1,6 +1,15 @@ -import { Divider, Stack, Typography } from "@mui/material"; +import { Divider, Grid, Stack, Typography } from "@mui/material"; import { Fragment } from "react"; -import { BooleanInput, Edit, FormDataConsumer, NumberInput, SaveButton, SimpleForm, Toolbar } from "react-admin"; +import { + BooleanInput, + Edit, + FormDataConsumer, + NumberInput, + RadioButtonGroupInput, + SaveButton, + SimpleForm, + Toolbar, +} from "react-admin"; import settings from "."; import { @@ -8,10 +17,13 @@ import { validate_0_59, validate_0_999999, validate_1_4096, + validate_1_999999, validate_255, } from "../../commons/custom_validators"; import ListHeader from "../../commons/layout/ListHeader"; -import { TextInputWide } from "../../commons/layout/themes"; +import { TextInputExtraWide, TextInputWide } from "../../commons/layout/themes"; +import { feature_email } from "../functions"; +import { VEX_JUSTIFICATION_TYPE_CHOICES } from "../types"; const CustomToolbar = () => { return ( @@ -23,27 +35,13 @@ const CustomToolbar = () => { const SettingsEdit = () => { 
const transform = (data: any) => { - if (!data.internal_users) { - data.internal_users = ""; - } - if (!data.branch_housekeeping_exempt_branches) { - data.branch_housekeeping_exempt_branches = ""; - } - if (!data.base_url_frontend) { - data.base_url_frontend = ""; - } - if (!data.email_from) { - data.email_from = ""; - } - if (!data.exception_email_to) { - data.exception_email_to = ""; - } - if (!data.exception_ms_teams_webhook) { - data.exception_ms_teams_webhook = ""; - } - if (!data.exception_slack_webhook) { - data.exception_slack_webhook = ""; - } + data.internal_users ??= ""; + data.branch_housekeeping_exempt_branches ??= ""; + data.base_url_frontend ??= ""; + data.email_from ??= ""; + data.exception_email_to ??= ""; + data.exception_ms_teams_webhook ??= ""; + data.exception_slack_webhook ??= ""; return data; }; @@ -55,25 +53,31 @@ const SettingsEdit = () => { Authentication - - + + + + + + + + { helperText="Comma separated list of email regular expressions to identify internal users" sx={{ marginBottom: 2 }} /> - - - - Background tasks (restart needed) - - - - - Features - - - + + + + + + + + + {({ formData }) => + formData.feature_vex && ( + + ) + } + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {({ formData }) => + formData.feature_exploit_information && ( + + ) + } + + + + + @@ -140,48 +194,41 @@ const SettingsEdit = () => { Housekeeping for branches - - - - + - - + + {({ formData }) => + formData.branch_housekeeping_active && ( + + + + + + + + + ) + } + @@ -194,28 +241,32 @@ const SettingsEdit = () => { helperText="Base URL of the frontend, used to set links in notifications correctly" sx={{ marginBottom: 2 }} /> - - - + )} + {feature_email() && ( + + )} + - { {({ formData }) => formData.security_gate_active && ( - - - - - - - - + + + + + + + + + + + + + + + + ) } - Risk acceptance expiry + Password validation for non-OIDC users - - - - - + + + + + + + + + + + + + + + - - Automatic API import + + Background tasks - - - {({ formData }) => - formData.feature_automatic_api_import && ( - - - - - ) - } - - - - Password validation for non-OIDC users + + The settings in this section require a restart of the SecObserve backend to take effect. 
+ - - - - - - License management - - - - {({ formData }) => - formData.feature_license_management && ( - - - - - ) - } - + + + + + + + {({ formData }) => + formData.feature_license_management && ( + + ) + } + + + + + {({ formData }) => + (formData.feature_automatic_api_import || + formData.feature_automatic_osv_scanning) && ( + + ) + } + + + + + + + + + + + {({ formData }) => + formData.feature_license_management && ( + + ) + } + + + + + {({ formData }) => + (formData.feature_automatic_api_import || + formData.feature_automatic_osv_scanning) && ( + + ) + } + + + + diff --git a/frontend/src/commons/settings/SettingsShow.tsx b/frontend/src/commons/settings/SettingsShow.tsx index d20a4b98e..2e36d0232 100644 --- a/frontend/src/commons/settings/SettingsShow.tsx +++ b/frontend/src/commons/settings/SettingsShow.tsx @@ -1,9 +1,10 @@ -import { Box, Paper, Stack, Typography } from "@mui/material"; +import { Box, Grid, Paper, Stack, Typography } from "@mui/material"; import { Fragment } from "react"; import { BooleanField, EditButton, Labeled, NumberField, Show, TextField, TopToolbar, WithRecord } from "react-admin"; import settings from "."; import ListHeader from "../../commons/layout/ListHeader"; +import { feature_email } from "../functions"; import JWTSecretReset from "./JWTSecretReset"; const ShowActions = () => { @@ -26,82 +27,124 @@ const SettingsShowComponent = () => { Authentication - - - - - - - - {settings.internal_users && ( - - + + + + - )} - - - - - - Background tasks (restart needed) - - - - - - - - + + + + - - - - - + + + + {settings.internal_users && ( + + + + )} + {settings.oidc_clock_skew > 0 && ( + + + + )} Features - - - - - - - - - - - + + + + + + + + + + + {settings.feature_vex && ( + + + + )} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {settings.feature_exploit_information && ( + + + + )} + + + + + + Housekeeping for branches - - - - - - - - - - - - - - - - {settings.branch_housekeeping_exempt_branches && ( - - - - )} - + + + + {settings.branch_housekeeping_active && ( + + + + + + + + {settings.branch_housekeeping_exempt_branches && ( + + + + )} + + + )} @@ -114,12 +157,12 @@ const SettingsShowComponent = () => { )} - {settings.email_from && ( + {feature_email() && settings.email_from && ( )} - {settings.exception_email_to && ( + {feature_email() && settings.exception_email_to && ( @@ -144,114 +187,132 @@ const SettingsShowComponent = () => { Security gates - - - - - {settings.security_gate_active && ( - - - - - - - - - - - - - - - - - - - - - )} - + + + + {settings.security_gate_active && ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + )} - Risk acceptance expiry + Password validation for non-OIDC users - - - - - - - - - - - - - - - - - Automatic API import - - - - - - {settings.feature_automatic_api_import && ( - - - + + + + + - - + + - )} - + + + + + + + + + + + + - Password validation for non-OIDC users + Background tasks - - - - - - - - - - - - - - - - - - License management - - - - - - {settings.feature_license_management && ( - - - + + + + + + + + + - - + {settings.feature_license_management && ( + + + + )} + + + + + + + {(settings.feature_automatic_api_import || + settings.feature_automatic_osv_scanning) && ( + + + + )} + + - )} - + + + + + + + {settings.feature_license_management && ( + + + + )} + + + + + + + {(settings.feature_automatic_api_import || + settings.feature_automatic_osv_scanning) && ( + + + + )} + + + )} diff --git a/frontend/src/commons/types.ts b/frontend/src/commons/types.ts index d1a0b3845..316150b73 
100644 --- a/frontend/src/commons/types.ts +++ b/frontend/src/commons/types.ts @@ -1,23 +1,3 @@ -import { Identifier, RaRecord } from "react-admin"; - -export const TYPE_CHOICES = [ - { id: "Exception", name: "Exception" }, - { id: "Security gate", name: "Security gate" }, - { id: "Task", name: "Task" }, -]; - -export interface Notification extends RaRecord { - id: Identifier; - type: string; - name: string; - created: Date; - message: string; - user: Identifier; - observation: Identifier; - function: string; - arguments: string; -} - export type ThemeName = "light" | "dark"; declare global { @@ -25,3 +5,23 @@ declare global { restServer: any; } } + +export const VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX = "CSAF/OpenVEX"; +export const VEX_JUSTIFICATION_TYPE_CYCLONEDX = "CycloneDX"; + +export const VEX_JUSTIFICATION_TYPE_CHOICES = [ + { id: VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX, name: VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX }, + { id: VEX_JUSTIFICATION_TYPE_CYCLONEDX, name: VEX_JUSTIFICATION_TYPE_CYCLONEDX }, +]; + +export const METRICS_TIMESPAN_7_DAYS = "Week"; +export const METRICS_TIMESPAN_30_DAYS = "Month"; +export const METRICS_TIMESPAN_90_DAYS = "Quarter"; +export const METRICS_TIMESPAN_365_DAYS = "Year"; + +export const METRICS_TIMESPAN_CHOICES = [ + { id: METRICS_TIMESPAN_7_DAYS, name: METRICS_TIMESPAN_7_DAYS }, + { id: METRICS_TIMESPAN_30_DAYS, name: METRICS_TIMESPAN_30_DAYS }, + { id: METRICS_TIMESPAN_90_DAYS, name: METRICS_TIMESPAN_90_DAYS }, + { id: METRICS_TIMESPAN_365_DAYS, name: METRICS_TIMESPAN_365_DAYS }, +]; diff --git a/frontend/src/commons/user_settings/UserSettings.tsx b/frontend/src/commons/user_settings/UserSettings.tsx index be4208321..50aa4dd1a 100644 --- a/frontend/src/commons/user_settings/UserSettings.tsx +++ b/frontend/src/commons/user_settings/UserSettings.tsx @@ -9,23 +9,60 @@ import { Stack, Typography, } from "@mui/material"; +import { useEffect } from "react"; import { Title, useTheme } from "react-admin"; -import { getSettingListSize, getSettingTheme, saveSettingListSize, saveSettingTheme } from "./functions"; +import { + METRICS_TIMESPAN_7_DAYS, + METRICS_TIMESPAN_30_DAYS, + METRICS_TIMESPAN_90_DAYS, + METRICS_TIMESPAN_365_DAYS, +} from "../types"; +import { + ThemePreference, + getSettingListSize, + getSettingMetricsTimespan, + getSettingPackageInfoPreference, + getSettingTheme, + resolveTheme, + saveSettingListSize, + saveSettingPackageInfoPreference, + saveSettingTheme, + saveSettingsMetricsTimespan, +} from "./functions"; const UserSettings = () => { const [, setTheme] = useTheme(); + useEffect(() => { + const mediaQuery = window.matchMedia("(prefers-color-scheme: dark)"); + const handleChange = () => { + const currentPreference = getSettingTheme() as ThemePreference; + if (currentPreference === "system") { + setTheme(resolveTheme("system")); + } + }; + + mediaQuery.addEventListener("change", handleChange); + return () => mediaQuery.removeEventListener("change", handleChange); + }, [setTheme]); + function setLightTheme() { - setTheme("light"); localStorage.setItem("theme", "light"); saveSettingTheme("light"); + setTheme("light"); } function setDarkTheme() { - setTheme("dark"); localStorage.setItem("theme", "dark"); saveSettingTheme("dark"); + setTheme("dark"); + } + + function setSystemTheme() { + localStorage.setItem("theme", "system"); + saveSettingTheme("system"); + setTheme(resolveTheme("system")); } return ( @@ -49,6 +86,12 @@ const UserSettings = () => { label="Dark" onClick={() => setDarkTheme()} /> + } + label="System" + onClick={() => setSystemTheme()} 
+ /> @@ -71,6 +114,62 @@ const UserSettings = () => { /> + + + Package information preference + + + + } + label="open/source/insights" + onClick={() => saveSettingPackageInfoPreference("open/source/insights")} + /> + } + label="ecosyste.ms" + onClick={() => saveSettingPackageInfoPreference("ecosyste.ms")} + /> + + + + + Metrics Timespan (days) + + + + } + label="7" + onClick={() => saveSettingsMetricsTimespan(METRICS_TIMESPAN_7_DAYS)} + /> + } + label="30" + onClick={() => saveSettingsMetricsTimespan(METRICS_TIMESPAN_30_DAYS)} + /> + } + label="90" + onClick={() => saveSettingsMetricsTimespan(METRICS_TIMESPAN_90_DAYS)} + /> + } + label="365" + onClick={() => saveSettingsMetricsTimespan(METRICS_TIMESPAN_365_DAYS)} + /> + + diff --git a/frontend/src/commons/user_settings/functions.ts b/frontend/src/commons/user_settings/functions.ts index 9fc2474fe..9abb8356c 100644 --- a/frontend/src/commons/user_settings/functions.ts +++ b/frontend/src/commons/user_settings/functions.ts @@ -1,6 +1,51 @@ import { ThemeType } from "react-admin"; import { httpClient } from "../../commons/ra-data-django-rest-framework"; +import { + METRICS_TIMESPAN_7_DAYS, + METRICS_TIMESPAN_30_DAYS, + METRICS_TIMESPAN_90_DAYS, + METRICS_TIMESPAN_365_DAYS, +} from "../types"; + +export type ThemePreference = "light" | "dark" | "system"; + +export function getSystemPrefersDark(): boolean { + return window.matchMedia("(prefers-color-scheme: dark)").matches; +} + +export function resolveTheme(preference: ThemePreference): ThemeType { + if (preference === "system") { + return getSystemPrefersDark() ? "dark" : "light"; + } + return preference; +} + +export function castThemePreference(theme: string): ThemePreference { + switch (theme) { + case "light": + return "light"; + case "dark": + return "dark"; + case "system": + return "system"; + default: + return "light"; + } +} + +export function getNextTheme(current: ThemePreference): ThemePreference { + switch (current) { + case "light": + return "dark"; + case "dark": + return "system"; + case "system": + return "light"; + default: + return "light"; + } +} export async function saveSettingTheme(theme: string) { const user = JSON.parse(localStorage.getItem("user") || "{}"); @@ -15,60 +60,90 @@ export function getSettingTheme(): string { const user = localStorage.getItem("user"); if (user) { const user_json = JSON.parse(user); - theme = user_json.setting_theme; + theme = user_json.setting_theme ?? 
theme; } else if (storage_theme) { theme = storage_theme; } - return theme; } +export function getResolvedSettingTheme(): ThemeType { + const user_theme = getSettingTheme(); + return resolveTheme(castThemePreference(user_theme)); +} + export function saveSettingListSize(list_size: string) { saveSetting({ setting_list_size: list_size }); } -export function getSettingListSize(): "small" | "medium" | undefined { - let list_size: "small" | "medium" | undefined = "medium"; +type ListSize = "small" | "medium" | undefined; + +export function getSettingListSize(): ListSize { + let list_size: ListSize = "medium"; const user = localStorage.getItem("user"); if (user) { const user_json = JSON.parse(user); - list_size = user_json.setting_list_size as "small" | "medium" | undefined; + list_size = user_json.setting_list_size as ListSize; } return list_size; } -export function getTheme(): ThemeType { - const setting_theme = getSettingTheme(); - if (setting_theme == "dark") { - return "dark"; - } else { - return "light"; +export function saveSettingPackageInfoPreference(package_info_preference: string) { + saveSetting({ setting_package_info_preference: package_info_preference }); +} + +type PackageInfoPreference = "open/source/insights" | "ecosyste.ms" | undefined; + +export function getSettingPackageInfoPreference(): PackageInfoPreference { + let package_info_preference: PackageInfoPreference = "open/source/insights"; + + const user = localStorage.getItem("user"); + if (user) { + const user_json = JSON.parse(user); + package_info_preference = user_json.setting_package_info_preference as PackageInfoPreference; } + + return package_info_preference; +} + +export function getTheme(): ThemeType { + const setting_theme = getSettingTheme() as ThemePreference; + return resolveTheme(setting_theme); } -export async function saveSettingListProperties() { - const list_settings: { key: string; value: string | null }[] = []; - Object.keys(localStorage).forEach(function (ls_key) { - if (ls_key.startsWith("RaStore.preferences")) { - list_settings.push({ key: ls_key, value: localStorage.getItem(ls_key) }); - } - }); - const list_setting_string = JSON.stringify(list_settings); - saveSetting({ setting_list_properties: list_setting_string }); +export async function saveSettingsMetricsTimespan(setting_metrics_timespan: string) { + const user = JSON.parse(localStorage.getItem("user") || "{}"); + user.setting_metrics_timespan = setting_metrics_timespan; + localStorage.setItem("user", JSON.stringify(user)); + saveSetting({ setting_metrics_timespan: setting_metrics_timespan }); +} + +export function getSettingMetricsTimespan(): string { + let setting_metrics_timespan = METRICS_TIMESPAN_7_DAYS; + const user = localStorage.getItem("user"); + if (user) { + const user_json = JSON.parse(user); + setting_metrics_timespan = user_json.setting_metrics_timespan; + } + + return setting_metrics_timespan; } -export function setListProperties(setting_list_properties: string) { - if (setting_list_properties == null || setting_list_properties == "") { - return; +export function getSettingsMetricsTimespanInDays(): number { + switch (getSettingMetricsTimespan()) { + case METRICS_TIMESPAN_7_DAYS: + return 7; + case METRICS_TIMESPAN_30_DAYS: + return 30; + case METRICS_TIMESPAN_90_DAYS: + return 90; + case METRICS_TIMESPAN_365_DAYS: + return 365; + default: + return 7; // Default to Week } - const list_settings = JSON.parse(setting_list_properties); - list_settings.forEach(function (ls: { key: string; value: string | null }) { - if (ls.value != 
null) { - localStorage.setItem(ls.key, ls.value); - } - }); } function saveSetting(setting: any) { diff --git a/frontend/src/core/branches/BranchCreate.tsx b/frontend/src/core/branches/BranchCreate.tsx index 75a52fafa..1b0382168 100644 --- a/frontend/src/core/branches/BranchCreate.tsx +++ b/frontend/src/core/branches/BranchCreate.tsx @@ -1,18 +1,18 @@ -import { Dialog, DialogContent, DialogTitle } from "@mui/material"; +import { Dialog, DialogContent, DialogTitle, Stack } from "@mui/material"; import { Fragment, useState } from "react"; -import { BooleanInput, CreateBase, SaveButton, SimpleForm, useCreate, useNotify, useRefresh } from "react-admin"; +import { BooleanInput, CreateBase, SimpleForm, useCreate, useNotify, useRefresh } from "react-admin"; import AddButton from "../../commons/custom_fields/AddButton"; -import CancelButton from "../../commons/custom_fields/CancelButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import OSVLinuxDistributionInput from "../../commons/custom_fields/OSVLinuxDistributionInput"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_255, validate_required_255 } from "../../commons/custom_validators"; import { TextInputWide } from "../../commons/layout/themes"; export type BranchCreateProps = { - id: any; + product: any; }; -const BranchCreate = ({ id }: BranchCreateProps) => { +const BranchCreate = ({ product }: BranchCreateProps) => { const [open, setOpen] = useState(false); const refresh = useRefresh(); const notify = useNotify(); @@ -24,15 +24,22 @@ const BranchCreate = ({ id }: BranchCreateProps) => { setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); - const create_branch = (data: any) => { - data.product = id; + data.product = product.id; + + if (!data.purl) { + data.purl = ""; + } + if (!data.cpe23) { + data.cpe23 = ""; + } + if (!data.osv_linux_distribution) { + data.osv_linux_distribution = ""; + } + if (!data.osv_linux_release) { + data.osv_linux_release = ""; + } + create( "branches", { data: data }, @@ -40,24 +47,29 @@ const BranchCreate = ({ id }: BranchCreateProps) => { onSuccess: () => { refresh(); notify("Branch / version added", { type: "success" }); + setOpen(false); }, onError: (error: any) => { notify(error.message, { type: "warning" }); }, } ); - setOpen(false); }; return ( - + Add branch / version - }> + }> + { label="Protect from housekeeping" defaultValue={false} /> + {product?.osv_enabled && ( + + + + )} diff --git a/frontend/src/core/branches/BranchEdit.tsx b/frontend/src/core/branches/BranchEdit.tsx index d16fdc62e..95851c4dc 100644 --- a/frontend/src/core/branches/BranchEdit.tsx +++ b/frontend/src/core/branches/BranchEdit.tsx @@ -1,14 +1,18 @@ -import { Dialog, DialogContent, DialogTitle } from "@mui/material"; +import { Dialog, DialogContent, DialogTitle, Stack } from "@mui/material"; import { Fragment, useState } from "react"; -import { BooleanInput, SaveButton, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; +import { BooleanInput, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import EditButton from "../../commons/custom_fields/EditButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import OSVLinuxDistributionInput from "../../commons/custom_fields/OSVLinuxDistributionInput"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_255, validate_required_255 } from 
"../../commons/custom_validators"; import { TextInputWide } from "../../commons/layout/themes"; -const BranchEdit = () => { +export type BranchEditProps = { + product: any; +}; + +const BranchEdit = ({ product }: BranchEditProps) => { const [open, setOpen] = useState(false); const [update] = useUpdate(); const refresh = useRefresh(); @@ -20,11 +24,27 @@ const BranchEdit = () => { setOpen(false); }; const branch_update = async (data: any) => { + if (!data.purl) { + data.purl = ""; + } + if (!data.cpe23) { + data.cpe23 = ""; + } + if (!data.osv_linux_distribution) { + data.osv_linux_distribution = ""; + } + if (!data.osv_linux_release) { + data.osv_linux_release = ""; + } + const patch = { name: data.name, + is_default_branch: data.is_default_branch, housekeeping_protect: data.housekeeping_protect, purl: data.purl, cpe23: data.cpe23, + osv_linux_distribution: data.osv_linux_distribution, + osv_linux_release: data.osv_linux_release, }; update( @@ -39,6 +59,7 @@ const BranchEdit = () => { notify("Branch / version updated", { type: "success", }); + setOpen(false); }, onError: (error: any) => { notify(error.message, { @@ -47,26 +68,25 @@ const BranchEdit = () => { }, } ); - setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); return ( - + Edit branch / version - }> + }> + + {product?.osv_enabled && ( + + + + )} diff --git a/frontend/src/core/branches/BranchEmbeddedList.tsx b/frontend/src/core/branches/BranchEmbeddedList.tsx index 09e4f3979..986e495d5 100644 --- a/frontend/src/core/branches/BranchEmbeddedList.tsx +++ b/frontend/src/core/branches/BranchEmbeddedList.tsx @@ -3,22 +3,36 @@ import { BooleanField, Datagrid, DateField, + FieldProps, ListContextProvider, ResourceContextProvider, TextField, + WithListContext, WithRecord, useListController, + useRecordContext, } from "react-admin"; -import { PERMISSION_BRANCH_DELETE, PERMISSION_BRANCH_EDIT } from "../../access_control/types"; +import { PERMISSION_BRANCH_DELETE, PERMISSION_BRANCH_EDIT, PERMISSION_PRODUCT_EDIT } from "../../access_control/types"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; import LicensesCountField from "../../commons/custom_fields/LicensesCountField"; +import OSVLinuxDistributionField from "../../commons/custom_fields/OSVLinuxDistributionField"; import ObservationsCountField from "../../commons/custom_fields/ObservationsCountField"; import TextUrlField from "../../commons/custom_fields/TextUrlField"; -import { feature_license_management } from "../../commons/functions"; +import { feature_license_management, has_attribute } from "../../commons/functions"; import { getSettingListSize } from "../../commons/user_settings/functions"; import BranchDelete from "./BranchDelete"; import BranchEdit from "./BranchEdit"; +import DefaultBranch from "./DefaultBranch"; + +const BranchNameURLField = (props: FieldProps) => { + const record = useRecordContext(props); + return record ? : null; +}; + +function get_observations_url(product_id: number, branch_id: number): string { + return `#/products/${product_id}/show?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A["Open"%2C"Affected"%2C"In review"]%2C%22branch%22%3A${branch_id}%7D&order=ASC&sort=current_severity`; +} type BranchEmbeddedListProps = { product: any; @@ -38,55 +52,65 @@ const BranchEmbeddedList = ({ product }: BranchEmbeddedListProps) => { return
Loading...
; } - function get_observations_url(product_id: number, branch_id: number): string { - return `#/products/${product_id}/show?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A%22Open%22%2C%22branch%22%3A${branch_id}%7D&order=ASC&sort=current_severity`; - } - return (
- - ( - ( + + + + {has_attribute("purl", data, sort) && } + {has_attribute("cpe23", data, sort) && } + + !branch.is_default_branch && + } + /> + {product?.has_branch_osv_linux_distribution && ( + ( + + )} + /> + )} + + {feature_license_management() && product?.has_licenses && ( + + )} + + ( + + {product?.permissions.includes(PERMISSION_BRANCH_EDIT) && ( + + )} + {product?.permissions.includes(PERMISSION_PRODUCT_EDIT) && + !branch.is_default_branch && } + {product?.permissions.includes(PERMISSION_BRANCH_DELETE) && + !branch.is_default_branch && } + + )} /> - )} - /> - - - - - {feature_license_management() && product.has_licenses && ( - + )} - - - !branch.is_default_branch && - } - /> - ( - - {product && product.permissions.includes(PERMISSION_BRANCH_EDIT) && } - {product && - product.permissions.includes(PERMISSION_BRANCH_DELETE) && - !branch.is_default_branch && } - - )} - /> - + />
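The user settings changes above add a three-state theme preference ("light", "dark", "system") on top of react-admin's two concrete themes. A minimal usage sketch, not part of this change set, of how a toggle could cycle through the preferences and resolve "system" against the OS setting; the component and the import path are assumptions:

import { useTheme } from "react-admin";
import {
    castThemePreference,
    getNextTheme,
    getSettingTheme,
    resolveTheme,
    saveSettingTheme,
} from "../../commons/user_settings/functions";

// Cycle light -> dark -> system -> light; the stored value is the preference,
// while react-admin only ever receives the resolved "light" | "dark".
const ThemeCycleExample = () => {
    const [, setTheme] = useTheme();

    const cycle = () => {
        const current = castThemePreference(getSettingTheme());
        const next = getNextTheme(current);
        saveSettingTheme(next); // persists the preference (user setting + localStorage)
        setTheme(resolveTheme(next)); // "system" is resolved via matchMedia
    };

    return <button onClick={cycle}>Switch theme</button>;
};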
diff --git a/frontend/src/core/branches/DefaultBranch.tsx b/frontend/src/core/branches/DefaultBranch.tsx new file mode 100644 index 000000000..81525a170 --- /dev/null +++ b/frontend/src/core/branches/DefaultBranch.tsx @@ -0,0 +1,58 @@ +import VisibilityIcon from "@mui/icons-material/Visibility"; +import { useState } from "react"; +import { Confirm, useNotify, useRefresh } from "react-admin"; + +import EditButton from "../../commons/custom_fields/EditButton"; +import { httpClient } from "../../commons/ra-data-django-rest-framework"; + +type DefaultBranchProps = { + branch: any; +}; + +const DefaultBranch = ({ branch }: DefaultBranchProps) => { + const notify = useNotify(); + const refresh = useRefresh(); + + const [open, setOpen] = useState(false); + const handleOpen = () => setOpen(true); + const handleClose = () => setOpen(false); + + const setDefaultBranch = async () => { + const patch = { + is_default_branch: true, + }; + const url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/branches/" + branch.id + "/"; + httpClient(url, { + method: "PATCH", + body: JSON.stringify(patch), + }) + .then(() => { + refresh(); + notify("Default branch / version set", { + type: "success", + }); + }) + .catch((error) => { + notify(error.message, { + type: "warning", + }); + }); + + setOpen(false); + }; + + return ( + <> + } /> + + + ); +}; + +export default DefaultBranch; diff --git a/frontend/src/core/branches/ShowDefaultBranchObservationsButton.tsx b/frontend/src/core/branches/ShowDefaultBranchObservationsButton.tsx index bf271204d..1a1dab766 100644 --- a/frontend/src/core/branches/ShowDefaultBranchObservationsButton.tsx +++ b/frontend/src/core/branches/ShowDefaultBranchObservationsButton.tsx @@ -14,7 +14,7 @@ const ShowDefaultBranchObservationsButton = ({ product }: ShowDefaultBranchObser const navigate = useNavigate(); function get_observations_url(branch_id: Identifier): string { - return `?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A%22Open%22%2C%22branch%22%3A${branch_id}%7D&order=ASC&sort=current_severity`; + return `?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A["Open"%2C"Affected"%2C"In review"]%2C%22branch%22%3A${branch_id}%7D&order=ASC&sort=current_severity`; } const navigateToObservations = () => { diff --git a/frontend/src/core/components/ComponentList.tsx b/frontend/src/core/components/ComponentList.tsx new file mode 100644 index 000000000..3df5ef237 --- /dev/null +++ b/frontend/src/core/components/ComponentList.tsx @@ -0,0 +1,84 @@ +import { Fragment } from "react"; +import { + AutocompleteInput, + BooleanField, + Datagrid, + List, + NullableBooleanInput, + ReferenceInput, + TextField, + TextInput, + WithListContext, +} from "react-admin"; + +import components from "."; +import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; +import { has_attribute } from "../../commons/functions"; +import ListHeader from "../../commons/layout/ListHeader"; +import { AutocompleteInputMedium } from "../../commons/layout/themes"; +import { getSettingListSize } from "../../commons/user_settings/functions"; +import { PURL_TYPE_CHOICES } from "../types"; + +const listFilters = [ + , + , + + + , + + + , + , + , + , +]; + +const ComponentList = () => { + return ( + + + } + filters={listFilters} + sort={{ field: "component_name_version_type", order: "ASC" }} + disableSyncWithLocation={false} + actions={false} + storeKey="components.list" + > + ( + + + + {has_attribute("product_group_name", data, sort) && ( + + )} + {has_attribute("branch_name", data, sort) && ( + + )} 
+ {has_attribute("origin_service_name", data, sort) && ( + + )} + + + )} + /> + + + ); +}; + +export default ComponentList; diff --git a/frontend/src/core/components/ComponentShow.tsx b/frontend/src/core/components/ComponentShow.tsx new file mode 100644 index 000000000..1aaaf6b5a --- /dev/null +++ b/frontend/src/core/components/ComponentShow.tsx @@ -0,0 +1,49 @@ +import { Box, Paper, Typography } from "@mui/material"; +import { Fragment } from "react"; +import { PrevNextButtons, Show, TopToolbar, WithRecord } from "react-admin"; + +import ObservationsComponentList from "../observations/ObservationComponentList"; +import ComponentShowAside from "./ComponentShowAside"; +import ComponentShowComponent from "./ComponentShowComponent"; + +const ShowActions = () => { + return ( + + + + ); +}; + +export const ComponentComponent = () => { + return ( + ( + + + + + {component?.has_observations && ( + + Observations + + + )} + + )} + /> + ); +}; +const ComponentShow = () => { + return ( + } component={ComponentComponent} aside={}> + + + ); +}; + +export default ComponentShow; diff --git a/frontend/src/core/components/ComponentShowAside.tsx b/frontend/src/core/components/ComponentShowAside.tsx new file mode 100644 index 000000000..9964c4628 --- /dev/null +++ b/frontend/src/core/components/ComponentShowAside.tsx @@ -0,0 +1,101 @@ +import { Box, Paper, Stack, Typography } from "@mui/material"; +import { useEffect, useState } from "react"; +import { Identifier, Labeled, ReferenceField, TextField, WithRecord, useNotify, useRecordContext } from "react-admin"; + +import { httpClient } from "../../commons/ra-data-django-rest-framework"; +import LicenseComponentShowLicense from "../../licenses/license_components/LicenseComponentShowLicense"; + +const ComponentShowAside = () => { + const [licenseComponent, setLicenseComponent] = useState(undefined); + const notify = useNotify(); + const component = useRecordContext(); + + useEffect(() => { + if (component) { + get_data(component.id); + } + }, [component]); // eslint-disable-line react-hooks/exhaustive-deps + + function get_data(component_id: Identifier) { + httpClient( + window.__RUNTIME_CONFIG__.API_BASE_URL + "/license_components/for_component/?component=" + component_id, + { + method: "GET", + } + ) + .then((result) => { + if (result.status === 200) { + setLicenseComponent(result.json); + } else { + setLicenseComponent(undefined); + } + }) + .catch((error) => { + setLicenseComponent(undefined); + if (error !== undefined) { + notify(error.message, { + type: "warning", + }); + } else { + notify("Error while loading License Component", { + type: "warning", + }); + } + }); + } + + return ( + + + {licenseComponent && } + + ); +}; + +const MetaData = () => { + return ( + ( + + + Metadata + + + + + + {component.branch_name && ( + + + + )} + {component.origin_service_name && ( + + + + )} + + + )} + /> + ); +}; + +type LicenseProps = { + licenseComponent?: any; +}; + +const License = ({ licenseComponent }: LicenseProps) => { + return ( + + + + ); +}; + +export default ComponentShowAside; diff --git a/frontend/src/core/components/ComponentShowComponent.tsx b/frontend/src/core/components/ComponentShowComponent.tsx new file mode 100644 index 000000000..7d468cd97 --- /dev/null +++ b/frontend/src/core/components/ComponentShowComponent.tsx @@ -0,0 +1,75 @@ +import { Stack, Typography } from "@mui/material"; +import { Labeled, RecordContextProvider, TextField } from "react-admin"; + +import components from "."; +import TextUrlField from 
"../../commons/custom_fields/TextUrlField"; +import { feature_vex_enabled, get_component_purl_url } from "../../commons/functions"; +import { useStyles } from "../../commons/layout/themes"; +import MermaidDependencies from "../observations/Mermaid_Dependencies"; + +type ComponentShowComponentProps = { + component: any; + icon: boolean; +}; + +const ComponentShowComponent = ({ component, icon }: ComponentShowComponentProps) => { + const { classes } = useStyles(); + + return ( + + {component && ( + + {icon && ( + + +   Component + + )} + {!icon && Component} + + {component.component_name != "" && ( + + + + )} + {component.component_version != "" && ( + + + + )} + + {component.component_purl !== "" && get_component_purl_url(component.component_purl) === null && ( + + + + )} + {component.component_purl !== "" && get_component_purl_url(component.component_purl) !== null && ( + + + + )} + {component.component_cpe != "" && ( + + + + )} + {feature_vex_enabled() && component.component_cyclonedx_bom_link != "" && ( + + + + )} + {component.component_dependencies && component.component_dependencies != "" && ( + + )} + + )} + + ); +}; + +export default ComponentShowComponent; diff --git a/frontend/src/core/components/index.ts b/frontend/src/core/components/index.ts new file mode 100644 index 000000000..500793b22 --- /dev/null +++ b/frontend/src/core/components/index.ts @@ -0,0 +1,10 @@ +import ComponentsIcon from "@mui/icons-material/Extension"; + +import ComponentList from "./ComponentList"; +import ComponentShow from "./ComponentShow"; + +export default { + list: ComponentList, + show: ComponentShow, + icon: ComponentsIcon, +}; diff --git a/frontend/src/core/evidences/EvidenceShow.tsx b/frontend/src/core/evidences/EvidenceShow.tsx index 4f0910115..32e2f8a07 100644 --- a/frontend/src/core/evidences/EvidenceShow.tsx +++ b/frontend/src/core/evidences/EvidenceShow.tsx @@ -12,8 +12,9 @@ import { useRecordContext, } from "react-admin"; +import evidences from "."; import { useStyles } from "../../commons/layout/themes"; -import { getSettingTheme } from "../../commons/user_settings/functions"; +import { getResolvedSettingTheme } from "../../commons/user_settings/functions"; const ShowActions = () => { const evidence = useRecordContext(); @@ -35,8 +36,9 @@ const EvidenceShow = () => { return ( }> - - Evidence + + +   Evidence { collapseStringsAfterLength={false} enableClipboard={false} className={classes.displayFontSize} - theme={getSettingTheme() as JsonViewerTheme} + theme={getResolvedSettingTheme() as JsonViewerTheme} sx={{ padding: 1 }} /> diff --git a/frontend/src/core/functions.ts b/frontend/src/core/functions.ts new file mode 100644 index 000000000..ea2ed1ff1 --- /dev/null +++ b/frontend/src/core/functions.ts @@ -0,0 +1,38 @@ +export const transform_product_group_and_product = (data: any, description: string) => { + data.description = description; + + if (data.repository_branch_housekeeping_active) { + data.repository_branch_housekeeping_keep_inactive_days ||= 1; + } else { + data.repository_branch_housekeeping_keep_inactive_days ||= null; + } + data.repository_branch_housekeeping_exempt_branches ??= ""; + + data.notification_email_to ??= ""; + data.notification_ms_teams_webhook ??= ""; + data.notification_slack_webhook ??= ""; + + if (data.security_gate_active) { + data.security_gate_threshold_critical ||= 0; + data.security_gate_threshold_high ||= 0; + data.security_gate_threshold_medium ||= 0; + data.security_gate_threshold_low ||= 0; + data.security_gate_threshold_none ||= 0; + 
data.security_gate_threshold_unknown ||= 0; + } else { + data.security_gate_threshold_critical ||= null; + data.security_gate_threshold_high ||= null; + data.security_gate_threshold_medium ||= null; + data.security_gate_threshold_low ||= null; + data.security_gate_threshold_none ||= null; + data.security_gate_threshold_unknown ||= null; + } + + if (data.risk_acceptance_expiry_active) { + data.risk_acceptance_expiry_days ||= 30; + } else { + data.risk_acceptance_expiry_days ||= null; + } + + return data; +}; diff --git a/frontend/src/core/observation_logs/AssessmentApproval.tsx b/frontend/src/core/observation_logs/AssessmentApproval.tsx index 138af7d86..ce94a2681 100644 --- a/frontend/src/core/observation_logs/AssessmentApproval.tsx +++ b/frontend/src/core/observation_logs/AssessmentApproval.tsx @@ -1,11 +1,10 @@ import ApprovalIcon from "@mui/icons-material/Approval"; import { Dialog, DialogContent, DialogTitle } from "@mui/material"; import { Fragment, useState } from "react"; -import { SaveButton, SimpleForm, useNotify, useRefresh } from "react-admin"; +import { SimpleForm, useNotify, useRefresh } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import SmallButton from "../../commons/custom_fields/SmallButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_required, validate_required_255 } from "../../commons/custom_validators"; import { AutocompleteInputMedium, TextInputWide } from "../../commons/layout/themes"; import { httpClient } from "../../commons/ra-data-django-rest-framework"; @@ -55,12 +54,6 @@ const AssessmentApproval = (props: AssessmentApprovalProps) => { const handleCancel = () => setOpen(false); const handleOpen = () => setOpen(true); - const CustomToolbar = () => ( - - - - - ); return ( } /> @@ -70,7 +63,7 @@ const AssessmentApproval = (props: AssessmentApprovalProps) => {   Assessment approval - }> + }> { +type AssessmentBulkApprovalProps = { + storeKey: string; +}; + +const AssessmentBulkApproval = ({ storeKey }: AssessmentBulkApprovalProps) => { const [open, setOpen] = useState(false); const refresh = useRefresh(); const notify = useNotify(); const { selectedIds } = useListContext(); - const unselectAll = useUnselectAll("observation_logs"); + const unselectAll = useUnselectAll("observation_logs", storeKey); const [loading, setLoading] = useState(false); const assessmentUpdate = async (data: any) => { @@ -58,12 +61,6 @@ const AssessmentBulkApproval = () => { const handleCancel = () => setOpen(false); const handleOpen = () => setOpen(true); - const CustomToolbar = () => ( - - - - - ); return ( } /> @@ -73,7 +70,7 @@ const AssessmentBulkApproval = () => {   Assessment approval - }> + }> { + const [open, setOpen] = useState(false); + const refresh = useRefresh(); + const notify = useNotify(); + const { selectedIds } = useListContext(); + const unselectAll = useUnselectAll("observation_logs", storeKey); + const [loading, setLoading] = useState(false); + + const assessmentDelete = async () => { + setLoading(true); + const deleteBody = { + observation_logs: selectedIds, + }; + + httpClient(window.__RUNTIME_CONFIG__.API_BASE_URL + "/observation_logs/bulk_delete/", { + method: "DELETE", + body: JSON.stringify(deleteBody), + }) + .then((response) => { + refresh(); + setOpen(false); + setLoading(false); + unselectAll(); + const count = response.json.count; + notify(`${count} assessment${count === 1 ? 
"" : "s"} deleted`, { + type: "success", + }); + }) + .catch((error) => { + refresh(); + setOpen(false); + setLoading(false); + unselectAll(); + notify(error.message, { + type: "warning", + }); + }); + }; + + const handleCancel = () => setOpen(false); + const handleOpen = () => setOpen(true); + + return ( + + + + {loading ? ( + theme.zIndex.drawer + 1 }} open={open}> + + + ) : null} + + ); +}; + +export default AssessmentDeleteApproval; diff --git a/frontend/src/core/observation_logs/ObservationLogApprovalList.tsx b/frontend/src/core/observation_logs/ObservationLogApprovalList.tsx index efeb964d1..8ef3b53a7 100644 --- a/frontend/src/core/observation_logs/ObservationLogApprovalList.tsx +++ b/frontend/src/core/observation_logs/ObservationLogApprovalList.tsx @@ -1,5 +1,8 @@ +import { Stack } from "@mui/material"; +import { Fragment } from "react"; import { AutocompleteInput, + ChipField, Datagrid, DateField, FilterForm, @@ -9,25 +12,34 @@ import { ResourceContextProvider, TextField, TextInput, + WithListContext, useListController, } from "react-admin"; -import { Fragment } from "react/jsx-runtime"; import { PERMISSION_OBSERVATION_LOG_APPROVAL } from "../../access_control/types"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; -import { feature_vex_enabled } from "../../commons/functions"; -import { AutocompleteInputMedium, AutocompleteInputWide } from "../../commons/layout/themes"; +import { SeverityField } from "../../commons/custom_fields/SeverityField"; +import { feature_vex_enabled, has_attribute } from "../../commons/functions"; +import { AutocompleteInputMedium } from "../../commons/layout/themes"; import { getSettingListSize } from "../../commons/user_settings/functions"; -import { ASSESSMENT_STATUS_NEEDS_APPROVAL } from "../types"; -import { OBSERVATION_SEVERITY_CHOICES, OBSERVATION_STATUS_CHOICES } from "../types"; +import { ASSESSMENT_STATUS_NEEDS_APPROVAL, OBSERVATION_SEVERITY_CHOICES, OBSERVATION_STATUS_CHOICES } from "../types"; import AssessmentBulkApproval from "./AssessmentBulkApproval"; +import AssessmentDeleteApproval from "./AssessmentDeleteApproval"; import { commentShortened } from "./functions"; -const BulkActionButtons = ({ product }: any) => { +type BulkActionButtonsProps = { + product: any; + storeKey: string; +}; + +const BulkActionButtons = ({ product, storeKey }: BulkActionButtonsProps) => { return ( - {(!product || (product && product.permissions.includes(PERMISSION_OBSERVATION_LOG_APPROVAL))) && ( - + {(!product || product?.permissions.includes(PERMISSION_OBSERVATION_LOG_APPROVAL)) && ( + + + + )} ); @@ -35,6 +47,9 @@ const BulkActionButtons = ({ product }: any) => { function listFilters(product: any) { const filters = []; + + filters.push(); + if (!product) { filters.push( - - ); - } - if (!product) { - filters.push( + , - + , + , + ); } - if (!product) { + + if (product?.has_branches) { filters.push( - + ); } - - if (product && product.has_branches) { + if (product?.has_services) { filters.push( - + ); } - filters.push(); - - if (!product || (product && product.has_component)) { + if (!product || product?.has_component) { filters.push(); } - if (!product || (product && product.has_docker_image)) { + if (!product || product?.has_docker_image) { filters.push(); } - if (!product || (product && product.has_endpoint)) { + if (!product || product?.has_endpoint) { filters.push(); } - if (!product || (product && product.has_source)) { + if (!product || product?.has_source) { filters.push(); } - if (!product || (product && 
product.has_cloud_resource)) { + if (!product || product?.has_cloud_resource) { filters.push(); } - if (!product || (product && product.has_kubernetes_resource)) { + if (!product || product?.has_kubernetes_resource) { filters.push(); } @@ -166,87 +178,99 @@ const ObservationLogApprovalList = ({ product }: ObservationLogApprovalListProps
- - ) - } - rowClick={ShowObservationLogs} - resource="observation_logs" - > - {!product && } - {!product && ( - - )} - {(!product || (product && product.has_branches)) && ( - - )} - - {(!product || (product && product.has_component)) && ( - - )} - {(!product || (product && product.has_docker_image)) && ( - - )} - {(!product || (product && product.has_endpoint)) && ( - - )} - {(!product || (product && product.has_source)) && ( - - )} - {(!product || (product && product.has_cloud_resource)) && ( - - )} - {(!product || (product && product.has_kubernetes_resource)) && ( - - )} - - - - {feature_vex_enabled() && ( - + ( + + ) : ( + false + ) + } + rowClick={ShowObservationLogs} + resource="observation_logs" + > + + {!product && } + {!product && + has_attribute("observation_data.product_data.product_group_name", data, sort) && ( + + )} + {has_attribute("observation_data.branch_name", data, sort) && ( + + )} + {has_attribute("observation_data.origin_service_name", data, sort) && ( + + )} + {has_attribute("observation_data.origin_component_name_version", data, sort) && ( + + )} + {has_attribute("observation_data.origin_docker_image_name_tag_short", data, sort) && ( + + )} + {has_attribute("observation_data.origin_endpoint_hostname", data, sort) && ( + + )} + {has_attribute("observation_data.origin_source_file", data, sort) && ( + + )} + {has_attribute("observation_data.origin_cloud_qualified_resource", data, sort) && ( + + )} + {has_attribute("observation_data.origin_kubernetes_qualified_resource", data, sort) && ( + + )} + + + + {feature_vex_enabled() && has_attribute("vex_justification", data, sort) && ( + + )} + commentShortened(record.comment)} + sortable={false} + sx={{ wordBreak: "break-word" }} + /> + + )} - commentShortened(record.comment)} - sortable={false} - sx={{ wordBreak: "break-word" }} - /> - - + />
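transform_product_group_and_product above uses both ||= and ??=. The practical difference: ||= also overwrites 0 and "", while ??= only overwrites null and undefined. A short standalone illustration (plain TypeScript semantics, the values are made up):

let keepInactiveDays: number | null | undefined = 0;
keepInactiveDays ||= 1; // 0 is falsy, so ||= replaces it -> 1

let exemptBranches: string | null | undefined = "";
exemptBranches ??= "fallback"; // "" is not nullish, so ??= keeps it -> ""

let notificationEmailTo: string | null | undefined = undefined;
notificationEmailTo ??= ""; // undefined is nullish -> ""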
diff --git a/frontend/src/core/observation_logs/ObservationLogEmbeddedList.tsx b/frontend/src/core/observation_logs/ObservationLogEmbeddedList.tsx index 1d2a1eef9..c4ff989d5 100644 --- a/frontend/src/core/observation_logs/ObservationLogEmbeddedList.tsx +++ b/frontend/src/core/observation_logs/ObservationLogEmbeddedList.tsx @@ -3,12 +3,15 @@ import { Datagrid, DateField, ListContextProvider, + NumberField, ResourceContextProvider, TextField, + WithListContext, useListController, } from "react-admin"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; +import { has_attribute } from "../../commons/functions"; import { getSettingListSize } from "../../commons/user_settings/functions"; type ObservationLogEmbeddedListProps = { @@ -51,32 +54,43 @@ const ObservationLogEmbeddedList = ({ observation }: ObservationLogEmbeddedListP
- - {(observation.product_data.assessments_need_approval || - observation.product_data.product_group_assessments_need_approval) && ( - + ( + + {(observation.product_data.assessments_need_approval || + observation.product_data.product_group_assessments_need_approval) && ( + + )} + + + + {has_attribute("priority", data, sort) && ( + + )} + + + {(observation.product_data.assessments_need_approval || + observation.product_data.product_group_assessments_need_approval) && ( + + )} + )} - - - - - - {(observation.product_data.assessments_need_approval || - observation.product_data.product_group_assessments_need_approval) && ( - - )} - + />
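Several of the rewritten lists (branches, observation logs, dashboard, components) now render their columns inside WithListContext and gate optional columns with has_attribute(name, data, sort). That helper lives in commons/functions and is not part of this diff; the sketch below is only a plausible shape for it, assuming a column stays visible when any loaded record carries the attribute or when it is the active sort field:

type Sort = { field?: string; order?: string };

// Resolve dotted paths such as "observation_data.branch_name".
const getNested = (record: any, path: string) =>
    path.split(".").reduce((value, key) => (value == null ? value : value[key]), record);

export function has_attribute(attribute: string, data: any[] | undefined, sort: Sort): boolean {
    if (sort?.field === attribute) {
        return true; // never hide the column the list is currently sorted by
    }
    return (data ?? []).some((record) => {
        const value = getNested(record, attribute);
        return value !== undefined && value !== null && value !== "";
    });
}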
diff --git a/frontend/src/core/observation_logs/ObservationLogShow.tsx b/frontend/src/core/observation_logs/ObservationLogShow.tsx index 372160c26..d345afece 100644 --- a/frontend/src/core/observation_logs/ObservationLogShow.tsx +++ b/frontend/src/core/observation_logs/ObservationLogShow.tsx @@ -14,6 +14,7 @@ import { useRecordContext, } from "react-admin"; +import observation_logs from "."; import { PERMISSION_OBSERVATION_LOG_APPROVAL } from "../../access_control/types"; import MarkdownField from "../../commons/custom_fields/MarkdownField"; import { SeverityField } from "../../commons/custom_fields/SeverityField"; @@ -40,11 +41,7 @@ const ShowActions = () => { sort = { field: "created", order: "ASC" }; storeKey = "observation_logs.approval"; } - if ( - observation_log && - observation_log.observation_data && - localStorage.getItem("observationlogapprovallistproduct") - ) { + if (observation_log?.observation_data && localStorage.getItem("observationlogapprovallistproduct")) { filter = { product: observation_log.observation_data.product, assessment_status: ASSESSMENT_STATUS_NEEDS_APPROVAL, @@ -59,12 +56,8 @@ const ShowActions = () => { {observation_log && filter && sort && storeKey && ( )} - {observation_log && - observation_log.observation_data && - observation_log.observation_data.product_data && - observation_log.observation_data.product_data.permissions && - observation_log.assessment_status == ASSESSMENT_STATUS_NEEDS_APPROVAL && - observation_log.observation_data.product_data.permissions.includes( + {observation_log?.assessment_status == ASSESSMENT_STATUS_NEEDS_APPROVAL && + observation_log?.observation_data?.product_data?.permissions?.includes( PERMISSION_OBSERVATION_LOG_APPROVAL ) && } @@ -79,7 +72,10 @@ const ObservationLogComponent = () => { - Observation Log + + +   Observation Log + @@ -150,39 +146,37 @@ const ObservationLogComponent = () => { - {observation_log && - observation_log.observation_data && - (observation_log.observation_data.product_data.assessments_need_approval || - observation_log.observation_data.product_data.product_group_assessments_need_approval) && ( - - - Approval - - + {(observation_log?.observation_data?.product_data?.assessments_need_approval || + observation_log?.observation_data?.product_data?.product_group_assessments_need_approval) && ( + + + Approval + + + + {observation_log.approval_user_full_name && ( + + + + )} + {observation_log.approval_remark && ( + + + + )} + {observation_log.approval_date && ( + + - {observation_log.approval_user_full_name && ( - - - - )} - {observation_log.approval_remark && ( - - - - )} - {observation_log.approval_date && ( - - - - )} - - - )} + )} + + + )} )} /> diff --git a/frontend/src/core/observations/Mermaid_Dependencies.tsx b/frontend/src/core/observations/Mermaid_Dependencies.tsx index e5f4ac9a9..8081b32b0 100644 --- a/frontend/src/core/observations/Mermaid_Dependencies.tsx +++ b/frontend/src/core/observations/Mermaid_Dependencies.tsx @@ -3,11 +3,11 @@ import CloseIcon from "@mui/icons-material/Close"; import RemoveIcon from "@mui/icons-material/Remove"; import { Dialog, DialogContent, DialogTitle, Divider, IconButton, Paper, Stack } from "@mui/material"; import mermaid from "mermaid"; -import { Fragment, useEffect, useState } from "react"; -import { Labeled, WrapperField } from "react-admin"; +import { Fragment, useEffect, useRef, useState } from "react"; +import { Labeled, WrapperField, useTheme } from "react-admin"; import LabeledTextField from "../../commons/custom_fields/LabeledTextField"; -import { getTheme } 
from "../../commons/user_settings/functions"; +import { getResolvedSettingTheme } from "../../commons/user_settings/functions"; mermaid.initialize({ flowchart: { @@ -40,9 +40,9 @@ const createMermaidGraph = (dependencies_str: string) => { if (dependencies.length > 500) { return "Error: Graph is too large, it has more than 500 dependencies"; } - const line_color = getTheme() == "dark" ? "white" : "black"; - const primary_color = getTheme() == "dark" ? "#0086B4" : "#C9F1FF"; - const primary_text_color = getTheme() == "dark" ? "white" : "black"; + const line_color = getResolvedSettingTheme() == "dark" ? "white" : "black"; + const primary_color = getResolvedSettingTheme() == "dark" ? "#0086B4" : "#C9F1FF"; + const primary_text_color = getResolvedSettingTheme() == "dark" ? "white" : "black"; let mermaid_content = "---\n" + " config:\n" + @@ -61,6 +61,11 @@ const createMermaidGraph = (dependencies_str: string) => { for (const dependency of dependencies) { const components_list = dependency.split(" --> "); if (components_list.length != 2) { + console.warn("Invalid dependency: " + dependency); + continue; + } + if (dependency.split(" --> ")[0].trim() == "" || dependency.split(" --> ")[1].trim() == "") { + console.warn("Invalid dependency: " + dependency); continue; } components.add(dependency.split(" --> ")[0]); @@ -68,8 +73,13 @@ const createMermaidGraph = (dependencies_str: string) => { mermaid_content += " " + dependency + "\n"; } + // Sort components in descending order to make replaceAll more robust + const arrayFromSet = Array.from(components); + const sortedArray = arrayFromSet.toSorted((a, b) => b.localeCompare(a)); + const sortedComponents = new Set(sortedArray); + let i = 1; - for (const component of components) { + for (const component of sortedComponents) { mermaid_content = mermaid_content.replaceAll(component + " ", "id" + i.toString() + '("' + component + '") '); mermaid_content = mermaid_content.replaceAll(" " + component, " id" + i.toString() + '("' + component + '")'); i++; @@ -84,6 +94,8 @@ type ComponentShowProps = { const MermaidDependencies = ({ dependencies }: ComponentShowProps) => { const [open, setOpen] = useState(false); + const [theme] = useTheme(); + const mermaidRef = useRef(null); const handleOpen = () => { setOpen(true); }; @@ -93,13 +105,13 @@ const MermaidDependencies = ({ dependencies }: ComponentShowProps) => { }; useEffect(() => { - if (dependencies) { - if (document.getElementById("mermaid-dependencies")) { - document.getElementById("mermaid-dependencies")?.removeAttribute("data-processed"); - mermaid.contentLoaded(); - } + if (dependencies && mermaidRef.current) { + const element = mermaidRef.current; + element.removeAttribute("data-processed"); + element.innerHTML = createMermaidGraph(dependencies); + mermaid.run({ nodes: [element] }); } - }, [dependencies, mermaid.contentLoaded()]); // eslint-disable-line react-hooks/exhaustive-deps + }, [dependencies, theme]); return ( @@ -107,14 +119,15 @@ const MermaidDependencies = ({ dependencies }: ComponentShowProps) => { -
{createMermaidGraph(dependencies)} -
+
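The descending sort added to createMermaidGraph before the replaceAll pass matters whenever one component name is a prefix of another. A small standalone example (invented component names, only the leading-space replacement shown):

const components = new Set(["lodash 4.17.2", "lodash 4.17.20"]);
let content = "  app 1.0 --> lodash 4.17.20\n  app 1.0 --> lodash 4.17.2\n";

// Descending order handles "lodash 4.17.20" before its prefix "lodash 4.17.2".
const sorted = Array.from(components).sort((a, b) => b.localeCompare(a));
let i = 1;
for (const component of sorted) {
    content = content.replaceAll(" " + component, ` id${i}("${component}")`);
    i++;
}
// With ascending order, " lodash 4.17.2" would be rewritten first and corrupt the
// longer occurrence into ' id1("lodash 4.17.2")0'.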
diff --git a/frontend/src/core/observations/ObservationAssessment.tsx b/frontend/src/core/observations/ObservationAssessment.tsx index fe4be6079..c76cf4cfc 100644 --- a/frontend/src/core/observations/ObservationAssessment.tsx +++ b/frontend/src/core/observations/ObservationAssessment.tsx @@ -1,16 +1,18 @@ import PlaylistAddCheckIcon from "@mui/icons-material/PlaylistAddCheck"; import { Dialog, DialogContent, DialogTitle } from "@mui/material"; -import { Fragment, useState } from "react"; -import { DateInput, FormDataConsumer, SaveButton, SimpleForm, useNotify, useRefresh } from "react-admin"; +import { Fragment, useRef, useState } from "react"; +import { DateInput, FormDataConsumer, SimpleForm, useNotify, useRefresh } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; +import MarkdownEdit from "../../commons/custom_fields/MarkdownEdit"; import SmallButton from "../../commons/custom_fields/SmallButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; -import { validate_after_today, validate_required, validate_required_4096 } from "../../commons/custom_validators"; -import { justificationIsEnabledForStatus } from "../../commons/functions"; -import { AutocompleteInputMedium, TextInputWide } from "../../commons/layout/themes"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { validate_after_today, validate_required } from "../../commons/custom_validators"; +import { justificationIsEnabledForStatus, settings_vex_justification_style } from "../../commons/functions"; +import { AutocompleteInputMedium, AutocompleteInputWide } from "../../commons/layout/themes"; import { httpClient } from "../../commons/ra-data-django-rest-framework"; +import { VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX, VEX_JUSTIFICATION_TYPE_CYCLONEDX } from "../../commons/types"; import { + OBSERVATION_CYCLONEDX_VEX_JUSTIFICATION_CHOICES, OBSERVATION_SEVERITY_CHOICES, OBSERVATION_STATUS_CHOICES, OBSERVATION_STATUS_OPEN, @@ -19,17 +21,27 @@ import { } from "../types"; const ObservationAssessment = () => { + const dialogRef = useRef(null); + const [comment, setComment] = useState(""); const [open, setOpen] = useState(false); const [status, setStatus] = useState(OBSERVATION_STATUS_OPEN); const justificationEnabled = justificationIsEnabledForStatus(status); const refresh = useRefresh(); const notify = useNotify(); + const observationUpdate = async (data: any) => { + if (comment === "") { + notify("Comment is required", { + type: "warning", + }); + return; + } + const patch = { severity: data.current_severity, status: data.current_status, vex_justification: justificationEnabled ? 
data.current_vex_justification : "", - comment: data.comment, + comment: comment, risk_acceptance_expiry_date: data.risk_acceptance_expiry_date, }; @@ -60,19 +72,16 @@ const ObservationAssessment = () => { const handleCancel = () => setOpen(false); const handleOpen = () => setOpen(true); - const CustomToolbar = () => ( - - - - - ); return ( } /> - + Observation Assessment - }> + } + > { label="Status" onChange={(e) => setStatus(e)} /> - {justificationEnabled && ( - - )} + {justificationEnabled && + settings_vex_justification_style() === VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX && ( + + )} + {justificationEnabled && + settings_vex_justification_style() === VEX_JUSTIFICATION_TYPE_CYCLONEDX && ( + + )} {({ formData }) => formData.current_status && @@ -107,11 +125,12 @@ const ObservationAssessment = () => { ) } - diff --git a/frontend/src/core/observations/ObservationBulkAssessment.tsx b/frontend/src/core/observations/ObservationBulkAssessment.tsx index dba80b428..dc253b880 100644 --- a/frontend/src/core/observations/ObservationBulkAssessment.tsx +++ b/frontend/src/core/observations/ObservationBulkAssessment.tsx @@ -1,10 +1,9 @@ import PlaylistAddCheckIcon from "@mui/icons-material/PlaylistAddCheck"; import { Backdrop, CircularProgress, Dialog, DialogContent, DialogTitle } from "@mui/material"; -import { Fragment, useState } from "react"; +import { Fragment, useRef, useState } from "react"; import { DateInput, FormDataConsumer, - SaveButton, SimpleForm, useListContext, useNotify, @@ -12,14 +11,20 @@ import { useUnselectAll, } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; +import MarkdownEdit from "../../commons/custom_fields/MarkdownEdit"; import SmallButton from "../../commons/custom_fields/SmallButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; -import { validate_after_today, validate_required_4096 } from "../../commons/custom_validators"; -import { justificationIsEnabledForStatus, settings_risk_acceptance_expiry_date } from "../../commons/functions"; -import { AutocompleteInputMedium, TextInputWide } from "../../commons/layout/themes"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { validate_after_today } from "../../commons/custom_validators"; +import { + justificationIsEnabledForStatus, + settings_risk_acceptance_expiry_date, + settings_vex_justification_style, +} from "../../commons/functions"; +import { AutocompleteInputMedium, AutocompleteInputWide } from "../../commons/layout/themes"; import { httpClient } from "../../commons/ra-data-django-rest-framework"; +import { VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX, VEX_JUSTIFICATION_TYPE_CYCLONEDX } from "../../commons/types"; import { + OBSERVATION_CYCLONEDX_VEX_JUSTIFICATION_CHOICES, OBSERVATION_SEVERITY_CHOICES, OBSERVATION_STATUS_CHOICES, OBSERVATION_STATUS_OPEN, @@ -29,9 +34,12 @@ import { type ObservationBulkAssessmentButtonProps = { product: any; + storeKey: string; }; -const ObservationBulkAssessment = (props: ObservationBulkAssessmentButtonProps) => { +const ObservationBulkAssessment = ({ product, storeKey }: ObservationBulkAssessmentButtonProps) => { + const dialogRef = useRef(null); + const [comment, setComment] = useState(""); const [open, setOpen] = useState(false); const [status, setStatus] = useState(OBSERVATION_STATUS_OPEN); const justificationEnabled = justificationIsEnabledForStatus(status); @@ -39,24 +47,26 @@ const ObservationBulkAssessment = (props: ObservationBulkAssessmentButtonProps) const [loading, setLoading] = 
useState(false); const notify = useNotify(); const { selectedIds } = useListContext(); - const unselectAll = useUnselectAll("observations"); + const unselectAll = useUnselectAll("observations", storeKey); const observationUpdate = async (data: any) => { + if (comment === "") { + notify("Comment is required", { + type: "warning", + }); + return; + } setLoading(true); let url = ""; - if (props.product) { - url = - window.__RUNTIME_CONFIG__.API_BASE_URL + - "/products/" + - props.product.id + - "/observations_bulk_assessment/"; + if (product) { + url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/products/" + product.id + "/observations_bulk_assessment/"; } else { url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/observations/bulk_assessment/"; } const assessment_data = { severity: data.current_severity, status: data.current_status, - comment: data.comment, + comment: comment, vex_justification: justificationEnabled ? data.current_vex_justification : "", observations: selectedIds, risk_acceptance_expiry_date: data.risk_acceptance_expiry_date, @@ -93,20 +103,16 @@ const ObservationBulkAssessment = (props: ObservationBulkAssessmentButtonProps) const handleCancel = () => setOpen(false); const handleOpen = () => setOpen(true); - const CustomToolbar = () => ( - - - - - ); - return ( } /> - + Bulk Observation Assessment - }> + } + > setStatus(e)} /> - {justificationEnabled && ( - - )} + {justificationEnabled && + settings_vex_justification_style() === VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX && ( + + )} + {justificationEnabled && + settings_vex_justification_style() === VEX_JUSTIFICATION_TYPE_CYCLONEDX && ( + + )} {({ formData }) => formData.current_status && @@ -144,11 +159,12 @@ const ObservationBulkAssessment = (props: ObservationBulkAssessmentButtonProps) ) } - diff --git a/frontend/src/core/observations/ObservationBulkDeleteButton.tsx b/frontend/src/core/observations/ObservationBulkDeleteButton.tsx index da05b46e1..17889145a 100644 --- a/frontend/src/core/observations/ObservationBulkDeleteButton.tsx +++ b/frontend/src/core/observations/ObservationBulkDeleteButton.tsx @@ -7,22 +7,22 @@ import { httpClient } from "../../commons/ra-data-django-rest-framework"; type ObservationBulkDeleteButtonProps = { product: any; + storeKey: string; }; -const ObservationBulkDeleteButton = (props: ObservationBulkDeleteButtonProps) => { +const ObservationBulkDeleteButton = ({ product, storeKey }: ObservationBulkDeleteButtonProps) => { const [open, setOpen] = useState(false); const { selectedIds } = useListContext(); const refresh = useRefresh(); const [loading, setLoading] = useState(false); const notify = useNotify(); - const unselectAll = useUnselectAll("observations"); + const unselectAll = useUnselectAll("observations", storeKey); const handleClick = () => setOpen(true); const handleDialogClose = () => setOpen(false); const handleConfirm = async () => { setLoading(true); - const url = - window.__RUNTIME_CONFIG__.API_BASE_URL + "/products/" + props.product.id + "/observations_bulk_delete/"; + const url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/products/" + product.id + "/observations_bulk_delete/"; const delete_data = { observations: selectedIds, }; diff --git a/frontend/src/core/observations/ObservationComponentList.tsx b/frontend/src/core/observations/ObservationComponentList.tsx new file mode 100644 index 000000000..1e20ec5b0 --- /dev/null +++ b/frontend/src/core/observations/ObservationComponentList.tsx @@ -0,0 +1,111 @@ +import { + AutocompleteArrayInput, + ChipField, + Datagrid, + FilterForm, + FunctionField, 
+ ListContextProvider, + ResourceContextProvider, + TextField, + TextInput, + useListController, +} from "react-admin"; + +import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; +import { SeverityField } from "../../commons/custom_fields/SeverityField"; +import { humanReadableDate } from "../../commons/functions"; +import { AutocompleteInputMedium } from "../../commons/layout/themes"; +import { getSettingListSize } from "../../commons/user_settings/functions"; +import { + AGE_CHOICES, + OBSERVATION_SEVERITY_CHOICES, + OBSERVATION_STATUS_ACTIVE, + OBSERVATION_STATUS_CHOICES, + Observation, +} from "../types"; +import ObservationExpand from "./ObservationExpand"; +import { IDENTIFIER_OBSERVATION_COMPONENT_LIST, setListIdentifier } from "./functions"; + +function listFilters() { + const filters = []; + filters.push( + , + , + , + , + + ); + + return filters; +} + +const ShowObservations = (id: any) => { + return "../../../../observations/" + id + "/show"; +}; + +type ObservationsComponentListProps = { + component: any; +}; + +const ObservationsComponentList = ({ component }: ObservationsComponentListProps) => { + setListIdentifier(IDENTIFIER_OBSERVATION_COMPONENT_LIST); + + const listContext = useListController({ + filter: { + product: component.product, + branch: component.branch, + origin_service: component.origin_service, + origin_component_name_version: component.component_name_version, + origin_component_purl: component.component_purl, + origin_component_cpe: component.component_cpe, + origin_component_cyclonedx_bom_link: component.component_cyclonedx_bom_link, + }, + perPage: 25, + resource: "observations", + sort: { field: "current_severity", order: "ASC" }, + filterDefaultValues: { current_status: OBSERVATION_STATUS_ACTIVE }, + disableSyncWithLocation: false, + storeKey: "observations.component", + }); + + if (listContext.isLoading) { + return
Loading...
; + } + + return ( + + +
+ + } + expandSingle + bulkActionButtons={false} + > + + + + + + label="Age" + sortBy="last_observation_log" + render={(record) => (record ? humanReadableDate(record.last_observation_log) : "")} + /> + + +
+
+
+ ); +}; + +export default ObservationsComponentList; diff --git a/frontend/src/core/observations/ObservationCreate.tsx b/frontend/src/core/observations/ObservationCreate.tsx index 86ba25e77..6a3b3b7d6 100644 --- a/frontend/src/core/observations/ObservationCreate.tsx +++ b/frontend/src/core/observations/ObservationCreate.tsx @@ -1,12 +1,11 @@ import { Dialog, DialogContent, DialogTitle, Divider, Stack, Typography } from "@mui/material"; -import { Fragment, useState } from "react"; +import { Fragment, useRef, useState } from "react"; import { CreateBase, DateInput, FormDataConsumer, NumberInput, ReferenceInput, - SaveButton, SimpleForm, TextInput, useCreate, @@ -15,9 +14,9 @@ import { } from "react-admin"; import AddButton from "../../commons/custom_fields/AddButton"; -import CancelButton from "../../commons/custom_fields/CancelButton"; +import MarkdownEdit from "../../commons/custom_fields/MarkdownEdit"; import TextUrlField from "../../commons/custom_fields/TextUrlField"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_0_10, validate_0_999999, @@ -27,9 +26,11 @@ import { validate_required, validate_required_255, } from "../../commons/custom_validators"; -import { justificationIsEnabledForStatus } from "../../commons/functions"; +import { justificationIsEnabledForStatus, settings_vex_justification_style } from "../../commons/functions"; import { AutocompleteInputMedium, AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; +import { VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX, VEX_JUSTIFICATION_TYPE_CYCLONEDX } from "../../commons/types"; import { + OBSERVATION_CYCLONEDX_VEX_JUSTIFICATION_CHOICES, OBSERVATION_SEVERITY_CHOICES, OBSERVATION_STATUS_CHOICES, OBSERVATION_STATUS_OPEN, @@ -43,6 +44,9 @@ export type ObservationCreateProps = { }; const ObservationCreate = ({ id, risk_acceptance_expiry_date_calculated }: ObservationCreateProps) => { + const dialogRef = useRef(null); + const [description, setDescription] = useState(""); + const [recommendation, setRecommendation] = useState(""); const [open, setOpen] = useState(false); const [status, setStatus] = useState(OBSERVATION_STATUS_OPEN); const justificationEnabled = justificationIsEnabledForStatus(status); @@ -56,13 +60,6 @@ const ObservationCreate = ({ id, risk_acceptance_expiry_date_calculated }: Obser setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); - const create_observation = (data: any) => { data.product = id; if (!justificationEnabled) { @@ -71,6 +68,8 @@ const ObservationCreate = ({ id, risk_acceptance_expiry_date_calculated }: Obser if (data.parser_status != OBSERVATION_STATUS_RISK_ACCEPTED) { data.risk_acceptance_expiry_date = null; } + data.description = description; + data.recommendation = recommendation; create( "observations", @@ -91,11 +90,14 @@ const ObservationCreate = ({ id, risk_acceptance_expiry_date_calculated }: Obser return ( - + Add observation - }> + } + > Observation @@ -129,27 +131,36 @@ const ObservationCreate = ({ id, risk_acceptance_expiry_date_calculated }: Obser ) } - {justificationEnabled && ( - - )} + {justificationEnabled && + settings_vex_justification_style() === VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX && ( + + )} + {justificationEnabled && + settings_vex_justification_style() === VEX_JUSTIFICATION_TYPE_CYCLONEDX && ( + + )} - - @@ -206,7 +217,7 @@ const ObservationCreate = ({ id, risk_acceptance_expiry_date_calculated }: Obser @@ -226,7 +237,7 @@ const 
ObservationCreate = ({ id, risk_acceptance_expiry_date_calculated }: Obser diff --git a/frontend/src/core/observations/ObservationDashboardList.tsx b/frontend/src/core/observations/ObservationDashboardList.tsx index 7a4964eaf..c48063e91 100644 --- a/frontend/src/core/observations/ObservationDashboardList.tsx +++ b/frontend/src/core/observations/ObservationDashboardList.tsx @@ -6,14 +6,15 @@ import { ListContextProvider, ResourceContextProvider, TextField, + WithListContext, useListController, } from "react-admin"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; import { SeverityField } from "../../commons/custom_fields/SeverityField"; -import { humanReadableDate } from "../../commons/functions"; +import { has_attribute, humanReadableDate } from "../../commons/functions"; import { getSettingListSize } from "../../commons/user_settings/functions"; -import { OBSERVATION_STATUS_OPEN } from "../types"; +import { OBSERVATION_STATUS_ACTIVE } from "../types"; import { Observation } from "../types"; import ObservationExpand from "./ObservationExpand"; import { IDENTIFIER_OBSERVATION_DASHBOARD_LIST, setListIdentifier } from "./functions"; @@ -28,7 +29,7 @@ const ObservationDashboardList = () => { const listContext = useListController({ filter: { age: "Past 7 days", - current_status: OBSERVATION_STATUS_OPEN, + current_status: OBSERVATION_STATUS_ACTIVE, }, perPage: 10, resource: "observations", @@ -45,32 +46,46 @@ const ObservationDashboardList = () => { return ( - Open observations of the last 7 days + Active observations of the last 7 days
- } - expandSingle - > - - - - - - - - label="Age" - sortBy="last_observation_log" - render={(record) => (record ? humanReadableDate(record.last_observation_log) : "")} - /> - + ( + } + expandSingle + > + + {has_attribute("branch_name", data, sort) && ( + + )} + {has_attribute("origin_service_name", data, sort) && ( + + )} + + + + {has_attribute("current_priority", data, sort) && ( + + )} + + + label="Age" + sortBy="last_observation_log" + render={(record) => + record ? humanReadableDate(record.last_observation_log) : "" + } + /> + + )} + />
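The deep links touched in this change set (get_observations_url, ShowDefaultBranchObservationsButton, the embedded observation list) switch from the single "Open" status to the multi-status filter ["Open", "Affected", "In review"], matching the dashboard's move to OBSERVATION_STATUS_ACTIVE above. A hypothetical helper, not part of the diff, that builds an equivalent link while letting encodeURIComponent do the escaping instead of hand-writing %7B/%22 sequences:

const ACTIVE_STATUSES = ["Open", "Affected", "In review"];

function activeObservationsUrl(productId: number, branchId: number): string {
    const filter = JSON.stringify({ current_status: ACTIVE_STATUSES, branch: branchId });
    return (
        `#/products/${productId}/show` +
        `?displayedFilters=%7B%7D&filter=${encodeURIComponent(filter)}` +
        `&order=ASC&sort=current_severity`
    );
}

// activeObservationsUrl(3, 7) points at the same filtered list as the hand-written
// URL strings in this diff, up to URL-encoding details.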
diff --git a/frontend/src/core/observations/ObservationEdit.tsx b/frontend/src/core/observations/ObservationEdit.tsx index 88c106d28..f703334fc 100644 --- a/frontend/src/core/observations/ObservationEdit.tsx +++ b/frontend/src/core/observations/ObservationEdit.tsx @@ -15,7 +15,9 @@ import { useRecordContext, } from "react-admin"; +import observations from "."; import { PERMISSION_OBSERVATION_DELETE } from "../../access_control/types"; +import MarkdownEdit from "../../commons/custom_fields/MarkdownEdit"; import TextUrlField from "../../commons/custom_fields/TextUrlField"; import { validate_0_10, @@ -26,9 +28,11 @@ import { validate_required, validate_required_255, } from "../../commons/custom_validators"; -import { justificationIsEnabledForStatus } from "../../commons/functions"; +import { justificationIsEnabledForStatus, settings_vex_justification_style } from "../../commons/functions"; import { AutocompleteInputMedium, AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; +import { VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX, VEX_JUSTIFICATION_TYPE_CYCLONEDX } from "../../commons/types"; import { + OBSERVATION_CYCLONEDX_VEX_JUSTIFICATION_CHOICES, OBSERVATION_SEVERITY_CHOICES, OBSERVATION_STATUS_CHOICES, OBSERVATION_STATUS_RISK_ACCEPTED, @@ -40,23 +44,40 @@ const CustomToolbar = () => { return ( - - {observation && observation.product_data.permissions.includes(PERMISSION_OBSERVATION_DELETE) && ( + + {observation?.product_data.permissions.includes(PERMISSION_OBSERVATION_DELETE) && ( )} ); }; -const ObservationEditForm = () => { +interface ObservationEditFormProps { + setDescription: (value: string) => void; + setRecommendation: (value: string) => void; +} + +const ObservationEditForm = ({ setDescription, setRecommendation }: ObservationEditFormProps) => { const observation = useRecordContext(); const [status, setStatus] = useState(observation ? 
observation.parser_status : ""); const justificationEnabled = justificationIsEnabledForStatus(status); + const [descriptionSet, setDescriptionSet] = useState(false); + const [recommendationSet, setRecommendationSet] = useState(false); + + if (!descriptionSet && observation) { + setDescription(observation.description); + setDescriptionSet(true); + } + if (!recommendationSet && observation) { + setRecommendation(observation.recommendation); + setRecommendationSet(true); + } return ( }> - - Observation + + +   Observation @@ -87,27 +108,34 @@ const ObservationEditForm = () => { ) } - {justificationEnabled && ( - - )} + {justificationEnabled && + settings_vex_justification_style() === VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX && ( + + )} + {justificationEnabled && + settings_vex_justification_style() === VEX_JUSTIFICATION_TYPE_CYCLONEDX && ( + + )} - - @@ -150,7 +178,11 @@ const ObservationEditForm = () => { sx={{ width: "10em" }} /> - + { sx={{ width: "10em" }} /> - + { @@ -260,58 +297,28 @@ const ObservationEditForm = () => { }; const ObservationEdit = () => { + const [description, setDescription] = useState(""); + const [recommendation, setRecommendation] = useState(""); + const transform = (data: any) => { - if (!data.parser_severity) { - data.parser_severity = ""; - } - if (!data.description) { - data.description = ""; - } - if (!data.recommendation) { - data.recommendation = ""; - } - if (!data.origin_component_name) { - data.origin_component_name = ""; - } - if (!data.origin_component_version) { - data.origin_component_version = ""; - } - if (!data.origin_docker_image_name) { - data.origin_docker_image_name = ""; - } - if (!data.origin_docker_image_tag) { - data.origin_docker_image_tag = ""; - } - if (!data.origin_endpoint_url) { - data.origin_endpoint_url = ""; - } - if (!data.origin_source_file) { - data.origin_source_file = ""; - } - if (!data.origin_cloud_provider) { - data.origin_cloud_provider = ""; - } - if (!data.origin_cloud_account_subscription_project) { - data.origin_cloud_account_subscription_project = ""; - } - if (!data.origin_cloud_resource) { - data.origin_cloud_resource = ""; - } - if (!data.origin_cloud_resource_type) { - data.origin_cloud_resource_type = ""; - } - if (!data.origin_kubernetes_cluster) { - data.origin_kubernetes_cluster = ""; - } - if (!data.origin_kubernetes_namespace) { - data.origin_kubernetes_namespace = ""; - } - if (!data.origin_kubernetes_resource_type) { - data.origin_kubernetes_resource_type = ""; - } - if (!data.origin_kubernetes_resource_name) { - data.origin_kubernetes_resource_name = ""; - } + data.description = description; + data.recommendation = recommendation; + + data.parser_severity ??= ""; + data.origin_component_name ??= ""; + data.origin_component_version ??= ""; + data.origin_docker_image_name ??= ""; + data.origin_docker_image_tag ??= ""; + data.origin_endpoint_url ??= ""; + data.origin_source_file ??= ""; + data.origin_cloud_provider ??= ""; + data.origin_cloud_account_subscription_project ??= ""; + data.origin_cloud_resource ??= ""; + data.origin_cloud_resource_type ??= ""; + data.origin_kubernetes_cluster ??= ""; + data.origin_kubernetes_namespace ??= ""; + data.origin_kubernetes_resource_type ??= ""; + data.origin_kubernetes_resource_name ??= ""; if (!justificationIsEnabledForStatus(data.parser_status) || !data.parser_vex_justification) { data.parser_vex_justification = ""; } @@ -320,21 +327,15 @@ const ObservationEdit = () => { } data.origin_component_name_version = ""; data.origin_docker_image_name_tag = ""; - if 
(!data.vulnerability_id) { - data.vulnerability_id = ""; - } - if (!data.cvss3_vector) { - data.cvss3_vector = ""; - } - if (!data.cvss4_vector) { - data.cvss4_vector = ""; - } + data.vulnerability_id ??= ""; + data.cvss3_vector ??= ""; + data.cvss4_vector ??= ""; return data; }; return ( - + ); }; diff --git a/frontend/src/core/observations/ObservationEmbeddedList.tsx b/frontend/src/core/observations/ObservationEmbeddedList.tsx index 7dce79ee8..e8b3bdb41 100644 --- a/frontend/src/core/observations/ObservationEmbeddedList.tsx +++ b/frontend/src/core/observations/ObservationEmbeddedList.tsx @@ -1,7 +1,7 @@ import { Stack } from "@mui/material"; import { useEffect } from "react"; import { - AutocompleteInput, + AutocompleteArrayInput, BooleanField, ChipField, Datagrid, @@ -15,6 +15,7 @@ import { ResourceContextProvider, TextField, TextInput, + WithListContext, useListController, } from "react-admin"; import { useNavigate } from "react-router"; @@ -22,16 +23,15 @@ import { useNavigate } from "react-router"; import { PERMISSION_OBSERVATION_ASSESSMENT, PERMISSION_OBSERVATION_DELETE } from "../../access_control/types"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; import { SeverityField } from "../../commons/custom_fields/SeverityField"; -import { humanReadableDate } from "../../commons/functions"; +import { feature_exploit_information, has_attribute, humanReadableDate } from "../../commons/functions"; import { AutocompleteInputMedium } from "../../commons/layout/themes"; import { getSettingListSize } from "../../commons/user_settings/functions"; import { AGE_CHOICES, OBSERVATION_SEVERITY_CHOICES, + OBSERVATION_STATUS_ACTIVE, OBSERVATION_STATUS_CHOICES, - OBSERVATION_STATUS_OPEN, Observation, - PURL_TYPE_CHOICES, Product, } from "../types"; import ObservationBulkAssessment from "./ObservationBulkAssessment"; @@ -41,32 +41,36 @@ import { IDENTIFIER_OBSERVATION_EMBEDDED_LIST, setListIdentifier } from "./funct function listFilters(product: Product) { const filters = []; - if (product && product.has_branches) { + if (product?.has_branches) { filters.push( ); } - filters.push(); - filters.push( - - ); filters.push( - + , + , + ); - if (product && product.has_services) { + if (product?.has_services) { filters.push( ); + if (product?.has_component) { filters.push( - , + + > + + ); } - if (product && product.has_docker_image) { + if (product?.has_docker_image) { filters.push(); } - if (product && product.has_endpoint) { + if (product?.has_endpoint) { filters.push(); } - if (product && product.has_source) { + if (product?.has_source) { filters.push(); } - if (product && product.has_cloud_resource) { + if (product?.has_cloud_resource) { filters.push(); } - if (product && product.has_kubernetes_resource) { + if (product?.has_kubernetes_resource) { filters.push(); } - filters.push(); - filters.push(); - filters.push(); - filters.push(); - if (product && product.has_potential_duplicates) { + filters.push( + , + , + , + + ); + if (product?.has_potential_duplicates) { filters.push(); } - if (product && product.observation_log_approvals > 0) { - filters.push(); + if (product?.has_component) { + if (feature_exploit_information()) { + filters.push(); + } + filters.push(); } - return filters; } @@ -128,10 +138,10 @@ type ObservationsEmbeddedListProps = { const BulkActionButtons = (product: any) => ( {product.product.permissions.includes(PERMISSION_OBSERVATION_ASSESSMENT) && ( - + )} {product.product.permissions.includes(PERMISSION_OBSERVATION_DELETE) && ( - + )} ); @@ -141,7 
+151,7 @@ const ObservationsEmbeddedList = ({ product }: ObservationsEmbeddedListProps) => const navigate = useNavigate(); function get_observations_url(branch_id: Identifier): string { - return `?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A%22Open%22%2C%22branch%22%3A${branch_id}%7D&order=ASC&sort=current_severity`; + return `?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A["Open"%2C"Affected"%2C"In review"]%2C%22branch%22%3A${branch_id}%7D&order=ASC&sort=current_severity`; } useEffect(() => { const current_product_id = localStorage.getItem("observationembeddedlist.product"); @@ -160,7 +170,7 @@ const ObservationsEmbeddedList = ({ product }: ObservationsEmbeddedListProps) => perPage: 25, resource: "observations", sort: { field: "current_severity", order: "ASC" }, - filterDefaultValues: { current_status: OBSERVATION_STATUS_OPEN, branch: product.repository_default_branch }, + filterDefaultValues: { current_status: OBSERVATION_STATUS_ACTIVE, branch: product.repository_default_branch }, disableSyncWithLocation: false, storeKey: "observations.embedded", }); @@ -174,75 +184,95 @@ const ObservationsEmbeddedList = ({ product }: ObservationsEmbeddedListProps) =>
- - ) - } - resource="observations" - expand={} - expandSingle - > - {product && product.has_branches && } - - - - {product && product.has_component && } - {product && product.has_services && } - {product && product.has_component && ( - - )} - {product && product.has_docker_image && ( - - )} - {product && product.has_endpoint && ( - - )} - {product && product.has_source && ( - - )} - {product && product.has_cloud_resource && ( - - )} - {product && product.has_kubernetes_resource && ( - - )} - - - label="Age" - sortBy="last_observation_log" - render={(record) => (record ? humanReadableDate(record.last_observation_log) : "")} - /> - {product && product.has_potential_duplicates && ( - + ( + + ) + } + resource="observations" + expand={} + expandSingle + > + {has_attribute("branch_name", data, sort) && ( + + )} + + + + {has_attribute("current_priority", data, sort) && ( + + )} + {has_attribute("epss_score", data, sort) && ( + + )} + {has_attribute("origin_service_name", data, sort) && ( + + )} + {has_attribute("origin_component_name_version", data, sort) && ( + + )} + {has_attribute("origin_docker_image_name_tag_short", data, sort) && ( + + )} + {has_attribute("origin_endpoint_hostname", data, sort) && ( + + )} + {has_attribute("origin_source_file_short", data, sort) && ( + + )} + {has_attribute("origin_cloud_qualified_resource", data, sort) && ( + + )} + {has_attribute("origin_kubernetes_qualified_resource", data, sort) && ( + + )} + + + label="Age" + sortBy="last_observation_log" + render={(record) => (record ? humanReadableDate(record.last_observation_log) : "")} + /> + {product?.has_potential_duplicates && ( + + )} + {has_attribute("update_impact_score", data, sort) && ( + + )} + )} - + />
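The columns above (and in the other observation lists later in this diff) are now gated by a has_attribute helper imported from ../../commons/functions; its implementation is not part of this change. Below is a minimal sketch of what such a helper could look like, assuming the semantics suggested by its call sites: a column is shown when at least one loaded record carries the attribute, or when the list is currently sorted by it. Every identifier in the sketch is an assumption, not the project's actual code.

// Sketch only: the real has_attribute lives in frontend/src/commons/functions
// and may differ in details. Supports nested paths such as
// "product_data.product_group_name" as used in ObservationList.
import { RaRecord, SortPayload } from "react-admin";

const resolve = (record: RaRecord, path: string): unknown =>
    path.split(".").reduce<unknown>((value, key) => (value as any)?.[key], record);

export function has_attribute(attribute: string, data: RaRecord[] | undefined, sort: SortPayload): boolean {
    // Keep the column visible while the list is sorted by it, even if the current page has no values.
    if (sort?.field === attribute) {
        return true;
    }
    return (data ?? []).some((record) => {
        const value = resolve(record, attribute);
        return value !== undefined && value !== null && value !== "";
    });
}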
diff --git a/frontend/src/core/observations/ObservationExpand.tsx b/frontend/src/core/observations/ObservationExpand.tsx
index e5bb90055..58fb93d10 100644
--- a/frontend/src/core/observations/ObservationExpand.tsx
+++ b/frontend/src/core/observations/ObservationExpand.tsx
@@ -5,7 +5,11 @@ import { getElevation } from "../../metrics/functions";
 import ObservationShowDescriptionRecommendation from "./ObservationShowDescriptionRecommendation";
 import ObservationShowOrigins from "./ObservationShowOrigins";
 
-const ObservationExpand = () => {
+type ObservationExpandProps = {
+    showComponent: boolean;
+};
+
+const ObservationExpand = ({ showComponent }: ObservationExpandProps) => {
     const observation = useRecordContext();
 
     return (
@@ -17,7 +21,7 @@ const ObservationExpand = () => {
)} - + {showComponent && }
); }; diff --git a/frontend/src/core/observations/ObservationList.tsx b/frontend/src/core/observations/ObservationList.tsx index 5a5bee8db..a5e06ef0d 100644 --- a/frontend/src/core/observations/ObservationList.tsx +++ b/frontend/src/core/observations/ObservationList.tsx @@ -1,34 +1,34 @@ -import { Stack } from "@mui/material"; import { Fragment } from "react"; import { + AutocompleteArrayInput, AutocompleteInput, BooleanField, ChipField, - DatagridConfigurable, + Datagrid, FilterButton, FunctionField, List, NullableBooleanInput, NumberField, ReferenceInput, - SelectColumnsButton, TextField, TextInput, TopToolbar, + WithListContext, } from "react-admin"; import observations from "."; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; import { SeverityField } from "../../commons/custom_fields/SeverityField"; -import { humanReadableDate } from "../../commons/functions"; +import { feature_exploit_information, has_attribute, humanReadableDate } from "../../commons/functions"; import ListHeader from "../../commons/layout/ListHeader"; -import { AutocompleteInputMedium, AutocompleteInputWide } from "../../commons/layout/themes"; +import { AutocompleteInputMedium } from "../../commons/layout/themes"; import { getSettingListSize } from "../../commons/user_settings/functions"; import { AGE_CHOICES, OBSERVATION_SEVERITY_CHOICES, + OBSERVATION_STATUS_ACTIVE, OBSERVATION_STATUS_CHOICES, - OBSERVATION_STATUS_OPEN, Observation, PURL_TYPE_CHOICES, } from "../types"; @@ -36,72 +36,69 @@ import ObservationBulkAssessment from "./ObservationBulkAssessment"; import ObservationExpand from "./ObservationExpand"; import { IDENTIFIER_OBSERVATION_LIST, setListIdentifier } from "./functions"; -const listFilters = [ - - - , - - - , - - - , - , - , - , - - - , - , - , - , - , - , - , - , - , - , - , - , -]; +function listFilters() { + const filters = []; + filters.push( + , + , + + ); + filters.push( + + + , + + + , + , + , + , + , + , + , + , + , + , + , + , + + ); + if (feature_exploit_information()) { + filters.push(); + } + filters.push(); + return filters; +} const ListActions = () => ( - - - - + ); -const BulkActionButtons = () => ( - - - -); +const BulkActionButtons = () => ; const ObservationList = () => { setListIdentifier(IDENTIFIER_OBSERVATION_LIST); @@ -112,59 +109,97 @@ const ObservationList = () => { } - filters={listFilters} + filters={listFilters()} sort={{ field: "current_severity", order: "ASC" }} - filterDefaultValues={{ current_status: OBSERVATION_STATUS_OPEN }} + filterDefaultValues={{ current_status: OBSERVATION_STATUS_ACTIVE }} disableSyncWithLocation={false} storeKey="observations.list" actions={} sx={{ marginTop: 1 }} > - } - expand={} - expandSingle - > - - - - - - - - - - - - - - - - - label="Age" - sortBy="last_observation_log" - render={(record) => (record ? 
humanReadableDate(record.last_observation_log) : "")} - /> - - + ( + } + expand={} + expandSingle + > + + + + {has_attribute("current_priority", data, sort) && ( + + )} + {has_attribute("epss_score", data, sort) && ( + + )} + + {has_attribute("product_data.product_group_name", data, sort) && ( + + )} + {has_attribute("branch_name", data, sort) && ( + + )} + {has_attribute("origin_service_name", data, sort) && ( + + )} + {has_attribute("origin_component_name_version", data, sort) && ( + + )} + {has_attribute("origin_docker_image_name_tag_short", data, sort) && ( + + )} + {has_attribute("origin_endpoint_hostname", data, sort) && ( + + )} + {has_attribute("origin_source_file_short", data, sort) && ( + + )} + {has_attribute("origin_cloud_qualified_resource", data, sort) && ( + + )} + {has_attribute("origin_kubernetes_qualified_resource", data, sort) && ( + + )} + + + label="Age" + sortBy="last_observation_log" + render={(record) => (record ? humanReadableDate(record.last_observation_log) : "")} + /> + + {has_attribute("update_impact_score", data, sort) && ( + + )} + + )} + />
); diff --git a/frontend/src/core/observations/ObservationRemoveAssessment.tsx b/frontend/src/core/observations/ObservationRemoveAssessment.tsx index aa2379e9c..18d2fea60 100644 --- a/frontend/src/core/observations/ObservationRemoveAssessment.tsx +++ b/frontend/src/core/observations/ObservationRemoveAssessment.tsx @@ -1,15 +1,15 @@ import { Dialog, DialogContent, DialogTitle } from "@mui/material"; -import { Fragment, useState } from "react"; -import { SaveButton, SimpleForm, useNotify, useRefresh } from "react-admin"; +import { Fragment, useRef, useState } from "react"; +import { SimpleForm, useNotify, useRefresh } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; +import MarkdownEdit from "../../commons/custom_fields/MarkdownEdit"; import RemoveButton from "../../commons/custom_fields/RemoveButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; -import { validate_required_4096 } from "../../commons/custom_validators"; -import { TextInputWide } from "../../commons/layout/themes"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { httpClient } from "../../commons/ra-data-django-rest-framework"; const ObservationRemoveAssessment = () => { + const dialogRef = useRef(null); + const [comment, setComment] = useState(""); const [open, setOpen] = useState(false); const refresh = useRefresh(); const notify = useNotify(); @@ -21,8 +21,15 @@ const ObservationRemoveAssessment = () => { }; const observationUpdate = async (data: any) => { + if (comment === "") { + notify("Comment is required", { + type: "warning", + }); + return; + } + const patch = { - comment: data.comment, + comment: comment, }; httpClient(window.__RUNTIME_CONFIG__.API_BASE_URL + "/observations/" + data.id + "/remove_assessment/", { @@ -44,25 +51,22 @@ const ObservationRemoveAssessment = () => { setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); - return ( - + Observation Remove Assessment - }> - } + > + diff --git a/frontend/src/core/observations/ObservationReviewList.tsx b/frontend/src/core/observations/ObservationReviewList.tsx index 92f78c614..69cfb974a 100644 --- a/frontend/src/core/observations/ObservationReviewList.tsx +++ b/frontend/src/core/observations/ObservationReviewList.tsx @@ -1,5 +1,6 @@ import { Fragment } from "react"; import { + AutocompleteArrayInput, AutocompleteInput, BooleanField, ChipField, @@ -13,14 +14,15 @@ import { ResourceContextProvider, TextField, TextInput, + WithListContext, useListController, } from "react-admin"; import { PERMISSION_OBSERVATION_ASSESSMENT } from "../../access_control/types"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; import { SeverityField } from "../../commons/custom_fields/SeverityField"; -import { humanReadableDate } from "../../commons/functions"; -import { AutocompleteInputMedium, AutocompleteInputWide } from "../../commons/layout/themes"; +import { has_attribute, humanReadableDate } from "../../commons/functions"; +import { AutocompleteInputMedium } from "../../commons/layout/themes"; import { getSettingListSize } from "../../commons/user_settings/functions"; import { AGE_CHOICES, @@ -40,68 +42,59 @@ import { function listFilters(product: Product) { const filters = []; - if (!product) { + if (product?.has_branches) { filters.push( - + ); } + filters.push( + , + + ); if (!product) { filters.push( - - ); - } - if (!product) { - filters.push( - - - - ); - } - if (product && product.has_branches) { - filters.push( + , - - + + , + , + 
); } - filters.push(); - filters.push( - - ); - if (product && product.has_services) { + if (product?.has_services) { filters.push( ); filters.push( ); } - if (!product || (product && product.has_docker_image)) { + if (!product || product?.has_docker_image) { filters.push(); } - if (!product || (product && product.has_endpoint)) { + if (!product || product?.has_endpoint) { filters.push(); } - if (!product || (product && product.has_source)) { + if (!product || product?.has_source) { filters.push(); } - if (!product || (product && product.has_cloud_resource)) { + if (!product || product?.has_cloud_resource) { filters.push(); } - if (!product || (product && product.has_kubernetes_resource)) { + if (!product || product?.has_kubernetes_resource) { filters.push(); } - filters.push(); - filters.push(); - if (product && product.has_potential_duplicates) { + filters.push( + , + + ); + if (product?.has_potential_duplicates) { filters.push(); } @@ -151,18 +146,23 @@ const ShowObservations = (id: any) => { return "../../../../observations/" + id + "/show"; }; -type ObservationsReviewListProps = { +type BulkActionButtonsProps = { product?: any; + storeKey: string; }; -const BulkActionButtons = ({ product }: any) => ( +const BulkActionButtons = ({ product, storeKey }: BulkActionButtonsProps) => ( - {(!product || (product && product.permissions.includes(PERMISSION_OBSERVATION_ASSESSMENT))) && ( - + {(!product || product?.permissions.includes(PERMISSION_OBSERVATION_ASSESSMENT)) && ( + )} ); +type ObservationsReviewListProps = { + product?: any; +}; + const ObservationsReviewList = ({ product }: ObservationsReviewListProps) => { if (product) { setListIdentifier(IDENTIFIER_OBSERVATION_REVIEW_LIST_PRODUCT); @@ -199,82 +199,94 @@ const ObservationsReviewList = ({ product }: ObservationsReviewListProps) => {
- - ) - } - resource="observations" - expand={} - expandSingle - > - {!product && } - {!product && } - {(!product || (product && product.has_branches)) && ( - - )} - - - {(!product || (product && product.has_component)) && ( - - )} - - {(!product || (product && product.has_services)) && ( - - )} - {(!product || (product && product.has_component)) && ( - - )} - {(!product || (product && product.has_docker_image)) && ( - - )} - {(!product || (product && product.has_endpoint)) && ( - - )} - {(!product || (product && product.has_source)) && ( - - )} - {(!product || (product && product.has_cloud_resource)) && ( - - )} - {(!product || (product && product.has_kubernetes_resource)) && ( - - )} - - - label="Age" - sortBy="last_observation_log" - render={(record) => (record ? humanReadableDate(record.last_observation_log) : "")} - /> - {product && product.has_potential_duplicates && ( - + ( + + ) + } + resource="observations" + expand={} + expandSingle + > + + + + {has_attribute("current_priority", data, sort) && ( + + )} + {has_attribute("epss_score", data, sort) && ( + + )} + {!product && } + {!product && has_attribute("product_data.product_group_name", data, sort) && ( + + )} + {has_attribute("branch_name", data, sort) && ( + + )} + {has_attribute("origin_service_name", data, sort) && ( + + )} + {has_attribute("origin_component_name_version", data, sort) && ( + + )} + {has_attribute("origin_docker_image_name_tag_short", data, sort) && ( + + )} + {has_attribute("origin_endpoint_hostname", data, sort) && ( + + )} + {has_attribute("origin_source_file_short", data, sort) && ( + + )} + {has_attribute("origin_cloud_qualified_resource", data, sort) && ( + + )} + {has_attribute("origin_kubernetes_qualified_resource", data, sort) && ( + + )} + + + label="Age" + sortBy="last_observation_log" + render={(record) => (record ? humanReadableDate(record.last_observation_log) : "")} + /> + {product?.has_potential_duplicates && ( + + )} + )} - + />
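Several lists in this change switch their default status filter from OBSERVATION_STATUS_OPEN to OBSERVATION_STATUS_ACTIVE and swap the single-select status filter for an AutocompleteArrayInput. The constant itself is defined in core/types, outside this section; judging by the hard-coded filter URL in ObservationEmbeddedList above (["Open", "Affected", "In review"]), it is presumably a list of the not-yet-resolved statuses, roughly as sketched here. The sketch is an assumption, not the actual types module.

// Sketch of the assumed core/types addition; the real definition may differ.
export const OBSERVATION_STATUS_OPEN = "Open";
export const OBSERVATION_STATUS_AFFECTED = "Affected";
export const OBSERVATION_STATUS_IN_REVIEW = "In review";

// "Active" observations: everything that still needs attention.
export const OBSERVATION_STATUS_ACTIVE = [
    OBSERVATION_STATUS_OPEN,
    OBSERVATION_STATUS_AFFECTED,
    OBSERVATION_STATUS_IN_REVIEW,
];

Passing this array as filterDefaultValues={{ current_status: OBSERVATION_STATUS_ACTIVE }} pairs with the AutocompleteArrayInput filter, so the backend receives a list of statuses instead of a single value.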
diff --git a/frontend/src/core/observations/ObservationShow.tsx b/frontend/src/core/observations/ObservationShow.tsx index a46219ef8..57232feac 100644 --- a/frontend/src/core/observations/ObservationShow.tsx +++ b/frontend/src/core/observations/ObservationShow.tsx @@ -1,12 +1,13 @@ -import { Box, Paper, Stack, Typography } from "@mui/material"; +import { Box, Paper, Stack, TableHead, Typography } from "@mui/material"; import { Fragment } from "react"; import { + ArrayField, + Datagrid, EditButton, Labeled, NumberField, PrevNextButtons, Show, - TextField, TopToolbar, WithRecord, useRecordContext, @@ -17,11 +18,13 @@ import { PERMISSION_OBSERVATION_EDIT, PERMISSION_OBSERVATION_LOG_APPROVAL, } from "../../access_control/types"; +import CVEFoundInField from "../../commons/custom_fields/CVEFoundInField"; import TextUrlField from "../../commons/custom_fields/TextUrlField"; -import { get_cvss3_url, get_cvss4_url, get_cwe_url, get_vulnerability_url } from "../../commons/functions"; +import VulnerabilityIdField from "../../commons/custom_fields/VulnerabilityIdField"; +import { get_cvss3_url, get_cvss4_url, get_cwe_url } from "../../commons/functions"; import AssessmentApproval from "../observation_logs/AssessmentApproval"; import ObservationLogEmbeddedList from "../observation_logs/ObservationLogEmbeddedList"; -import { OBSERVATION_STATUS_IN_REVIEW, OBSERVATION_STATUS_OPEN } from "../types"; +import { OBSERVATION_STATUS_ACTIVE, OBSERVATION_STATUS_IN_REVIEW } from "../types"; import ObservationAssessment from "./ObservationAssessment"; import ObservationRemoveAssessment from "./ObservationRemoveAssessment"; import ObservationsShowAside from "./ObservationShowAside"; @@ -29,6 +32,7 @@ import ObservationShowHeader from "./ObservationShowHeader"; import ObservationShowOrigins from "./ObservationShowOrigins"; import PotentialDuplicatesList from "./PotentialDuplicatesList"; import { + IDENTIFIER_OBSERVATION_COMPONENT_LIST, IDENTIFIER_OBSERVATION_DASHBOARD_LIST, IDENTIFIER_OBSERVATION_EMBEDDED_LIST, IDENTIFIER_OBSERVATION_LIST, @@ -44,7 +48,7 @@ const ShowActions = () => { if (localStorage.getItem(IDENTIFIER_OBSERVATION_LIST) === "true") { filter = {}; - filterDefaultValues = { current_status: OBSERVATION_STATUS_OPEN }; + filterDefaultValues = { current_status: OBSERVATION_STATUS_ACTIVE }; storeKey = "observations.list"; } else if (observation && localStorage.getItem(IDENTIFIER_OBSERVATION_EMBEDDED_LIST) === "true") { filter = { product: observation.product }; @@ -52,7 +56,7 @@ const ShowActions = () => { } else if (localStorage.getItem(IDENTIFIER_OBSERVATION_DASHBOARD_LIST) === "true") { filter = { age: "Past 7 days", - current_status: OBSERVATION_STATUS_OPEN, + current_status: OBSERVATION_STATUS_ACTIVE, }; storeKey = "observations.dashboard"; } else if (observation && localStorage.getItem(IDENTIFIER_OBSERVATION_REVIEW_LIST_PRODUCT) === "true") { @@ -61,6 +65,16 @@ const ShowActions = () => { } else if (localStorage.getItem(IDENTIFIER_OBSERVATION_REVIEW_LIST) === "true") { filter = { current_status: OBSERVATION_STATUS_IN_REVIEW }; storeKey = "observations.review"; + } else if (observation && localStorage.getItem(IDENTIFIER_OBSERVATION_COMPONENT_LIST) === "true") { + filter = { + product: observation.product, + branch: observation.branch, + origin_service: observation.origin_service, + origin_component_name_version: observation.origin_component_name_version, + origin_component_purl_type: observation.origin_component_purl_type, + current_status: OBSERVATION_STATUS_ACTIVE, + }; + storeKey = 
"observations.component"; } return ( @@ -76,24 +90,17 @@ const ShowActions = () => { storeKey={storeKey} /> )} - {observation && - observation.product_data.permissions && - observation.product_data.permissions.includes(PERMISSION_OBSERVATION_ASSESSMENT) && ( - - )} - {observation && - observation.product_data.permissions && - observation.product_data.permissions.includes(PERMISSION_OBSERVATION_ASSESSMENT) && - (observation.assessment_severity || observation.assessment_status) && ( + {observation?.product_data?.permissions?.includes(PERMISSION_OBSERVATION_ASSESSMENT) && ( + + )} + {observation?.product_data?.permissions?.includes(PERMISSION_OBSERVATION_ASSESSMENT) && + (observation?.assessment_severity || observation?.assessment_status) && ( )} - {observation && - observation.product_data.permissions && - observation.parser_data.type == "Manual" && - observation.product_data.permissions.includes(PERMISSION_OBSERVATION_EDIT) && } - {observation && - observation.assessment_needs_approval && - observation.product_data.permissions.includes(PERMISSION_OBSERVATION_LOG_APPROVAL) && ( + {observation?.parser_data?.type == "Manual" && + observation?.product_data?.permissions?.includes(PERMISSION_OBSERVATION_EDIT) && } + {observation?.assessment_needs_approval && + observation?.product_data?.permissions?.includes(PERMISSION_OBSERVATION_LOG_APPROVAL) && ( )} @@ -101,6 +108,8 @@ const ShowActions = () => { ); }; +const EmptyDatagridHeader = () => ; + const ObservationShowComponent = () => { return ( { Vulnerability
- {observation.vulnerability_id != "" && - get_vulnerability_url(observation.vulnerability_id) == null && ( - - - - )} - {observation.vulnerability_id != "" && - get_vulnerability_url(observation.vulnerability_id) != null && ( - - + + ( + + )} /> - )} + {observation.vulnerability_id_aliases && + observation.vulnerability_id_aliases.length > 0 && ( + + + + + ( + + )} + /> + + + + + )} + + )} {(observation.cvss3_score != null || observation.cvss3_vector != "" || observation.cvss4_score != null || @@ -155,9 +188,10 @@ const ObservationShowComponent = () => { {observation.cvss4_vector != "" && ( )} @@ -173,14 +207,23 @@ const ObservationShowComponent = () => { {observation.cvss3_vector != "" && ( )} )} + {observation.cve_found_in && observation.cve_found_in.length > 0 && ( + + + + )} )} {observation.cwe != null && ( @@ -189,6 +232,7 @@ const ObservationShowComponent = () => { label="CWE" text={observation.cwe} url={get_cwe_url(observation.cwe)} + new_tab={true} /> )} @@ -215,7 +259,7 @@ const ObservationShowComponent = () => { - {observation && observation.has_potential_duplicates && ( + {observation?.has_potential_duplicates && ( Potential Duplicates diff --git a/frontend/src/core/observations/ObservationShowAside.tsx b/frontend/src/core/observations/ObservationShowAside.tsx index 9e8ccfd8b..f42e83c94 100644 --- a/frontend/src/core/observations/ObservationShowAside.tsx +++ b/frontend/src/core/observations/ObservationShowAside.tsx @@ -16,7 +16,7 @@ import { Link } from "react-router-dom"; import TextUrlField from "../../commons/custom_fields/TextUrlField"; import { is_superuser } from "../../commons/functions"; import { useLinkStyles } from "../../commons/layout/themes"; -import { getSettingTheme } from "../../commons/user_settings/functions"; +import { getResolvedSettingTheme } from "../../commons/user_settings/functions"; const ObservationsShowAside = () => { return ( @@ -58,7 +58,16 @@ const MetaData = () => { + )} + {observation.general_rule_rego != null && ( + @@ -67,7 +76,16 @@ const MetaData = () => { + )} + {observation.product_rule_rego != null && ( + @@ -91,6 +109,7 @@ const MetaData = () => { observation.issue_tracker_issue_id } url={observation.issue_tracker_issue_url} + new_tab={true} /> )} @@ -107,7 +126,7 @@ const MetaData = () => { const EmptyDatagridHeader = () => ; const References = () => { - const { classes } = useLinkStyles({ setting_theme: getSettingTheme() }); + const { classes } = useLinkStyles({ setting_theme: getResolvedSettingTheme() }); return ( { }; const Evidences = () => { - const { classes } = useLinkStyles({ setting_theme: getSettingTheme() }); + const { classes } = useLinkStyles({ setting_theme: getResolvedSettingTheme() }); return ( ( diff --git a/frontend/src/core/observations/ObservationShowDescriptionRecommendation.tsx b/frontend/src/core/observations/ObservationShowDescriptionRecommendation.tsx index b8587edc1..56ff764f8 100644 --- a/frontend/src/core/observations/ObservationShowDescriptionRecommendation.tsx +++ b/frontend/src/core/observations/ObservationShowDescriptionRecommendation.tsx @@ -1,5 +1,5 @@ import { Stack } from "@mui/material"; -import { Labeled, useRecordContext } from "react-admin"; +import { Labeled, NumberField, useRecordContext } from "react-admin"; import MarkdownField from "../../commons/custom_fields/MarkdownField"; @@ -12,11 +12,18 @@ const ObservationShowDescriptionRecommendation = () => { )} - {observation && observation.recommendation != "" && ( - - - - )} + + {observation && observation?.recommendation != "" && ( + + + + )} + 
{observation?.update_impact_score !== null && ( + + + + )} + ); }; diff --git a/frontend/src/core/observations/ObservationShowHeader.tsx b/frontend/src/core/observations/ObservationShowHeader.tsx index 2a6eacb93..2760a6281 100644 --- a/frontend/src/core/observations/ObservationShowHeader.tsx +++ b/frontend/src/core/observations/ObservationShowHeader.tsx @@ -1,4 +1,5 @@ import { Paper, Stack, Typography } from "@mui/material"; +import { Fragment } from "react"; import { ChipField, DateField, @@ -9,6 +10,7 @@ import { useRecordContext, } from "react-admin"; +import observations from "."; import { SeverityField } from "../../commons/custom_fields/SeverityField"; import { useStyles } from "../../commons/layout/themes"; import ObservationShowDescriptionRecommendation from "./ObservationShowDescriptionRecommendation"; @@ -32,8 +34,14 @@ const ObservationShowHeader = ({ observation }: ObservationShowHeaderProps) => { {observation && ( - - Observation + + {!in_observation_log && ( + + +   Observation + + )} + {in_observation_log && Observation} {in_observation_log && ( @@ -60,48 +68,89 @@ const ObservationShowHeader = ({ observation }: ObservationShowHeaderProps) => { - {observation.parser_severity != "" && - (observation.rule_severity != "" || observation.assessment_severity != "") && ( - - - - )} - {observation.rule_severity != "" && ( + {observation.assessment_severity != "" && ( - + )} - {observation.assessment_severity != "" && ( + {observation.rule_rego_severity != "" && ( - + )} + {observation.rule_severity != "" && ( + + + + )} + {observation.parser_severity != "" && + (observation.rule_rego_severity != "" || + observation.rule_severity != "" || + observation.assessment_severity != "") && ( + + + + )} + {observation.assessment_status != "" && ( + + + + )} + {observation.rule_rego_status != "" && ( + + + + )} + {observation.rule_status != "" && ( + + + + )} + {observation.vex_status != "" && ( + + + + )} {observation.parser_status != "" && - (observation.rule_status != "" || + (observation.rule_rego_status != "" || + observation.rule_status != "" || observation.assessment_status != "" || observation.vex_status != "") && ( )} - {observation.vex_status != "" && ( - - + + + {observation.current_priority && ( + + )} - {observation.rule_status != "" && ( + {observation.assessment_priority && ( - + )} - {observation.assessment_status != "" && ( + {observation.rule_rego_priority && ( - + + + )} + {observation.rule_priority && ( + + )} diff --git a/frontend/src/core/observations/ObservationShowOrigins.tsx b/frontend/src/core/observations/ObservationShowOrigins.tsx index cd751d3d8..5dce68b2f 100644 --- a/frontend/src/core/observations/ObservationShowOrigins.tsx +++ b/frontend/src/core/observations/ObservationShowOrigins.tsx @@ -57,37 +57,23 @@ const ObservationShowOrigins = ({ observation, showDependencies, elevated }: Obs )} - {observation.origin_component_purl != "" && - get_component_purl_url( - observation.origin_component_name, - observation.origin_component_version, - observation.origin_component_purl_type, - observation.origin_component_purl_namespace - ) == null && ( + {observation.origin_component_purl !== "" && + get_component_purl_url(observation.origin_component_purl) === null && ( )} - {observation.origin_component_purl != "" && - get_component_purl_url( - observation.origin_component_name, - observation.origin_component_version, - observation.origin_component_purl_type, - observation.origin_component_purl_namespace - ) != null && ( + {observation.origin_component_purl !== "" && + 
get_component_purl_url(observation.origin_component_purl) !== null && ( )} @@ -139,6 +125,7 @@ const ObservationShowOrigins = ({ observation, showDependencies, elevated }: Obs label="Endpoint URL" text={observation.origin_endpoint_url} url={observation.origin_endpoint_url} + new_tab={true} /> )} @@ -173,6 +160,7 @@ const ObservationShowOrigins = ({ observation, showDependencies, elevated }: Obs text={observation.origin_source_file} url={observation.origin_source_file_url} label="Source file" + new_tab={true} /> )} diff --git a/frontend/src/core/observations/PotentialDuplicatesList.tsx b/frontend/src/core/observations/PotentialDuplicatesList.tsx index aa03a3361..6db90e18f 100644 --- a/frontend/src/core/observations/PotentialDuplicatesList.tsx +++ b/frontend/src/core/observations/PotentialDuplicatesList.tsx @@ -6,16 +6,16 @@ import { ListContextProvider, ResourceContextProvider, TextField, + WithListContext, useListController, } from "react-admin"; import { PERMISSION_OBSERVATION_ASSESSMENT } from "../../access_control/types"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; import { SeverityField } from "../../commons/custom_fields/SeverityField"; -import { humanReadableDate } from "../../commons/functions"; +import { has_attribute, humanReadableDate } from "../../commons/functions"; import { getSettingListSize } from "../../commons/user_settings/functions"; -import { OBSERVATION_STATUS_OPEN } from "../types"; -import { Observation } from "../types"; +import { OBSERVATION_STATUS_OPEN, Observation } from "../types"; import ObservationBulkDuplicatesButton from "./ObservationBulkDuplicatesButton"; const ShowObservations = (id: any, resource: any, record: any) => { @@ -28,13 +28,9 @@ type PotentialDuplicatesListProps = { const BulkActionButtons = (observation: any) => ( - {observation && - observation.observation && - observation.observation.product_data && - observation.observation.product_data.permissions && - observation.observation.product_data.permissions.includes(PERMISSION_OBSERVATION_ASSESSMENT) && ( - - )} + {observation?.observation?.product_data?.permissions?.includes(PERMISSION_OBSERVATION_ASSESSMENT) && ( + + )} ); @@ -55,67 +51,94 @@ const PotentialDuplicatesList = ({ observation }: PotentialDuplicatesListProps) return ( - } - resource="potential_duplicates" - > - - - - - {observation && observation.product_data.has_component && ( - + ( + } + resource="potential_duplicates" + > + + + + {has_attribute("potential_duplicate_observation.origin_service_name", data, sort) && ( + + )} + {has_attribute( + "potential_duplicate_observation.origin_component_name_version", + data, + sort + ) && ( + + )} + {has_attribute( + "potential_duplicate_observation.origin_docker_image_name_tag_short", + data, + sort + ) && ( + + )} + {has_attribute("potential_duplicate_observation.origin_endpoint_hostname", data, sort) && ( + + )} + {has_attribute("potential_duplicate_observation.origin_source_file", data, sort) && ( + + )} + {has_attribute( + "potential_duplicate_observation.origin_cloud_qualified_resource", + data, + sort + ) && ( + + )} + {has_attribute( + "potential_duplicate_observation.origin_kubernetes_qualified_resource", + data, + sort + ) && ( + + )} + + + label="Age" + sortBy="potential_duplicate_observation.last_observation_log" + render={(record) => + record + ? 
humanReadableDate(record.potential_duplicate_observation.last_observation_log) + : "" + } + /> + )} - {observation && observation.product_data.has_docker_image && ( - - )} - {observation && observation.product_data.has_endpoint && ( - - )} - {observation && observation.product_data.has_source && ( - - )} - {observation && observation.product_data.has_cloud_resource && ( - - )} - {observation && observation.product_data.has_kubernetes_resource && ( - - )} - - - label="Age" - sortBy="potential_duplicate_observation.last_observation_log" - render={(record) => - record ? humanReadableDate(record.potential_duplicate_observation.last_observation_log) : "" - } - /> - + /> diff --git a/frontend/src/core/observations/functions.ts b/frontend/src/core/observations/functions.ts index 7272443b1..ed0c9215b 100644 --- a/frontend/src/core/observations/functions.ts +++ b/frontend/src/core/observations/functions.ts @@ -3,6 +3,7 @@ export const IDENTIFIER_OBSERVATION_EMBEDDED_LIST = "observationembeddedlist"; export const IDENTIFIER_OBSERVATION_DASHBOARD_LIST = "observationdashboardlist"; export const IDENTIFIER_OBSERVATION_REVIEW_LIST = "observationreviewlist"; export const IDENTIFIER_OBSERVATION_REVIEW_LIST_PRODUCT = "observationreviewlistproduct"; +export const IDENTIFIER_OBSERVATION_COMPONENT_LIST = "observationcomponentlist"; export function setListIdentifier(identifier: string): void { localStorage.removeItem(IDENTIFIER_OBSERVATION_LIST); @@ -10,6 +11,7 @@ export function setListIdentifier(identifier: string): void { localStorage.removeItem(IDENTIFIER_OBSERVATION_DASHBOARD_LIST); localStorage.removeItem(IDENTIFIER_OBSERVATION_REVIEW_LIST); localStorage.removeItem(IDENTIFIER_OBSERVATION_REVIEW_LIST_PRODUCT); + localStorage.removeItem(IDENTIFIER_OBSERVATION_COMPONENT_LIST); localStorage.setItem(identifier, "true"); } diff --git a/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberAdd.tsx b/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberAdd.tsx index 9b3de905e..1c64b829f 100644 --- a/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberAdd.tsx +++ b/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberAdd.tsx @@ -30,7 +30,7 @@ const ProductAuthorizationGroupMemberAdd = ({ id }: ProductAuthorizationGroupMem setOpen(false); }; - const [authorization_group, setAuthorizationGroup] = useState(); + const [authorizationGroup, setAuthorizationGroup] = useState(); const [role, setRole] = useState(); const resetState = () => { setAuthorizationGroup(undefined); @@ -43,7 +43,7 @@ const ProductAuthorizationGroupMemberAdd = ({ id }: ProductAuthorizationGroupMem const handleSaveContinue = (e: any) => { e.preventDefault(); // necessary to prevent default SaveButton submit logic const data = { - authorization_group: authorization_group, + authorization_group: authorizationGroup, role: role, }; add_product_authorization_group_member(data, false); @@ -52,7 +52,7 @@ const ProductAuthorizationGroupMemberAdd = ({ id }: ProductAuthorizationGroupMem const handleSaveClose = (e: any) => { e.preventDefault(); // necessary to prevent default SaveButton submit logic const data = { - authorization_group: authorization_group, + authorization_group: authorizationGroup, role: role, }; add_product_authorization_group_member(data, true); diff --git a/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberEdit.tsx 
b/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberEdit.tsx index 1459d461a..baddb7c85 100644 --- a/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberEdit.tsx +++ b/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberEdit.tsx @@ -1,11 +1,10 @@ import { Dialog, DialogContent, DialogTitle } from "@mui/material"; import { Fragment, useState } from "react"; -import { SaveButton, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; +import { SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; import { ROLE_CHOICES } from "../../access_control/types"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import EditButton from "../../commons/custom_fields/EditButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_required } from "../../commons/custom_validators"; import { AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; @@ -49,20 +48,16 @@ const ProductAuthorizationGroupMemberEdit = () => { setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); - return ( Edit authorization group member - }> + } + > diff --git a/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberEmbeddedList.tsx b/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberEmbeddedList.tsx index 7b92d88aa..ddfa88966 100644 --- a/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberEmbeddedList.tsx +++ b/frontend/src/core/product_authorization_group_members/ProductAuthorizationGroupMemberEmbeddedList.tsx @@ -1,7 +1,6 @@ import { Stack } from "@mui/material"; import { Datagrid, - Identifier, ListContextProvider, ResourceContextProvider, SelectField, @@ -14,8 +13,8 @@ import { PERMISSION_PRODUCT_AUTHORIZATION_GROUP_MEMBER_EDIT, ROLE_CHOICES, } from "../../access_control/types"; +import { AuthorizationGroupNameURLField } from "../../commons/custom_fields/AuthorizationGroupNameURLField"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; -import TextUrlField from "../../commons/custom_fields/TextUrlField"; import { getSettingListSize } from "../../commons/user_settings/functions"; import ProductAuthorizationGroupMemberDelete from "./ProductAuthorizationGroupMemberDelete"; import ProductAuthorizationGroupMemberEdit from "./ProductAuthorizationGroupMemberEdit"; @@ -24,10 +23,6 @@ type ProductAuthorizationGroupMemberEmbeddedListProps = { product: any; }; -const showAuthorizationGroup = (id: Identifier) => { - return "#/authorization_groups/" + id + "/show"; -}; - const ProductAuthorizationGroupMemberEmbeddedList = ({ product }: ProductAuthorizationGroupMemberEmbeddedListProps) => { const listContext = useListController({ filter: { product: Number(product.id) }, @@ -52,34 +47,24 @@ const ProductAuthorizationGroupMemberEmbeddedList = ({ product }: ProductAuthori rowClick={false} resource="product_authorization_group_members" > - ( - - )} /> ( - {product && - product.permissions.includes( - PERMISSION_PRODUCT_AUTHORIZATION_GROUP_MEMBER_EDIT - ) && } - {product && - product.permissions.includes( - PERMISSION_PRODUCT_AUTHORIZATION_GROUP_MEMBER_DELETE - ) && ( - - )} + {product?.permissions.includes( + PERMISSION_PRODUCT_AUTHORIZATION_GROUP_MEMBER_EDIT + ) && } + {product?.permissions.includes( + 
PERMISSION_PRODUCT_AUTHORIZATION_GROUP_MEMBER_DELETE + ) && ( + + )} )} /> diff --git a/frontend/src/core/product_groups/ProductGroupCreate.tsx b/frontend/src/core/product_groups/ProductGroupCreate.tsx index 185d1c66a..a27a4627f 100644 --- a/frontend/src/core/product_groups/ProductGroupCreate.tsx +++ b/frontend/src/core/product_groups/ProductGroupCreate.tsx @@ -1,301 +1,20 @@ -import { Divider, Stack, Typography } from "@mui/material"; -import { RichTextInput } from "ra-input-rich-text"; -import { Fragment } from "react"; -import { - BooleanInput, - Create, - FormDataConsumer, - NullableBooleanInput, - NumberInput, - ReferenceInput, - SimpleForm, -} from "react-admin"; +import { useState } from "react"; +import { Create, SimpleForm } from "react-admin"; -import { validate_0_999999, validate_255, validate_2048, validate_required_255 } from "../../commons/custom_validators"; -import { feature_license_management } from "../../commons/functions"; -import { AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; +import { transform_product_group_and_product } from "../functions"; +import { ProductGroupCreateEditComponent } from "./functions"; const ProductGroupCreate = () => { + const [description, setDescription] = useState(""); + const transform = (data: any) => { - if (!data.description) { - data.description = ""; - } - if (data.repository_branch_housekeeping_active) { - if (data.repository_branch_housekeeping_keep_inactive_days == "") { - data.repository_branch_housekeeping_keep_inactive_days = 1; - } - } else { - if (data.repository_branch_housekeeping_keep_inactive_days == "") { - data.repository_branch_housekeeping_keep_inactive_days = null; - } - } - if (!data.repository_branch_housekeeping_exempt_branches) { - data.repository_branch_housekeeping_exempt_branches = ""; - } - if (!data.notification_email_to) { - data.notification_email_to = ""; - } - if (!data.notification_ms_teams_webhook) { - data.notification_ms_teams_webhook = ""; - } - if (!data.notification_slack_webhook) { - data.notification_slack_webhook = ""; - } - if (data.security_gate_active) { - if (data.security_gate_threshold_critical == "") { - data.security_gate_threshold_critical = 0; - } - if (data.security_gate_threshold_high == "") { - data.security_gate_threshold_high = 0; - } - if (data.security_gate_threshold_medium == "") { - data.security_gate_threshold_medium = 0; - } - if (data.security_gate_threshold_low == "") { - data.security_gate_threshold_low = 0; - } - if (data.security_gate_threshold_none == "") { - data.security_gate_threshold_none = 0; - } - if (data.security_gate_threshold_unknown == "") { - data.security_gate_threshold_unknown = 0; - } - } else { - if (data.security_gate_threshold_critical == "") { - data.security_gate_threshold_critical = null; - } - if (data.security_gate_threshold_high == "") { - data.security_gate_threshold_high = null; - } - if (data.security_gate_threshold_medium == "") { - data.security_gate_threshold_medium = null; - } - if (data.security_gate_threshold_low == "") { - data.security_gate_threshold_low = null; - } - if (data.security_gate_threshold_none == "") { - data.security_gate_threshold_none = null; - } - if (data.security_gate_threshold_unknown == "") { - data.security_gate_threshold_unknown = null; - } - } - if (data.risk_acceptance_expiry_active) { - if (data.risk_acceptance_expiry_days == "") { - data.risk_acceptance_expiry_days = 30; - } - } else { - if (data.risk_acceptance_expiry_days == "") { - data.risk_acceptance_expiry_days = null; - } - } 
- return data; + return transform_product_group_and_product(data, description); }; return ( - - Product Group - - - - - - - - Housekeeping (for products) - - - - {({ formData }) => - formData.repository_branch_housekeeping_active && ( - - - - - ) - } - - - - - - Notifications (for products) - - - - - - - - - - - Security Gate (for products) - - - - {({ formData }) => - formData.security_gate_active && ( - - - - - - - - - ) - } - - - - - Review - - - - - - - - - Risk acceptance expiry - - - - {({ formData }) => - formData.risk_acceptance_expiry_active && ( - - - - ) - } - - - {feature_license_management() && ( - - - - License management - - - - - - )} + {" "} ); diff --git a/frontend/src/core/product_groups/ProductGroupEdit.tsx b/frontend/src/core/product_groups/ProductGroupEdit.tsx index 07005b5bb..2f36e7e64 100644 --- a/frontend/src/core/product_groups/ProductGroupEdit.tsx +++ b/frontend/src/core/product_groups/ProductGroupEdit.tsx @@ -1,311 +1,39 @@ -import { Divider, Stack, Typography } from "@mui/material"; -import { RichTextInput } from "ra-input-rich-text"; -import { Fragment } from "react"; -import { - BooleanInput, - DeleteButton, - Edit, - FormDataConsumer, - NullableBooleanInput, - NumberInput, - ReferenceInput, - SaveButton, - SimpleForm, - Toolbar, - useRecordContext, -} from "react-admin"; +import { useState } from "react"; +import { DeleteButton, Edit, SaveButton, SimpleForm, Toolbar, WithRecord, useRecordContext } from "react-admin"; import { PERMISSION_PRODUCT_DELETE } from "../../access_control/types"; -import { validate_0_999999, validate_255, validate_2048, validate_required_255 } from "../../commons/custom_validators"; -import { feature_license_management } from "../../commons/functions"; -import { AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; +import { transform_product_group_and_product } from "../functions"; +import { ProductGroupCreateEditComponent } from "./functions"; const CustomToolbar = () => { const product = useRecordContext(); return ( - - {product && product.permissions.includes(PERMISSION_PRODUCT_DELETE) && ( - - )} + + {product?.permissions.includes(PERMISSION_PRODUCT_DELETE) && } ); }; const ProductGroupEdit = () => { + const [description, setDescription] = useState(""); + const transform = (data: any) => { - if (!data.description) { - data.description = ""; - } - if (data.repository_branch_housekeeping_active) { - if (data.repository_branch_housekeeping_keep_inactive_days == "") { - data.repository_branch_housekeeping_keep_inactive_days = 1; - } - } else { - if (data.repository_branch_housekeeping_keep_inactive_days == "") { - data.repository_branch_housekeeping_keep_inactive_days = null; - } - } - if (!data.repository_branch_housekeeping_exempt_branches) { - data.repository_branch_housekeeping_exempt_branches = ""; - } - if (!data.notification_email_to) { - data.notification_email_to = ""; - } - if (!data.notification_ms_teams_webhook) { - data.notification_ms_teams_webhook = ""; - } - if (!data.notification_slack_webhook) { - data.notification_slack_webhook = ""; - } - if (data.security_gate_active) { - if (data.security_gate_threshold_critical == "") { - data.security_gate_threshold_critical = 0; - } - if (data.security_gate_threshold_high == "") { - data.security_gate_threshold_high = 0; - } - if (data.security_gate_threshold_medium == "") { - data.security_gate_threshold_medium = 0; - } - if (data.security_gate_threshold_low == "") { - data.security_gate_threshold_low = 0; - } - if (data.security_gate_threshold_none == 
"") { - data.security_gate_threshold_none = 0; - } - if (data.security_gate_threshold_unknown == "") { - data.security_gate_threshold_unknown = 0; - } - } else { - if (data.security_gate_threshold_critical == "") { - data.security_gate_threshold_critical = null; - } - if (data.security_gate_threshold_high == "") { - data.security_gate_threshold_high = null; - } - if (data.security_gate_threshold_medium == "") { - data.security_gate_threshold_medium = null; - } - if (data.security_gate_threshold_low == "") { - data.security_gate_threshold_low = null; - } - if (data.security_gate_threshold_none == "") { - data.security_gate_threshold_none = null; - } - if (data.security_gate_threshold_unknown == "") { - data.security_gate_threshold_unknown = null; - } - } - if (data.risk_acceptance_expiry_active) { - if (data.risk_acceptance_expiry_days == "") { - data.risk_acceptance_expiry_days = 30; - } - } else { - if (data.risk_acceptance_expiry_days == "") { - data.risk_acceptance_expiry_days = null; - } - } - return data; + return transform_product_group_and_product(data, description); }; return ( }> - - Product Group - - - - - - Housekeeping (for products) - - - - {({ formData }) => - formData.repository_branch_housekeeping_active && ( - - - - - ) - } - - - - Notifications (for products) - - - - - - - - - Security Gate (for products) - - - - {({ formData }) => - formData.security_gate_active && ( - - - - - - - - - ) - } - - - - Review - - - - ( + + )} /> - - - - - Risk acceptance expiry - - - - {({ formData }) => - formData.risk_acceptance_expiry_active && ( - - - - ) - } - - - {feature_license_management() && ( - - - - License management - - - - - - )} ); diff --git a/frontend/src/core/product_groups/ProductGroupEmbeddedList.tsx b/frontend/src/core/product_groups/ProductGroupEmbeddedList.tsx index a00a75460..957ba85af 100644 --- a/frontend/src/core/product_groups/ProductGroupEmbeddedList.tsx +++ b/frontend/src/core/product_groups/ProductGroupEmbeddedList.tsx @@ -60,8 +60,10 @@ const ProductGroupEmbeddedList = ({ license_policy }: ProductGroupEmbeddedListPr > - - {feature_license_management() && } + + {feature_license_management() && ( + + )} diff --git a/frontend/src/core/product_groups/ProductGroupHeader.tsx b/frontend/src/core/product_groups/ProductGroupHeader.tsx index 7308a9991..9564d6a64 100644 --- a/frontend/src/core/product_groups/ProductGroupHeader.tsx +++ b/frontend/src/core/product_groups/ProductGroupHeader.tsx @@ -2,8 +2,10 @@ import { Box, Paper, Stack, Typography } from "@mui/material"; import { Labeled, RecordContextProvider, TextField, useGetOne } from "react-admin"; import { useParams } from "react-router-dom"; +import product_groups from "."; import LicensesCountField from "../../commons/custom_fields/LicensesCountField"; import ObservationsCountField from "../../commons/custom_fields/ObservationsCountField"; +import { feature_license_management } from "../../commons/functions"; import { useStyles } from "../../commons/layout/themes"; import { ProductGroup } from "../types"; @@ -20,8 +22,9 @@ const ProductGroupHeader = () => { marginTop: 2, }} > - - Product Group + + +   Product Group { - + - {product_group && + {feature_license_management() && + product_group && product_group.forbidden_licenses_count + product_group.review_required_licenses_count + product_group.unknown_licenses_count + @@ -45,7 +49,7 @@ const ProductGroupHeader = () => { product_group.ignored_licenses_count > 0 && ( - + )} diff --git a/frontend/src/core/product_groups/ProductGroupList.tsx 
b/frontend/src/core/product_groups/ProductGroupList.tsx index 7fba47074..ce1d4638d 100644 --- a/frontend/src/core/product_groups/ProductGroupList.tsx +++ b/frontend/src/core/product_groups/ProductGroupList.tsx @@ -36,9 +36,11 @@ const ProductGroupList = () => { > - - - {feature_license_management() && } + + + {feature_license_management() && ( + + )} diff --git a/frontend/src/core/product_groups/ProductGroupShow.tsx b/frontend/src/core/product_groups/ProductGroupShow.tsx index 2578a85a6..602cdcb3a 100644 --- a/frontend/src/core/product_groups/ProductGroupShow.tsx +++ b/frontend/src/core/product_groups/ProductGroupShow.tsx @@ -12,7 +12,6 @@ import { NumberField, PrevNextButtons, ReferenceField, - RichTextField, Show, Tab, TabbedShowLayout, @@ -23,8 +22,8 @@ import { useRecordContext, } from "react-admin"; -import CreateProductApiToken from "../../access_control/product_api_token/ProductApiTokenCreate"; -import ProductApiTokenEmbeddedList from "../../access_control/product_api_token/ProductApiTokenEmbeddedList"; +import ApiTokenCreate from "../../access_control/api_tokens/ApiTokenCreate"; +import ApiTokenEmbeddedList from "../../access_control/api_tokens/ApiTokenEmbeddedList"; import { PERMISSION_PRODUCT_API_TOKEN_CREATE, PERMISSION_PRODUCT_AUTHORIZATION_GROUP_MEMBER_CREATE, @@ -33,6 +32,8 @@ import { PERMISSION_PRODUCT_RULE_APPLY, PERMISSION_PRODUCT_RULE_CREATE, } from "../../access_control/types"; +import MarkdownField from "../../commons/custom_fields/MarkdownField"; +import { feature_email } from "../../commons/functions"; import MetricsHeader from "../../metrics/MetricsHeader"; import MetricsSeveritiesCurrent from "../../metrics/MetricsSeveritiesCurrent"; import MetricsSeveritiesTimeline from "../../metrics/MetricsSeveritiesTimeLine"; @@ -47,6 +48,7 @@ import ProductMemberAdd from "../product_members/ProductMemberAdd"; import ProductMemberEmbeddedList from "../product_members/ProductMemberEmbeddedList"; import product from "../products"; import ExportMenu from "../products/ExportMenu"; +import ProductCreateDialog from "../products/ProductCreateDialog"; import ProductEmbeddedList from "../products/ProductEmbeddedList"; import ProductGroupHeader from "./ProductGroupHeader"; import ProductGroupReviews from "./ProductGroupReviews"; @@ -63,7 +65,7 @@ const ShowActions = () => { queryOptions={{ meta: { api_resource: "product_group_names" } }} /> - {product_group && product_group.permissions.includes(PERMISSION_PRODUCT_GROUP_EDIT) && } + {product_group?.permissions.includes(PERMISSION_PRODUCT_GROUP_EDIT) && } ); @@ -78,6 +80,7 @@ const ProductGroupShow = () => { render={(product_group) => ( }> }> + }> @@ -117,7 +120,7 @@ const ProductGroupShow = () => { {product_group.description && ( - + )} @@ -125,38 +128,42 @@ const ProductGroupShow = () => { - Housekeeping (for products) + Housekeeping - - - - {product_group.repository_branch_housekeeping_active == true && ( - - - - - - - - - )} + + + + + {product_group.repository_branch_housekeeping_active && + product_group.repository_branch_housekeeping_keep_inactive_days && ( + + + + )} + {product_group.repository_branch_housekeeping_active && + product_group.repository_branch_housekeeping_exempt_branches && ( + + + + )} + )} - {(product_group.notification_email_to || + {((feature_email() && product_group.notification_email_to) || product_group.notification_ms_teams_webhook || product_group.notification_slack_webhook) && ( - Notifications (for products) + Notifications - {product_group.notification_email_to && ( + {feature_email() && 
product_group.notification_email_to && ( @@ -178,7 +185,7 @@ const ProductGroupShow = () => { - Security Gate (for products) + Security Gate { valueLabelTrue="Product group specific" /> - {product_group.security_gate_active == true && ( + {product_group.security_gate_active && ( @@ -212,21 +219,34 @@ const ProductGroupShow = () => { )} - - - Review - - - - - - - - - - - - + {(product_group.assessments_need_approval || + product_group.product_rules_need_approval || + product_group.new_observations_in_review) && ( + + + + Review + + + {product_group.assessments_need_approval && ( + + + + )} + {product_group.product_rules_need_approval && ( + + + + )} + {product_group.new_observations_in_review && ( + + + + )} + + + )} + {product_group.risk_acceptance_expiry_active != null && ( @@ -241,7 +261,7 @@ const ProductGroupShow = () => { valueLabelTrue="Product group specific" /> - {product_group.risk_acceptance_expiry_active == true && ( + {product_group.risk_acceptance_expiry_active && ( @@ -250,6 +270,7 @@ const ProductGroupShow = () => { )} )} + {product_group.license_policy && ( @@ -277,14 +298,12 @@ const ProductGroupShow = () => { alignItems: "center", }} > - {product_group && - product_group.permissions.includes(PERMISSION_PRODUCT_RULE_CREATE) && ( - - )} - {product_group && - product_group.permissions.includes(PERMISSION_PRODUCT_RULE_APPLY) && ( - - )} + {product_group?.permissions.includes(PERMISSION_PRODUCT_RULE_CREATE) && ( + + )} + {product_group?.permissions.includes(PERMISSION_PRODUCT_RULE_APPLY) && ( + + )} @@ -292,28 +311,25 @@ const ProductGroupShow = () => { User members - {product_group && - product_group.permissions.includes(PERMISSION_PRODUCT_MEMBER_CREATE) && ( - - )} + {product_group?.permissions.includes(PERMISSION_PRODUCT_MEMBER_CREATE) && ( + + )} Authorization group members - {product_group && - product_group.permissions.includes( - PERMISSION_PRODUCT_AUTHORIZATION_GROUP_MEMBER_CREATE - ) && } + {product_group?.permissions.includes( + PERMISSION_PRODUCT_AUTHORIZATION_GROUP_MEMBER_CREATE + ) && } }> - {product_group && - product_group.permissions.includes(PERMISSION_PRODUCT_API_TOKEN_CREATE) && ( - - )} - + {product_group?.permissions.includes(PERMISSION_PRODUCT_API_TOKEN_CREATE) && ( + + )} + )} diff --git a/frontend/src/core/product_groups/functions.tsx b/frontend/src/core/product_groups/functions.tsx new file mode 100644 index 000000000..07c14323d --- /dev/null +++ b/frontend/src/core/product_groups/functions.tsx @@ -0,0 +1,228 @@ +import { Divider, Stack, Typography } from "@mui/material"; +import { Fragment } from "react"; +import { BooleanInput, FormDataConsumer, NullableBooleanInput, NumberInput, ReferenceInput } from "react-admin"; + +import product_groups from "."; +import MarkdownEdit from "../../commons/custom_fields/MarkdownEdit"; +import { validate_0_999999, validate_255, validate_2048, validate_required_255 } from "../../commons/custom_validators"; +import { feature_email, feature_license_management } from "../../commons/functions"; +import { AutocompleteInputWide, TextInputExtraWide, TextInputWide } from "../../commons/layout/themes"; + +export type ProductGroupCreateEditComponentProps = { + initialDescription: string; + setDescription: (value: string) => void; +}; + +export const ProductGroupCreateEditComponent = ({ + initialDescription, + setDescription, +}: ProductGroupCreateEditComponentProps) => { + return ( + + + +   Product Group + + + + + + + + Housekeeping + + + + {({ formData }) => + formData.repository_branch_housekeeping_active && ( + + + + + ) + 
} + + + + + + Notifications + + + {feature_email() && ( + + )} + + + + + + + + Security Gate + + + + {({ formData }) => + formData.security_gate_active && ( + + + + + + + + + ) + } + + + + + Review + + + + + + + + + Risk acceptance expiry + + + + {({ formData }) => + formData.risk_acceptance_expiry_active && ( + + + + ) + } + + + {feature_license_management() && ( + + + + License management + + + + + + )} + + ); +}; diff --git a/frontend/src/core/product_members/ProductMemberEdit.tsx b/frontend/src/core/product_members/ProductMemberEdit.tsx index 7c12e1a25..c701dad0e 100644 --- a/frontend/src/core/product_members/ProductMemberEdit.tsx +++ b/frontend/src/core/product_members/ProductMemberEdit.tsx @@ -1,11 +1,10 @@ import { Dialog, DialogContent, DialogTitle } from "@mui/material"; import { Fragment, useState } from "react"; -import { SaveButton, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; +import { SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; import { ROLE_CHOICES } from "../../access_control/types"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import EditButton from "../../commons/custom_fields/EditButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_required } from "../../commons/custom_validators"; import { AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; @@ -50,20 +49,13 @@ const ProductMemberEdit = () => { setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); - return ( Edit user member - }> + }> diff --git a/frontend/src/core/product_members/ProductMemberEmbeddedList.tsx b/frontend/src/core/product_members/ProductMemberEmbeddedList.tsx index cfffad008..635477c11 100644 --- a/frontend/src/core/product_members/ProductMemberEmbeddedList.tsx +++ b/frontend/src/core/product_members/ProductMemberEmbeddedList.tsx @@ -1,7 +1,6 @@ import { Stack } from "@mui/material"; import { Datagrid, - Identifier, ListContextProvider, ResourceContextProvider, SelectField, @@ -15,7 +14,7 @@ import { ROLE_CHOICES, } from "../../access_control/types"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; -import TextUrlField from "../../commons/custom_fields/TextUrlField"; +import { UserFullNameURLField } from "../../commons/custom_fields/UserFullNameURLField"; import { getSettingListSize } from "../../commons/user_settings/functions"; import ProductMemberDelete from "./ProductMemberDelete"; import ProductMemberEdit from "./ProductMemberEdit"; @@ -24,10 +23,6 @@ type ProductMemberEmbeddedListProps = { product: any; }; -const showUser = (id: Identifier) => { - return "#/users/" + id + "/show"; -}; - const ProductMemberEmbeddedList = ({ product }: ProductMemberEmbeddedListProps) => { const listContext = useListController({ filter: { product: Number(product.id) }, @@ -52,24 +47,15 @@ const ProductMemberEmbeddedList = ({ product }: ProductMemberEmbeddedListProps) rowClick={false} resource="product_members" > - ( - - )} - /> + ( - {product && product.permissions.includes(PERMISSION_PRODUCT_MEMBER_EDIT) && ( + {product?.permissions.includes(PERMISSION_PRODUCT_MEMBER_EDIT) && ( )} - {product && product.permissions.includes(PERMISSION_PRODUCT_MEMBER_DELETE) && ( + {product?.permissions.includes(PERMISSION_PRODUCT_MEMBER_DELETE) && ( )} diff --git a/frontend/src/core/products/ExportMenu.tsx b/frontend/src/core/products/ExportMenu.tsx index aee2068d9..922d60422 100644 --- 
a/frontend/src/core/products/ExportMenu.tsx +++ b/frontend/src/core/products/ExportMenu.tsx @@ -1,6 +1,7 @@ import { faFileCsv, faFileExcel } from "@fortawesome/free-solid-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import DownloadIcon from "@mui/icons-material/Download"; +import SyncIcon from "@mui/icons-material/Sync"; import ViewQuiltIcon from "@mui/icons-material/ViewQuilt"; import { ListItemIcon } from "@mui/material"; import Button from "@mui/material/Button"; @@ -11,6 +12,7 @@ import { useNotify } from "react-admin"; import axios_instance from "../../access_control/auth_provider/axios_instance"; import { feature_license_management, getIconAndFontColor } from "../../commons/functions"; +import { httpClient } from "../../commons/ra-data-django-rest-framework"; interface ExportMenuProps { product: any; @@ -97,7 +99,9 @@ const ExportMenu = (props: ExportMenuProps) => { const exportOpenObservationsExcel = async () => { exportDataExcel( - "/products/" + props.product.id + "/export_observations_excel/?status=Open", + "/products/" + + props.product.id + + "/export_observations_excel/?status=Open&status=Affected&status=In%20review", "open_observations.xlsx", "Observations" ); @@ -113,7 +117,9 @@ const ExportMenu = (props: ExportMenuProps) => { const exportOpenObservationsCsv = async () => { exportDataCsv( - "/products/" + props.product.id + "/export_observations_csv/?status=Open", + "/products/" + + props.product.id + + "/export_observations_csv/?status=Open&status=Affected&status=In%20review", "open_observations.csv", "Observations" ); @@ -147,6 +153,19 @@ const ExportMenu = (props: ExportMenuProps) => { ); }; + const synchronizeIssues = async () => { + httpClient(window.__RUNTIME_CONFIG__.API_BASE_URL + "/products/" + props.product.id + "/synchronize_issues/", { + method: "POST", + }) + .then(() => { + notify("Synchronization of issues started in background", { type: "success" }); + }) + .catch((error) => { + notify(error.message, { type: "warning" }); + }); + handleClose(); + }; + const showLicenseExport = (): boolean => { return ( feature_license_management() && @@ -187,13 +206,13 @@ const ExportMenu = (props: ExportMenuProps) => { - Open observations / Excel + Active observations / Excel - Open observations / CSV + Active observations / CSV @@ -223,7 +242,10 @@ const ExportMenu = (props: ExportMenuProps) => { Metrics / CSV {!props.is_product_group && ( - + @@ -239,13 +261,21 @@ const ExportMenu = (props: ExportMenuProps) => { )} {showLicenseExport() && ( - + Licenses / CSV )} + {!props.is_product_group && props.product?.issue_tracker_active && ( + + + + + Synchronize issues + + )} ); diff --git a/frontend/src/core/products/ProductCreate.tsx b/frontend/src/core/products/ProductCreate.tsx index 2917b4c76..d4fbe1ced 100644 --- a/frontend/src/core/products/ProductCreate.tsx +++ b/frontend/src/core/products/ProductCreate.tsx @@ -1,429 +1,19 @@ -import { Divider, Stack, Typography } from "@mui/material"; -import { RichTextInput } from "ra-input-rich-text"; -import { Fragment } from "react"; -import { - BooleanInput, - Create, - FormDataConsumer, - NullableBooleanInput, - NumberInput, - ReferenceInput, - SimpleForm, -} from "react-admin"; +import { useState } from "react"; +import { Create, SimpleForm } from "react-admin"; -import { validate_0_999999, validate_255, validate_2048, validate_required_255 } from "../../commons/custom_validators"; -import { feature_license_management } from "../../commons/functions"; -import { AutocompleteInputMedium, 
AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; -import { ISSUE_TRACKER_TYPE_CHOICES, OBSERVATION_SEVERITY_CHOICES } from "../types"; +import { ProductCreateEditComponent, transform } from "./functions"; const ProductCreate = () => { - const transform = (data: any) => { - if (!data.description) { - data.description = ""; - } - if (!data.purl) { - data.purl = ""; - } - if (!data.cpe23) { - data.cpe23 = ""; - } - if (!data.repository_prefix) { - data.repository_prefix = ""; - } - if (data.repository_branch_housekeeping_active) { - if (data.repository_branch_housekeeping_keep_inactive_days == "") { - data.repository_branch_housekeeping_keep_inactive_days = 1; - } - } else { - if (data.repository_branch_housekeeping_keep_inactive_days == "") { - data.repository_branch_housekeeping_keep_inactive_days = null; - } - } - if (!data.repository_branch_housekeeping_exempt_branches) { - data.repository_branch_housekeeping_exempt_branches = ""; - } - if (!data.notification_email_to) { - data.notification_email_to = ""; - } - if (!data.notification_ms_teams_webhook) { - data.notification_ms_teams_webhook = ""; - } - if (!data.notification_slack_webhook) { - data.notification_slack_webhook = ""; - } - if (data.security_gate_active) { - if (data.security_gate_threshold_critical == "") { - data.security_gate_threshold_critical = 0; - } - if (data.security_gate_threshold_high == "") { - data.security_gate_threshold_high = 0; - } - if (data.security_gate_threshold_medium == "") { - data.security_gate_threshold_medium = 0; - } - if (data.security_gate_threshold_low == "") { - data.security_gate_threshold_low = 0; - } - if (data.security_gate_threshold_none == "") { - data.security_gate_threshold_none = 0; - } - if (data.security_gate_threshold_unknown == "") { - data.security_gate_threshold_unknown = 0; - } - } else { - if (data.security_gate_threshold_critical == "") { - data.security_gate_threshold_critical = null; - } - if (data.security_gate_threshold_high == "") { - data.security_gate_threshold_high = null; - } - if (data.security_gate_threshold_medium == "") { - data.security_gate_threshold_medium = null; - } - if (data.security_gate_threshold_low == "") { - data.security_gate_threshold_low = null; - } - if (data.security_gate_threshold_none == "") { - data.security_gate_threshold_none = null; - } - if (data.security_gate_threshold_unknown == "") { - data.security_gate_threshold_unknown = null; - } - } - if (!data.issue_tracker_type) { - data.issue_tracker_type = ""; - } - if (!data.issue_tracker_base_url) { - data.issue_tracker_base_url = ""; - } - if (!data.issue_tracker_api_key) { - data.issue_tracker_api_key = ""; - } - if (!data.issue_tracker_project_id) { - data.issue_tracker_project_id = ""; - } - if (!data.issue_tracker_labels) { - data.issue_tracker_labels = ""; - } - if (!data.issue_tracker_username) { - data.issue_tracker_username = ""; - } - if (!data.issue_tracker_issue_type) { - data.issue_tracker_issue_type = ""; - } - if (!data.issue_tracker_status_closed) { - data.issue_tracker_status_closed = ""; - } - if (!data.issue_tracker_minimum_severity) { - data.issue_tracker_minimum_severity = ""; - } - if (data.risk_acceptance_expiry_active) { - if (data.risk_acceptance_expiry_days == "") { - data.risk_acceptance_expiry_days = 30; - } - } else { - if (data.risk_acceptance_expiry_days == "") { - data.risk_acceptance_expiry_days = null; - } - } - return data; + const [description, setDescription] = useState(""); + + const create_transform = (data: any) => { + return 
transform(data, description); }; return ( - + - - Product - - - - - - - - - - - - - Rules - - - - - - - Source code repository - - - - - - - {({ formData }) => - formData.repository_branch_housekeeping_active && ( - - - - - ) - } - - - - - Notifications - - - - - - - - - - Security Gate - - - - {({ formData }) => - formData.security_gate_active && ( - - - - - - - - - ) - } - - - - - Issue Tracker - - - - - {({ formData }) => - formData.issue_tracker_type && ( - - - - - - - - {({ formData }) => - formData.issue_tracker_type == "Jira" && ( - - - - - - ) - } - - - ) - } - - - - - Review - - - - - - - - Risk acceptance expiry - - - - {({ formData }) => - formData.risk_acceptance_expiry_active && ( - - - - ) - } - - - {feature_license_management() && ( - - - - License management - - - - - - )} + ); diff --git a/frontend/src/core/products/ProductCreateDialog.tsx b/frontend/src/core/products/ProductCreateDialog.tsx new file mode 100644 index 000000000..8fa11454d --- /dev/null +++ b/frontend/src/core/products/ProductCreateDialog.tsx @@ -0,0 +1,68 @@ +import { Dialog, DialogContent, DialogTitle } from "@mui/material"; +import { Fragment, useRef, useState } from "react"; +import { CreateBase, SimpleForm, useCreate, useNotify, useRefresh } from "react-admin"; + +import AddButton from "../../commons/custom_fields/AddButton"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { ProductCreateEditComponent } from "../products/functions"; + +export type ProductCreateDialogProps = { + productGroupId: any; +}; + +const ProductCreateDialog = ({ productGroupId }: ProductCreateDialogProps) => { + const dialogRef = useRef(null); + const [description, setDescription] = useState(""); + const [open, setOpen] = useState(false); + const refresh = useRefresh(); + const notify = useNotify(); + const [create] = useCreate(); + const handleOpen = () => setOpen(true); + const handleCancel = () => setOpen(false); + const handleClose = (event: object, reason: string) => { + if (reason && reason == "backdropClick") return; + setOpen(false); + }; + + const createProduct = (data: any) => { + data.product_group = productGroupId; + data.description = description; + + create( + "products", + { data: data }, + { + onSuccess: () => { + refresh(); + notify("Product added", { type: "success" }); + setOpen(false); + }, + onError: (error: any) => { + notify(error.message, { type: "warning" }); + }, + } + ); + }; + + return ( + + + + Add product + + + }> + + + + + + + ); +}; + +export default ProductCreateDialog; diff --git a/frontend/src/core/products/ProductEdit.tsx b/frontend/src/core/products/ProductEdit.tsx index 74d36a937..73715182f 100644 --- a/frontend/src/core/products/ProductEdit.tsx +++ b/frontend/src/core/products/ProductEdit.tsx @@ -1,454 +1,38 @@ -import { Divider, Stack, Typography } from "@mui/material"; -import { RichTextInput } from "ra-input-rich-text"; -import { Fragment } from "react"; -import { - BooleanInput, - DeleteButton, - Edit, - FormDataConsumer, - NullableBooleanInput, - NumberInput, - ReferenceInput, - SaveButton, - SimpleForm, - Toolbar, - WithRecord, - useRecordContext, -} from "react-admin"; +import { useState } from "react"; +import { DeleteButton, Edit, SaveButton, SimpleForm, Toolbar, WithRecord, useRecordContext } from "react-admin"; import { PERMISSION_PRODUCT_DELETE } from "../../access_control/types"; -import { validate_0_999999, validate_255, validate_2048, validate_required_255 } from "../../commons/custom_validators"; -import { 
feature_license_management } from "../../commons/functions"; -import { AutocompleteInputMedium, AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; -import { ISSUE_TRACKER_TYPE_CHOICES, OBSERVATION_SEVERITY_CHOICES } from "../types"; +import { ProductCreateEditComponent, transform } from "./functions"; const CustomToolbar = () => { const product = useRecordContext(); return ( - - {product && product.permissions.includes(PERMISSION_PRODUCT_DELETE) && ( - - )} + + {product?.permissions.includes(PERMISSION_PRODUCT_DELETE) && } ); }; const ProductEdit = () => { - const transform = (data: any) => { - if (!data.description) { - data.description = ""; - } - if (!data.purl) { - data.purl = ""; - } - if (!data.cpe23) { - data.cpe23 = ""; - } - if (!data.repository_prefix) { - data.repository_prefix = ""; - } - if (data.repository_branch_housekeeping_active) { - if (data.repository_branch_housekeeping_keep_inactive_days == "") { - data.repository_branch_housekeeping_keep_inactive_days = 1; - } - } else { - if (data.repository_branch_housekeeping_keep_inactive_days == "") { - data.repository_branch_housekeeping_keep_inactive_days = null; - } - } - if (!data.repository_branch_housekeeping_exempt_branches) { - data.repository_branch_housekeeping_exempt_branches = ""; - } - if (!data.notification_email_to) { - data.notification_email_to = ""; - } - if (!data.notification_ms_teams_webhook) { - data.notification_ms_teams_webhook = ""; - } - if (!data.notification_slack_webhook) { - data.notification_slack_webhook = ""; - } - if (data.security_gate_active) { - if (data.security_gate_threshold_critical == "") { - data.security_gate_threshold_critical = 0; - } - if (data.security_gate_threshold_high == "") { - data.security_gate_threshold_high = 0; - } - if (data.security_gate_threshold_medium == "") { - data.security_gate_threshold_medium = 0; - } - if (data.security_gate_threshold_low == "") { - data.security_gate_threshold_low = 0; - } - if (data.security_gate_threshold_none == "") { - data.security_gate_threshold_none = 0; - } - if (data.security_gate_threshold_unknown == "") { - data.security_gate_threshold_unknown = 0; - } - } else { - if (data.security_gate_threshold_critical == "") { - data.security_gate_threshold_critical = null; - } - if (data.security_gate_threshold_high == "") { - data.security_gate_threshold_high = null; - } - if (data.security_gate_threshold_medium == "") { - data.security_gate_threshold_medium = null; - } - if (data.security_gate_threshold_low == "") { - data.security_gate_threshold_low = null; - } - if (data.security_gate_threshold_none == "") { - data.security_gate_threshold_none = null; - } - if (data.security_gate_threshold_unknown == "") { - data.security_gate_threshold_unknown = null; - } - } - if (!data.issue_tracker_type) { - data.issue_tracker_type = ""; - } - if (!data.issue_tracker_base_url) { - data.issue_tracker_base_url = ""; - } - if (!data.issue_tracker_api_key) { - data.issue_tracker_api_key = ""; - } - if (!data.issue_tracker_project_id) { - data.issue_tracker_project_id = ""; - } - if (!data.issue_tracker_labels) { - data.issue_tracker_labels = ""; - } - if (!data.issue_tracker_username) { - data.issue_tracker_username = ""; - } - if (!data.issue_tracker_issue_type) { - data.issue_tracker_issue_type = ""; - } - if (!data.issue_tracker_status_closed) { - data.issue_tracker_status_closed = ""; - } - if (!data.issue_tracker_minimum_severity) { - data.issue_tracker_minimum_severity = ""; - } - if (data.risk_acceptance_expiry_active) { - if 
(data.risk_acceptance_expiry_days == "") { - data.risk_acceptance_expiry_days = 30; - } - } else { - if (data.risk_acceptance_expiry_days == "") { - data.risk_acceptance_expiry_days = null; - } - } - return data; + const [description, setDescription] = useState(""); + + const edit_transform = (data: any) => { + return transform(data, description); }; return ( - + }> - - Product - - - - - - - - - - - - - Rules - - - - - - - Source code repository and housekeeping - - ( - - - + )} /> - - - {({ formData }) => - formData.repository_branch_housekeeping_active && ( - - - - - ) - } - - - - - - Notifications - - - - - - - - - - - Security Gate - - - - {({ formData }) => - formData.security_gate_active && ( - - - - - - - - - ) - } - - - - - - Issue Tracker - - - - - {({ formData }) => - formData.issue_tracker_type && ( - - - - - - - - {({ formData }) => - formData.issue_tracker_type == "Jira" && ( - - - - - - ) - } - - - ) - } - - - - - Review - - - - - - - - - Risk acceptance expiry - - - - {({ formData }) => - formData.risk_acceptance_expiry_active && ( - - - - ) - } - - - {feature_license_management() && ( - - - - License management - - - - - - )} ); diff --git a/frontend/src/core/products/ProductEmbeddedList.tsx b/frontend/src/core/products/ProductEmbeddedList.tsx index 8d43682ad..4653e880f 100644 --- a/frontend/src/core/products/ProductEmbeddedList.tsx +++ b/frontend/src/core/products/ProductEmbeddedList.tsx @@ -79,7 +79,7 @@ const ProductEmbeddedList = ({ product_group, license_policy }: ProductEmbeddedL sortable={false} /> - + {feature_license_management() && ((product_group && product_group.forbidden_licenses_count + @@ -88,7 +88,9 @@ const ProductEmbeddedList = ({ product_group, license_policy }: ProductEmbeddedL product_group.allowed_licenses_count + product_group.ignored_licenses_count > 0) || - license_policy) && } + license_policy) && ( + + )} label="Last observation change" sortBy="last_observation_change" diff --git a/frontend/src/core/products/ProductHeader.tsx b/frontend/src/core/products/ProductHeader.tsx index 402705d05..3b0f9a6dd 100644 --- a/frontend/src/core/products/ProductHeader.tsx +++ b/frontend/src/core/products/ProductHeader.tsx @@ -1,10 +1,12 @@ import { Box, Paper, Stack, Typography } from "@mui/material"; -import { Labeled, RecordContextProvider, TextField, useGetOne } from "react-admin"; +import { Labeled, RecordContextProvider, ReferenceField, TextField, useGetOne } from "react-admin"; import { useParams } from "react-router-dom"; +import products from "."; import LicensesCountField from "../../commons/custom_fields/LicensesCountField"; import ObservationsCountField from "../../commons/custom_fields/ObservationsCountField"; import { SecurityGateTextField } from "../../commons/custom_fields/SecurityGateTextField"; +import { feature_license_management } from "../../commons/functions"; import { useStyles } from "../../commons/layout/themes"; import { Product } from "../types"; @@ -14,17 +16,17 @@ const ProductHeader = () => { const { classes } = useStyles(); function get_open_observations_label(product: Product | undefined) { - if (!product || product.repository_default_branch == null) { - return "Open observations"; + if (product?.repository_default_branch == null) { + return "Active observations"; } - return "Open observations (" + product.repository_default_branch_name + ")"; + return "Active observations (" + product.repository_default_branch_name + ")"; } function get_licenses_label(product: Product | undefined) { - if (!product || 
product.repository_default_branch == null) { - return "Licenses"; + if (product?.repository_default_branch == null) { + return "Licenses / Components"; } - return "Licenses (" + product.repository_default_branch_name + ")"; + return "Licenses / Components (" + product.repository_default_branch_name + ")"; } return ( @@ -35,8 +37,9 @@ const ProductHeader = () => { marginTop: 2, }} > - - Product + + +   Product { justifyContent: "space-between", }} > - - - - {product && product.security_gate_passed != undefined && ( + + {product?.product_group && ( + + + + + + )} + + + + + {product?.security_gate_passed != undefined && ( @@ -57,7 +75,8 @@ const ProductHeader = () => { - {product && + {feature_license_management() && + product && product.forbidden_licenses_count + product.review_required_licenses_count + product.unknown_licenses_count + diff --git a/frontend/src/core/products/ProductList.tsx b/frontend/src/core/products/ProductList.tsx index adffe0723..2b300a41e 100644 --- a/frontend/src/core/products/ProductList.tsx +++ b/frontend/src/core/products/ProductList.tsx @@ -81,8 +81,10 @@ const ProductList = () => { sortable={false} /> - - {feature_license_management() && } + + {feature_license_management() && ( + + )} label="Last observation change" sortBy="last_observation_change" diff --git a/frontend/src/core/products/ProductReviews.tsx b/frontend/src/core/products/ProductReviews.tsx index 5d37afc06..6ecd1d9be 100644 --- a/frontend/src/core/products/ProductReviews.tsx +++ b/frontend/src/core/products/ProductReviews.tsx @@ -28,6 +28,7 @@ const ProductReviews = ({ product }: ProductReviewsProps) => { product.observation_log_approvals == 0 && product.product_rule_approvals == 0 } + disableGutters > }> @@ -48,6 +49,7 @@ const ProductReviews = ({ product }: ProductReviewsProps) => { product.observation_reviews == 0 && product.product_rule_approvals == 0 } + disableGutters > }> @@ -72,6 +74,7 @@ const ProductReviews = ({ product }: ProductReviewsProps) => { product.observation_log_approvals == 0 && product.observation_reviews == 0 } + disableGutters > }> diff --git a/frontend/src/core/products/ProductShow.tsx b/frontend/src/core/products/ProductShow.tsx index 0e8365c1f..0d77669ec 100644 --- a/frontend/src/core/products/ProductShow.tsx +++ b/frontend/src/core/products/ProductShow.tsx @@ -4,7 +4,6 @@ import ChecklistIcon from "@mui/icons-material/Checklist"; import UploadIcon from "@mui/icons-material/CloudUpload"; import ConstructionIcon from "@mui/icons-material/Construction"; import FactCheckIcon from "@mui/icons-material/FactCheck"; -import GradingIcon from "@mui/icons-material/Grading"; import PeopleAltIcon from "@mui/icons-material/PeopleAlt"; import SettingsIcon from "@mui/icons-material/Settings"; import TokenIcon from "@mui/icons-material/Token"; @@ -23,8 +22,8 @@ import { } from "react-admin"; import { useLocation } from "react-router"; -import CreateProductApiToken from "../../access_control/product_api_token/ProductApiTokenCreate"; -import ProductApiTokenEmbeddedList from "../../access_control/product_api_token/ProductApiTokenEmbeddedList"; +import ApiTokenCreate from "../../access_control/api_tokens/ApiTokenCreate"; +import ApiTokenEmbeddedList from "../../access_control/api_tokens/ApiTokenEmbeddedList"; import { PERMISSION_API_CONFIGURATION_CREATE, PERMISSION_BRANCH_CREATE, @@ -36,13 +35,16 @@ import { PERMISSION_PRODUCT_MEMBER_CREATE, PERMISSION_PRODUCT_RULE_APPLY, PERMISSION_PRODUCT_RULE_CREATE, + PERMISSION_SERVICE_CREATE, } from "../../access_control/types"; import { 
feature_license_management } from "../../commons/functions"; +import { useStyles } from "../../commons/layout/themes"; import observations from "../../core/observations"; import ApiConfigurationCreate from "../../import_observations/api_configurations/ApiConfigurationCreate"; import ApiConfigurationEmbeddedList from "../../import_observations/api_configurations/ApiConfigurationEmbeddedList"; import ImportMenu from "../../import_observations/import/ImportMenu"; import VulnerabilityCheckEmbeddedList from "../../import_observations/vulnerability_checks/VulnerabilityCheckEmbeddedList"; +import license_components from "../../licenses/license_components"; import ProductShowLicenseComponents from "../../licenses/license_components/ProductShowLicenseComponents"; import MetricsHeader from "../../metrics/MetricsHeader"; import MetricsSeveritiesCurrent from "../../metrics/MetricsSeveritiesCurrent"; @@ -61,6 +63,7 @@ import ProductAuthorizationGroupMemberAdd from "../product_authorization_group_m import ProductAuthorizationGroupMemberEmbeddedList from "../product_authorization_group_members/ProductAuthorizationGroupMemberEmbeddedList"; import ProductMemberAdd from "../product_members/ProductMemberAdd"; import ProductMemberEmbeddedList from "../product_members/ProductMemberEmbeddedList"; +import ServiceCreate from "../services/ServiceCreate"; import ServiceEmbeddedList from "../services/ServiceEmbeddedList"; import ExportMenu from "./ExportMenu"; import ProductHeader from "./ProductHeader"; @@ -84,19 +87,20 @@ const ShowActions = (props: ShowActionsProps) => { queryOptions={{ meta: { api_resource: "product_names" } }} storeKey={props.storeKey} /> - {product && product.permissions.includes(PERMISSION_PRODUCT_IMPORT_OBSERVATIONS) && ( + {product?.permissions.includes(PERMISSION_PRODUCT_IMPORT_OBSERVATIONS) && ( )} - {product && product.permissions.includes(PERMISSION_PRODUCT_EDIT) && } + {product?.permissions.includes(PERMISSION_PRODUCT_EDIT) && } ); }; const ProductShow = () => { + const { classes } = useStyles(); const [settingsTabsShow, setSettingsTabsShow] = useState(false); - const [tabs_changed, setTabsChanged] = useState(false); + const [tabsChanged, setTabsChanged] = useState(false); function showSettingsTabs() { setSettingsTabsShow(true); setTabsChanged(true); @@ -108,7 +112,7 @@ const ProductShow = () => { } const location = useLocation(); - if (!tabs_changed) { + if (!tabsChanged) { setTabsChanged(true); setSettingsTabsShow( location.pathname.endsWith("api_token") || @@ -149,7 +153,7 @@ const ProductShow = () => { }} > - {product && product.permissions.includes(PERMISSION_OBSERVATION_CREATE) && ( + {product?.permissions.includes(PERMISSION_OBSERVATION_CREATE) && ( { + Branches + Versions + + } path="branches" icon={} onClick={hideSettingsTabs} > - {product && product.permissions.includes(PERMISSION_BRANCH_CREATE) && ( - + {product?.permissions.includes(PERMISSION_BRANCH_CREATE) && ( + )} - {product.has_services && ( + } + onClick={hideSettingsTabs} + > + {product?.permissions.includes(PERMISSION_SERVICE_CREATE) && ( + + )} + + + {feature_license_management() && product.has_licenses && ( } + label={ + + Licenses + Components + + } + path="licenses" + icon={} onClick={hideSettingsTabs} > - - - )} - {feature_license_management() && product.has_licenses && ( - } onClick={hideSettingsTabs}> )} @@ -251,10 +271,10 @@ const ProductShow = () => { alignItems: "center", }} > - {product && product.permissions.includes(PERMISSION_PRODUCT_RULE_CREATE) && ( - + 
{product?.permissions.includes(PERMISSION_PRODUCT_RULE_CREATE) && ( + )} - {product && product.permissions.includes(PERMISSION_PRODUCT_RULE_APPLY) && ( + {product?.permissions.includes(PERMISSION_PRODUCT_RULE_APPLY) && ( )} @@ -263,7 +283,7 @@ const ProductShow = () => { )} {settingsTabsShow && ( }> - {product && product.permissions.includes(PERMISSION_API_CONFIGURATION_CREATE) && ( + {product?.permissions.includes(PERMISSION_API_CONFIGURATION_CREATE) && ( )} @@ -272,26 +292,25 @@ const ProductShow = () => { {settingsTabsShow && ( }> User members - {product && product.permissions.includes(PERMISSION_PRODUCT_MEMBER_CREATE) && ( + {product?.permissions.includes(PERMISSION_PRODUCT_MEMBER_CREATE) && ( )} Authorization group members - {product && - product.permissions.includes( - PERMISSION_PRODUCT_AUTHORIZATION_GROUP_MEMBER_CREATE - ) && } + {product?.permissions.includes( + PERMISSION_PRODUCT_AUTHORIZATION_GROUP_MEMBER_CREATE + ) && } )} {settingsTabsShow && ( }> - {product && product.permissions.includes(PERMISSION_PRODUCT_API_TOKEN_CREATE) && ( - + {product?.permissions.includes(PERMISSION_PRODUCT_API_TOKEN_CREATE) && ( + )} - + )} diff --git a/frontend/src/core/products/ProductShowProduct.tsx b/frontend/src/core/products/ProductShowProduct.tsx index 154410e51..1d2173551 100644 --- a/frontend/src/core/products/ProductShowProduct.tsx +++ b/frontend/src/core/products/ProductShowProduct.tsx @@ -1,8 +1,11 @@ import { Divider, Stack, Typography } from "@mui/material"; import { Fragment } from "react"; -import { BooleanField, Labeled, NumberField, ReferenceField, RichTextField, TextField } from "react-admin"; +import { BooleanField, Labeled, NumberField, ReferenceField, TextField, WithRecord } from "react-admin"; +import MarkdownField from "../../commons/custom_fields/MarkdownField"; +import OSVLinuxDistributionField from "../../commons/custom_fields/OSVLinuxDistributionField"; import { SeverityField } from "../../commons/custom_fields/SeverityField"; +import { feature_email } from "../../commons/functions"; import { Product } from "../types"; type ProductShowProductProps = { @@ -21,7 +24,7 @@ const ProductShowProduct = ({ product }: ProductShowProductProps) => { {product.description && ( - + )} {product.product_group && ( @@ -49,48 +52,34 @@ const ProductShowProduct = ({ product }: ProductShowProductProps) => { )} - - - - Rules - - - - + {product.apply_general_rules && ( + + + + Rules + + + + + + )} - {(product.repository_prefix || - product.repository_default_branch || - product.repository_branch_housekeeping_active != null) && ( + {(product.repository_prefix || product.repository_branch_housekeeping_active != null) && ( - Source code repository + Source code repository and housekeeping - - {product.repository_prefix && ( - - - - )} - {product.repository_default_branch && ( - - - - - - )} - + {product.repository_prefix && ( + + + + )} {((!product.product_group && product.repository_branch_housekeeping_active != null) || (product.product_group && product.product_group_repository_branch_housekeeping_active == null && product.repository_branch_housekeeping_active != null)) && ( - + { valueLabelTrue="Product specific" /> - {product.repository_branch_housekeeping_active == true && ( - + {product.repository_branch_housekeeping_active && + product.repository_branch_housekeeping_keep_inactive_days != null && ( + )} + {product.repository_branch_housekeeping_active && + product.repository_branch_housekeeping_exempt_branches != "" && ( - - )} - + )} + )} {product.product_group && 
product.product_group_repository_branch_housekeeping_active != null && ( @@ -122,7 +113,7 @@ const ProductShowProduct = ({ product }: ProductShowProductProps) => { )} - {(product.notification_email_to || + {((feature_email() && product.notification_email_to) || product.notification_ms_teams_webhook || product.notification_slack_webhook) && ( @@ -131,7 +122,7 @@ const ProductShowProduct = ({ product }: ProductShowProductProps) => { Notifications - {product.notification_email_to && ( + {feature_email() && product.notification_email_to && ( @@ -166,7 +157,7 @@ const ProductShowProduct = ({ product }: ProductShowProductProps) => { valueLabelTrue="Product specific" /> - {product.security_gate_active == true && ( + {product.security_gate_active && ( @@ -207,80 +198,93 @@ const ProductShowProduct = ({ product }: ProductShowProductProps) => { )} - - - Issue Tracker - - - - {product.issue_tracker_active && ( - - - - - - - - - - - - + + + + Issue Tracker + + + - {product && product.issue_tracker_minimum_severity && ( + - + - )} - {product.issue_tracker_username && ( - + - )} - {product.issue_tracker_issue_type && ( - + - )} - {product.issue_tracker_status_closed && ( - - - - )} - + {product.issue_tracker_labels && ( + + + + )} + {product.issue_tracker_minimum_severity && ( + + + + )} + {product.issue_tracker_username && ( + + + + )} + {product.issue_tracker_issue_type && ( + + + + )} + {product.issue_tracker_status_closed && ( + + + + )} + + )} - - - Review - - - - - - {product.product_group_assessments_need_approval && ( - - - - )} - - - - {product.product_group_product_rules_need_approval && ( - - - - )} - - - - {product.product_group_new_observations_in_review && ( - - - - )} - + {(product.assessments_need_approval || + product.product_group_assessments_need_approval || + product.product_rules_need_approval || + product.product_group_product_rules_need_approval || + product.new_observations_in_review || + product.product_group_new_observations_in_review) && ( + + + + Review + + + + + + {product.product_group_assessments_need_approval && ( + + + + )} + + + + {product.product_group_product_rules_need_approval && ( + + + + )} + + + + {product.product_group_new_observations_in_review && ( + + + + )} + + + )} {product.risk_acceptance_expiry_active != null && ( @@ -289,20 +293,20 @@ const ProductShowProduct = ({ product }: ProductShowProductProps) => { Risk acceptance expiry - - - - {product.risk_acceptance_expiry_active == true && ( - + + + + + {product.risk_acceptance_expiry_active && ( - - )} + )} + )} {product.license_policy && ( @@ -323,6 +327,41 @@ const ProductShowProduct = ({ product }: ProductShowProductProps) => { )} + + {product.osv_enabled && ( + + + + Vulnerability scanning + + + + + + {product.osv_linux_distribution && ( + + ( + + )} + /> + + )} + + {product.automatic_osv_scanning_enabled && ( + + + + )} + + )} ); }; diff --git a/frontend/src/core/products/functions.tsx b/frontend/src/core/products/functions.tsx new file mode 100644 index 000000000..2ba2c2390 --- /dev/null +++ b/frontend/src/core/products/functions.tsx @@ -0,0 +1,405 @@ +import { Divider, Stack, Typography } from "@mui/material"; +import { Fragment } from "react"; +import { + BooleanInput, + FormDataConsumer, + Identifier, + NullableBooleanInput, + NumberInput, + ReferenceInput, +} from "react-admin"; + +import products from "."; +import MarkdownEdit from "../../commons/custom_fields/MarkdownEdit"; +import OSVLinuxDistributionInput from "../../commons/custom_fields/OSVLinuxDistributionInput"; +import { validate_0_999999, 
validate_255, validate_2048, validate_required_255 } from "../../commons/custom_validators"; +import { feature_automatic_osv_scanning, feature_email, feature_license_management } from "../../commons/functions"; +import { + AutocompleteInputMedium, + AutocompleteInputWide, + TextInputExtraWide, + TextInputWide, +} from "../../commons/layout/themes"; +import { transform_product_group_and_product } from "../functions"; +import { ISSUE_TRACKER_TYPE_CHOICES, OBSERVATION_SEVERITY_CHOICES } from "../types"; + +export const transform = (data: any, description: string) => { + data = transform_product_group_and_product(data, description); + + data.purl ??= ""; + data.cpe23 ??= ""; + data.repository_prefix ??= ""; + + data.issue_tracker_type ??= ""; + data.issue_tracker_base_url ??= ""; + data.issue_tracker_api_key ??= ""; + data.issue_tracker_project_id ??= ""; + data.issue_tracker_labels ??= ""; + data.issue_tracker_username ??= ""; + data.issue_tracker_issue_type ??= ""; + data.issue_tracker_status_closed ??= ""; + data.issue_tracker_minimum_severity ??= ""; + + if (!data.osv_enabled) { + data.osv_linux_distribution = ""; + data.osv_linux_release = ""; + data.automatic_osv_scanning_enabled = false; + } + data.osv_linux_distribution ??= ""; + data.osv_linux_release ??= ""; + + return data; +}; + +export type ProductCreateEditComponentProps = { + initialDescription: string; + setDescription: (value: string) => void; + productGroupId?: Identifier; +}; + +export const ProductCreateEditComponent = ({ + initialDescription, + setDescription, + productGroupId, +}: ProductCreateEditComponentProps) => { + return ( + + + +   Product + + + + {!productGroupId && ( + + + + )} + {productGroupId && ( + + + + )} + + + + + + + + + Rules + + + + + + + Source code repository and housekeeping + + + + + + {({ formData }) => + formData.repository_branch_housekeeping_active && ( + + + + + ) + } + + + + + + + Notifications + + + {feature_email() && ( + + )} + + + + + + + + Security Gate + + + + {({ formData }) => + formData.security_gate_active && ( + + + + + + + + + ) + } + + + + + + Issue Tracker + + + + + {({ formData }) => + formData.issue_tracker_type && ( + + + + + + + + {({ formData }) => + formData.issue_tracker_type == "Jira" && ( + + + + + + ) + } + + + ) + } + + + + + Review + + + + + + + + + Risk acceptance expiry + + + + + {({ formData }) => + formData.risk_acceptance_expiry_active && ( + + ) + } + + + + {feature_license_management() && ( + + + + License management + + + + + + )} + + + + Vulnerability scanning + + + + + + {({ formData }) => formData.osv_enabled && } + + + + {({ formData }) => + formData.osv_enabled && ( + + {feature_automatic_osv_scanning() && ( + + )} + + ) + } + + + ); +}; diff --git a/frontend/src/core/reviews/Reviews.tsx b/frontend/src/core/reviews/Reviews.tsx index dfc0aa4dc..5be0259c6 100644 --- a/frontend/src/core/reviews/Reviews.tsx +++ b/frontend/src/core/reviews/Reviews.tsx @@ -1,6 +1,6 @@ import ChecklistIcon from "@mui/icons-material/Checklist"; import { Badge, Box, Divider, Paper, Tab, Tabs } from "@mui/material"; -import { Fragment, useEffect, useState } from "react"; +import { Fragment, ReactNode, useEffect, useState } from "react"; import { useNotify } from "react-admin"; import { Link, matchPath, useLocation } from "react-router-dom"; @@ -24,7 +24,7 @@ function useRouteMatch(patterns: readonly string[]) { } interface TabPanelProps { - children?: React.ReactNode; + children?: ReactNode; index: number; value: number; } @@ -114,7 +114,7 @@ export default function Reviews() { // 
nosemgrep because the props are well defined in the import /> diff --git a/frontend/src/core/services/ServiceCreate.tsx b/frontend/src/core/services/ServiceCreate.tsx new file mode 100644 index 000000000..53b25cd5a --- /dev/null +++ b/frontend/src/core/services/ServiceCreate.tsx @@ -0,0 +1,62 @@ +import { Dialog, DialogContent, DialogTitle } from "@mui/material"; +import { Fragment, useState } from "react"; +import { CreateBase, SimpleForm, useCreate, useNotify, useRefresh } from "react-admin"; + +import AddButton from "../../commons/custom_fields/AddButton"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { validate_required_255 } from "../../commons/custom_validators"; +import { TextInputWide } from "../../commons/layout/themes"; + +export type ServiceCreateProps = { + product: any; +}; + +const ServiceCreate = ({ product }: ServiceCreateProps) => { + const [open, setOpen] = useState(false); + const refresh = useRefresh(); + const notify = useNotify(); + const [create] = useCreate(); + const handleOpen = () => setOpen(true); + const handleCancel = () => setOpen(false); + const handleClose = (event: object, reason: string) => { + if (reason && reason == "backdropClick") return; + setOpen(false); + }; + + const create_service = (data: any) => { + data.product = product.id; + + create( + "services", + { data: data }, + { + onSuccess: () => { + refresh(); + notify("Service added", { type: "success" }); + setOpen(false); + }, + onError: (error: any) => { + notify(error.message, { type: "warning" }); + }, + } + ); + }; + + return ( + + + + Add service + + + }> + + + + + + + ); +}; + +export default ServiceCreate; diff --git a/frontend/src/core/services/ServiceEdit.tsx b/frontend/src/core/services/ServiceEdit.tsx new file mode 100644 index 000000000..7cf31d7b2 --- /dev/null +++ b/frontend/src/core/services/ServiceEdit.tsx @@ -0,0 +1,64 @@ +import { Dialog, DialogContent, DialogTitle } from "@mui/material"; +import { Fragment, useState } from "react"; +import { SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; + +import EditButton from "../../commons/custom_fields/EditButton"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { validate_required_255 } from "../../commons/custom_validators"; +import { TextInputWide } from "../../commons/layout/themes"; + +const ServiceEdit = () => { + const [open, setOpen] = useState(false); + const [update] = useUpdate(); + const refresh = useRefresh(); + const notify = useNotify(); + const handleOpen = () => setOpen(true); + const handleCancel = () => setOpen(false); + const handleClose = (event: object, reason: string) => { + if (reason && reason == "backdropClick") return; + setOpen(false); + }; + const service_update = async (data: any) => { + const patch = { + name: data.name, + }; + + update( + "services", + { + id: data.id, + data: patch, + }, + { + onSuccess: () => { + refresh(); + notify("Service updated", { + type: "success", + }); + setOpen(false); + }, + onError: (error: any) => { + notify(error.message, { + type: "warning", + }); + }, + } + ); + }; + + return ( + + + + Edit service + + }> + + + + + + ); +}; + +export default ServiceEdit; diff --git a/frontend/src/core/services/ServiceEmbeddedList.tsx b/frontend/src/core/services/ServiceEmbeddedList.tsx index 59a598b39..ba3d97277 100644 --- a/frontend/src/core/services/ServiceEmbeddedList.tsx +++ b/frontend/src/core/services/ServiceEmbeddedList.tsx @@ -1,17 +1,50 @@ import { Stack } from 
"@mui/material"; -import { Datagrid, ListContextProvider, ResourceContextProvider, WithRecord, useListController } from "react-admin"; +import { + Datagrid, + FieldProps, + ListContextProvider, + ResourceContextProvider, + WithRecord, + useListController, + useRecordContext, +} from "react-admin"; -import { PERMISSION_SERVICE_DELETE } from "../../access_control/types"; +import { PERMISSION_SERVICE_DELETE, PERMISSION_SERVICE_EDIT } from "../../access_control/types"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; +import LicensesCountField from "../../commons/custom_fields/LicensesCountField"; import ObservationsCountField from "../../commons/custom_fields/ObservationsCountField"; import TextUrlField from "../../commons/custom_fields/TextUrlField"; +import { feature_license_management } from "../../commons/functions"; import { getSettingListSize } from "../../commons/user_settings/functions"; import ServiceDelete from "./ServiceDelete"; +import ServiceEdit from "./ServiceEdit"; -type ServiceEmbeddedListProps = { +interface ServiceNameURLFieldProps extends FieldProps { product: any; +} + +export const ServiceNameURLField = (props: ServiceNameURLFieldProps) => { + const record = useRecordContext(props); + return record ? ( + + ) : null; }; +function get_observations_url(product_id: number, service_id: number, repository_default_branch_id: number): string { + if (repository_default_branch_id) { + return `#/products/${product_id}/show?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A["Open"%2C"Affected"%2C"In review"]%2C%22origin_service%22%3A${service_id}%2C%22branch%22%3A${repository_default_branch_id}%7D&order=ASC&sort=current_severity`; + } else { + return `#/products/${product_id}/show?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A["Open"%2C"Affected"%2C"In review"]%2C%22origin_service%22%3A${service_id}%7D&order=ASC&sort=current_severity`; + } +} + +interface ServiceEmbeddedListProps { + product: any; +} + const ServiceEmbeddedList = ({ product }: ServiceEmbeddedListProps) => { const listContext = useListController({ filter: { product: Number(product.id) }, @@ -26,18 +59,6 @@ const ServiceEmbeddedList = ({ product }: ServiceEmbeddedListProps) => { return
<div>Loading...</div>
; } - function get_observations_url( - product_id: number, - service_id: number, - repository_default_branch_id: number - ): string { - if (repository_default_branch_id) { - return `#/products/${product_id}/show?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A%22Open%22%2C%22origin_service%22%3A${service_id}%2C%22branch%22%3A${repository_default_branch_id}%7D&order=ASC&sort=current_severity`; - } else { - return `#/products/${product_id}/show?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A%22Open%22%2C%22origin_service%22%3A${service_id}%7D&order=ASC&sort=current_severity`; - } - } - return ( @@ -48,26 +69,16 @@ const ServiceEmbeddedList = ({ product }: ServiceEmbeddedListProps) => { bulkActionButtons={false} rowClick={false} > - ( - - )} - /> - + + + {feature_license_management() && product?.has_licenses && ( + + )} ( - {product && - product.permissions.includes(PERMISSION_SERVICE_DELETE) && + {product?.permissions.includes(PERMISSION_SERVICE_EDIT) && } + {product?.permissions.includes(PERMISSION_SERVICE_DELETE) && !service.is_default_service && } )} diff --git a/frontend/src/core/types.ts b/frontend/src/core/types.ts index 1e108342e..1988cddac 100644 --- a/frontend/src/core/types.ts +++ b/frontend/src/core/types.ts @@ -65,10 +65,12 @@ export interface Observation extends RaRecord { current_severity: string; parser_severity: string; assessment_severity: string; + rule_rego_severity: string; rule_severity: string; current_status: string; parser_status: string; assessment_status: string; + rule_rego_status: string; rule_status: string; scanner_observation_id: string; origin_component_name: string; @@ -142,23 +144,25 @@ export const OBSERVATION_SEVERITY_CHOICES = [ ]; export const OBSERVATION_STATUS_OPEN = "Open"; +export const OBSERVATION_STATUS_AFFECTED = "Affected"; +export const OBSERVATION_STATUS_IN_REVIEW = "In review"; export const OBSERVATION_STATUS_RESOLVED = "Resolved"; export const OBSERVATION_STATUS_DUPLICATE = "Duplicate"; export const OBSERVATION_STATUS_FALSE_POSITIVE = "False positive"; -export const OBSERVATION_STATUS_IN_REVIEW = "In review"; export const OBSERVATION_STATUS_NOT_AFFECTED = "Not affected"; export const OBSERVATION_STATUS_NOT_SECURITY = "Not security"; export const OBSERVATION_STATUS_RISK_ACCEPTED = "Risk accepted"; export const OBSERVATION_STATUS_CHOICES = [ { id: OBSERVATION_STATUS_OPEN, name: OBSERVATION_STATUS_OPEN }, + { id: OBSERVATION_STATUS_AFFECTED, name: OBSERVATION_STATUS_AFFECTED }, + { id: OBSERVATION_STATUS_IN_REVIEW, name: OBSERVATION_STATUS_IN_REVIEW }, { id: OBSERVATION_STATUS_RESOLVED, name: OBSERVATION_STATUS_RESOLVED }, { id: OBSERVATION_STATUS_DUPLICATE, name: OBSERVATION_STATUS_DUPLICATE }, { id: OBSERVATION_STATUS_FALSE_POSITIVE, name: OBSERVATION_STATUS_FALSE_POSITIVE, }, - { id: OBSERVATION_STATUS_IN_REVIEW, name: OBSERVATION_STATUS_IN_REVIEW }, { id: OBSERVATION_STATUS_NOT_AFFECTED, name: OBSERVATION_STATUS_NOT_AFFECTED, @@ -173,6 +177,12 @@ export const OBSERVATION_STATUS_CHOICES = [ }, ]; +export const OBSERVATION_STATUS_ACTIVE = [ + OBSERVATION_STATUS_OPEN, + OBSERVATION_STATUS_AFFECTED, + OBSERVATION_STATUS_IN_REVIEW, +]; + export const AGE_CHOICES = [ { id: "Today", name: "Today" }, { id: "Past 7 days", name: "Past 7 days" }, @@ -211,6 +221,56 @@ export const OBSERVATION_VEX_JUSTIFICATION_CHOICES = [ }, ]; +export const OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_CODE_NOT_PRESENT = "code_not_present"; +export const OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_CODE_NOT_REACHABLE = "code_not_reachable"; 
+export const OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_REQUIRES_CONFIGURATION = "requires_configuration"; +export const OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_REQUIRES_DEPENDENCY = "requires_dependency"; +export const OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_REQUIRES_ENVIRONMENT = "requires_environment"; +export const OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_PROTECTED_BY_COMPILER = "protected_by_compiler"; +export const OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_PROTECTED_AT_RUNTIME = "protected_at_runtime"; +export const OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_PROTECTED_AT_PERIMETER = "protected_at_perimeter"; +export const OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_PROTECTED_BY_MITIGATING_CONTROL = + "protected_by_mitigating_control"; + +export const OBSERVATION_CYCLONEDX_VEX_JUSTIFICATION_CHOICES = [ + { + id: OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_CODE_NOT_PRESENT, + name: "Code not present", + }, + { + id: OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_CODE_NOT_REACHABLE, + name: "Code not reachable", + }, + { + id: OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_REQUIRES_CONFIGURATION, + name: "Requires configuration", + }, + { + id: OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_REQUIRES_DEPENDENCY, + name: "Requires dependency", + }, + { + id: OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_REQUIRES_ENVIRONMENT, + name: "Requires environment", + }, + { + id: OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_PROTECTED_BY_COMPILER, + name: "Protected by compiler", + }, + { + id: OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_PROTECTED_AT_RUNTIME, + name: "Protected at runtime", + }, + { + id: OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_PROTECTED_AT_PERIMETER, + name: "Protected at perimeter", + }, + { + id: OBSERVATION_VEX_CYCLONEDX_JUSTIFICATION_CYCLONEDX_PROTECTED_BY_MITIGATING_CONTROL, + name: "Protected by mitigation control", + }, +]; + export const ASSESSMENT_STATUS_APPROVED = "Approved"; export const ASSESSMENT_STATUS_NEEDS_APPROVAL = "Needs approval"; export const ASSESSMENT_STATUS_REJECTED = "Rejected"; diff --git a/frontend/src/dashboard/Dashboard.tsx b/frontend/src/dashboard/Dashboard.tsx index d826882d5..904b678ae 100644 --- a/frontend/src/dashboard/Dashboard.tsx +++ b/frontend/src/dashboard/Dashboard.tsx @@ -3,7 +3,7 @@ import { Fragment, useEffect, useState } from "react"; import { useTheme } from "react-admin"; import { setUserInfo } from "../access_control/auth_provider/authProvider"; -import { getSettingTheme, getTheme } from "../commons/user_settings/functions"; +import { getResolvedSettingTheme, getTheme } from "../commons/user_settings/functions"; import ObservationDashboardList from "../core/observations/ObservationDashboardList"; import MetricsHeader from "../metrics/MetricsHeader"; import MetricsSeveritiesCurrent from "../metrics/MetricsSeveritiesCurrent"; @@ -19,7 +19,7 @@ const Dashboard = () => { if (!user) { await setUserInfo(); } - setSettingTheme(getSettingTheme()); + setSettingTheme(getResolvedSettingTheme()); }; useEffect(() => { diff --git a/frontend/src/import_observations/api_configurations/ApiConfigurationCreate.tsx b/frontend/src/import_observations/api_configurations/ApiConfigurationCreate.tsx index 1012b37b1..97cb6d814 100644 --- a/frontend/src/import_observations/api_configurations/ApiConfigurationCreate.tsx +++ b/frontend/src/import_observations/api_configurations/ApiConfigurationCreate.tsx @@ -4,7 +4,6 @@ import { BooleanInput, CreateBase, 
ReferenceInput, - SaveButton, SimpleForm, useCreate, useDataProvider, @@ -14,8 +13,7 @@ import { import { useWatch } from "react-hook-form"; import AddButton from "../../commons/custom_fields/AddButton"; -import CancelButton from "../../commons/custom_fields/CancelButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_255, validate_513, @@ -23,6 +21,7 @@ import { validate_required, validate_required_255, } from "../../commons/custom_validators"; +import { feature_automatic_api_import } from "../../commons/functions"; import { AutocompleteInputWide, PasswordInputWide, TextInputWide } from "../../commons/layout/themes"; export type ApiConfigurationCreateProps = { @@ -60,13 +59,6 @@ const ApiConfigurationCreate = ({ id }: ApiConfigurationCreateProps) => { setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); - const createApiConfiguration = (data: any) => { data.product = id; @@ -176,7 +168,16 @@ const ApiConfigurationCreate = ({ id }: ApiConfigurationCreateProps) => { > - + + + { Add API configuration - }> + } + > { - - + {feature_automatic_api_import() && ( + + + + + )} diff --git a/frontend/src/import_observations/api_configurations/ApiConfigurationEdit.tsx b/frontend/src/import_observations/api_configurations/ApiConfigurationEdit.tsx index a5f94e356..a9a9cf9d6 100644 --- a/frontend/src/import_observations/api_configurations/ApiConfigurationEdit.tsx +++ b/frontend/src/import_observations/api_configurations/ApiConfigurationEdit.tsx @@ -3,7 +3,6 @@ import { Fragment, useEffect, useState } from "react"; import { BooleanInput, ReferenceInput, - SaveButton, SimpleForm, WithRecord, useDataProvider, @@ -13,9 +12,8 @@ import { } from "react-admin"; import { useWatch } from "react-hook-form"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import EditButton from "../../commons/custom_fields/EditButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_255, validate_513, @@ -23,6 +21,7 @@ import { validate_required, validate_required_255, } from "../../commons/custom_validators"; +import { feature_automatic_api_import } from "../../commons/functions"; import { AutocompleteInputWide, PasswordInputWide, TextInputWide } from "../../commons/layout/themes"; const ApiConfigurationEdit = () => { @@ -131,13 +130,6 @@ const ApiConfigurationEdit = () => { ); }; - const CustomToolbar = () => ( - - - - - ); - const ParserInput = () => { const parserId = useWatch({ name: "parser" }); const selectedParser = parsers.find((parser) => parser.id === parserId); @@ -199,7 +191,16 @@ const ApiConfigurationEdit = () => { > - + + + { Edit API configuration - }> + } + > { - - + {feature_automatic_api_import() && ( + + + + + )} diff --git a/frontend/src/import_observations/api_configurations/ApiConfigurationEmbeddedList.tsx b/frontend/src/import_observations/api_configurations/ApiConfigurationEmbeddedList.tsx index a1fff13c8..801d60083 100644 --- a/frontend/src/import_observations/api_configurations/ApiConfigurationEmbeddedList.tsx +++ b/frontend/src/import_observations/api_configurations/ApiConfigurationEmbeddedList.tsx @@ -21,7 +21,13 @@ import ApiConfigurationEdit from "./ApiConfigurationEdit"; const listFilters = [ , - + , ]; @@ -68,10 +74,10 @@ const ApiConfigurationEmbeddedList = ({ product }: ApiConfigurationEmbeddedListP ( - {product && 
product.permissions.includes(PERMISSION_API_CONFIGURATION_EDIT) && ( + {product?.permissions.includes(PERMISSION_API_CONFIGURATION_EDIT) && ( )} - {product && product.permissions.includes(PERMISSION_API_CONFIGURATION_DELETE) && ( + {product?.permissions.includes(PERMISSION_API_CONFIGURATION_DELETE) && ( )} diff --git a/frontend/src/import_observations/import/ApiImportObservations.tsx b/frontend/src/import_observations/import/ApiImportObservations.tsx index 565429334..67d2666a8 100644 --- a/frontend/src/import_observations/import/ApiImportObservations.tsx +++ b/frontend/src/import_observations/import/ApiImportObservations.tsx @@ -1,10 +1,10 @@ import UploadIcon from "@mui/icons-material/CloudUpload"; -import { Backdrop, Button, CircularProgress, Dialog, DialogContent, DialogTitle } from "@mui/material"; +import { Backdrop, CircularProgress, Dialog, DialogContent, DialogTitle } from "@mui/material"; import { Fragment, useState } from "react"; -import { ReferenceInput, SaveButton, SimpleForm, useNotify, useRefresh } from "react-admin"; +import { ReferenceInput, SimpleForm, useNotify, useRefresh } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import MenuButton from "../../commons/custom_fields/MenuButton"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_255, validate_513, validate_2048, validate_required } from "../../commons/custom_validators"; import { getIconAndFontColor } from "../../commons/functions"; import { AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; @@ -36,8 +36,8 @@ const ApiImportObservations = (product: any) => { if (data.branch) { formData.branch = data.branch; } - if (data.service) { - formData.service = data.service; + if (data.service_id) { + formData.service_id = data.service_id; } if (data.docker_image_name_tag) { formData.docker_image_name_tag = data.docker_image_name_tag; @@ -77,36 +77,26 @@ const ApiImportObservations = (product: any) => { }); }; - const CustomToolbar = () => ( - - - } /> - - ); - return ( - + icon={} + /> Import observations from API - }> + } + /> + } + > { /> )} - + {product.product.has_services && ( + + + + )} { const observationUpdate = async (data: any) => { setLoading(true); - const formData = new FormData(); - formData.append("file", data.file.rawFile, data.file.title); - formData.append("product", data.id); - if (data.branch) { - formData.append("branch", data.branch); - } - if (data.service) { - formData.append("service", data.service); - } - if (data.docker_image_name_tag) { - formData.append("docker_image_name_tag", data.docker_image_name_tag); - } - if (data.endpoint_url) { - formData.append("endpoint_url", data.endpoint_url); - } - if (data.kubernetes_cluster) { - formData.append("kubernetes_cluster", data.kubernetes_cluster); - } - formData.append("suppress_licenses", data.suppress_licenses); + let new_observations = 0; + let updated_observations = 0; + let resolved_observations = 0; - httpClient(window.__RUNTIME_CONFIG__.API_BASE_URL + "/import/file_upload_observations_by_id/", { - method: "POST", - body: formData, - }) - .then((result) => { - const observations = - result.json.observations_new + - result.json.observations_updated + - result.json.observations_resolved > - 0; - const license_components = - result.json.license_components_new + - result.json.license_components_updated + - result.json.license_components_deleted > - 0; - let message = ""; - 
if (observations || !license_components) - message += - result.json.observations_new + - " new observations\n" + - result.json.observations_updated + - " updated observations\n" + - result.json.observations_resolved + - " resolved observations"; - if (observations && license_components) message += "\n"; - if (license_components) { - message += - result.json.license_components_new + - " new license components\n" + - result.json.license_components_updated + - " updated license components\n" + - result.json.license_components_deleted + - " deleted license components"; - } - refresh(); - setLoading(false); - setOpen(false); - notify(message, { - type: "success", - multiLine: true, - }); + let upload_error = false; + let error_message = ""; + let error_sbom = ""; + + for (const file of data.file) { + const formData = new FormData(); + formData.append("file", file.rawFile, file.title); + formData.append("product", data.id); + if (data.branch) { + formData.append("branch", data.branch); + } + if (data.service_id) { + formData.append("service_id", data.service_id); + } + if (data.docker_image_name_tag) { + formData.append("docker_image_name_tag", data.docker_image_name_tag); + } + if (data.endpoint_url) { + formData.append("endpoint_url", data.endpoint_url); + } + if (data.kubernetes_cluster) { + formData.append("kubernetes_cluster", data.kubernetes_cluster); + } + formData.append("suppress_licenses", "true"); + + await httpClient(window.__RUNTIME_CONFIG__.API_BASE_URL + "/import/file_upload_observations_by_id/", { + method: "POST", + body: formData, }) - .catch((error) => { - setLoading(false); - setOpen(false); - notify(error.message, { - type: "warning", + .then((result) => { + new_observations += result.json.observations_new; + updated_observations += result.json.observations_updated; + resolved_observations += result.json.observations_resolved; + }) + .catch((error) => { + upload_error = true; + error_message = error.message; + error_sbom = file.title; }); + + if (upload_error) { + break; + } + } + + setLoading(false); + setOpen(false); + refresh(); + + if (upload_error) { + notify("Error '" + error_message + "' while processing '" + error_sbom + "'", { type: "warning" }); + } else { + const message = + new_observations + + " new observations\n" + + updated_observations + + " updated observations\n" + + resolved_observations + + " resolved observations"; + notify(message, { + type: "success", + multiLine: true, }); + } }; - const CustomToolbar = () => ( - - - } /> - - ); - return ( - + icon={} + /> - Upload observations from file + Upload observations from files - }> + } + /> + } + > Drop some files to upload, or click to select some.

} >
@@ -167,10 +149,21 @@ const FileUploadObservations = () => { />
)} + {product.has_services && ( + + + + )} )} /> - { /> -
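
The two upload dialogs in this area (`FileUploadObservations` above and `FileUploadSBOM` below) share the same flow: loop over the selected files one after another, POST each file as multipart form data, add up the counters from every response, and abort on the first failure so the user gets exactly one notification at the end. A minimal sketch of that loop is shown below, under the assumption that `httpClient` wraps react-admin's `fetchJson` as used in this diff and that `window.__RUNTIME_CONFIG__` is declared by the app; `UploadTotals` and the trimmed-down field list are illustrative, not part of the real components:

```typescript
// Sketch of the sequential multi-file upload loop used by the upload dialogs.
// httpClient and window.__RUNTIME_CONFIG__ come from the code base (see the diff above);
// UploadTotals and the reduced FormData fields are assumptions for illustration only.
import { httpClient } from "../../commons/ra-data-django-rest-framework";

interface UploadTotals {
    new_observations: number;
    updated_observations: number;
    resolved_observations: number;
}

const uploadFilesSequentially = async (
    files: { rawFile: File; title: string }[],
    productId: number
): Promise<{ totals: UploadTotals; error?: { message: string; file: string } }> => {
    const totals: UploadTotals = { new_observations: 0, updated_observations: 0, resolved_observations: 0 };

    for (const file of files) {
        const formData = new FormData();
        formData.append("file", file.rawFile, file.title);
        formData.append("product", String(productId));

        try {
            // Each file is sent as its own multipart request, awaited before the next one starts.
            const result = await httpClient(
                window.__RUNTIME_CONFIG__.API_BASE_URL + "/import/file_upload_observations_by_id/",
                { method: "POST", body: formData }
            );
            totals.new_observations += result.json.observations_new;
            totals.updated_observations += result.json.observations_updated;
            totals.resolved_observations += result.json.observations_resolved;
        } catch (error: any) {
            // Stop at the first failing file so a single, precise error notification can be shown.
            return { totals, error: { message: error.message, file: file.title } };
        }
    }

    return { totals };
};
```

The real dialogs additionally append the optional `branch`, `service_id`, `docker_image_name_tag`, `endpoint_url` and `kubernetes_cluster` fields before posting, and only call `refresh()` and `notify()` once the loop has finished, which keeps the result to one aggregated message instead of one notification per file.
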
diff --git a/frontend/src/import_observations/import/FileUploadSBOM.tsx b/frontend/src/import_observations/import/FileUploadSBOM.tsx new file mode 100644 index 000000000..77de665f6 --- /dev/null +++ b/frontend/src/import_observations/import/FileUploadSBOM.tsx @@ -0,0 +1,169 @@ +import UploadIcon from "@mui/icons-material/Upload"; +import { Backdrop, CircularProgress, Dialog, DialogContent, DialogTitle } from "@mui/material"; +import { Fragment, useState } from "react"; +import { FileField, FileInput, ReferenceInput, SimpleForm, WithRecord, useNotify, useRefresh } from "react-admin"; + +import MenuButton from "../../commons/custom_fields/MenuButton"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { validate_required } from "../../commons/custom_validators"; +import { getIconAndFontColor } from "../../commons/functions"; +import { AutocompleteInputWide } from "../../commons/layout/themes"; +import { httpClient } from "../../commons/ra-data-django-rest-framework"; + +const FileUploadSBOM = () => { + const [open, setOpen] = useState(false); + const [loading, setLoading] = useState(false); + const refresh = useRefresh(); + const notify = useNotify(); + const handleOpen = () => setOpen(true); + const handleCancel = () => { + setOpen(false); + setLoading(false); + }; + const handleClose = (event: object, reason: string) => { + if (reason && reason == "backdropClick") return; + setOpen(false); + setLoading(false); + }; + + const uploadSBOM = async (data: any) => { + setLoading(true); + + let new_license_components = 0; + let updated_license_components = 0; + let deleted_license_components = 0; + + let upload_error = false; + let error_message = ""; + let error_sbom = ""; + + for (const file of data.file) { + const formData = new FormData(); + formData.append("file", file.rawFile, file.title); + formData.append("product", data.id); + if (data.branch) { + formData.append("branch", data.branch); + } + if (data.service_id) { + formData.append("service_id", data.service_id); + } + + await httpClient(window.__RUNTIME_CONFIG__.API_BASE_URL + "/import/file_upload_sbom_by_id/", { + method: "POST", + body: formData, + }) + .then((result) => { + new_license_components += result.json.license_components_new; + updated_license_components += result.json.license_components_updated; + deleted_license_components += result.json.license_components_deleted; + }) + .catch((error) => { + upload_error = true; + error_message = error.message; + error_sbom = file.title; + }); + + if (upload_error) { + break; + } + } + + setLoading(false); + setOpen(false); + refresh(); + + if (upload_error) { + notify("Error '" + error_message + "' while processing '" + error_sbom + "'", { type: "warning" }); + } else { + const message = + new_license_components + + " new license components\n" + + updated_license_components + + " updated license components\n" + + deleted_license_components + + " deleted license components"; + notify(message, { + type: "success", + multiLine: true, + }); + } + }; + + return ( + + } + /> + + Upload SBOMs from files + + } + /> + } + > + Drop some files to upload, or click to select some.

} + > + +
+ ( + + {product.has_branches && ( + + + + )} + {product.has_services && ( + + + + )} + + )} + /> +
+
+
+ {loading ? ( + theme.zIndex.drawer + 1 }} open={open}> + + + ) : null} +
+ ); +}; + +export default FileUploadSBOM; diff --git a/frontend/src/import_observations/import/ImportMenu.tsx b/frontend/src/import_observations/import/ImportMenu.tsx index 7a099a799..7bb45db9d 100644 --- a/frontend/src/import_observations/import/ImportMenu.tsx +++ b/frontend/src/import_observations/import/ImportMenu.tsx @@ -4,14 +4,17 @@ import Menu from "@mui/material/Menu"; import MenuItem from "@mui/material/MenuItem"; import { Fragment, MouseEvent, useState } from "react"; +import { feature_license_management } from "../../commons/functions"; import ApiImportObservations from "./ApiImportObservations"; import FileUploadObservations from "./FileUploadObservations"; +import FileUploadSBOM from "./FileUploadSBOM"; +import ScanOSV from "./ScanOSV"; interface ImportMenuProps { product: any; } -const ImportMenu = (props: ImportMenuProps) => { +const ImportMenu = ({ product }: ImportMenuProps) => { const [anchorEl, setAnchorEl] = useState(null); const open = Boolean(anchorEl); const handleClick = (event: MouseEvent) => { @@ -35,21 +38,25 @@ const ImportMenu = (props: ImportMenuProps) => { > import - + e.stopPropagation()}> - e.stopPropagation()}> - - + {feature_license_management() && ( + e.stopPropagation()}> + + + )} + {product.has_api_configurations && ( + e.stopPropagation()}> + + + )} + {product.osv_enabled && product.has_licenses && ( + e.stopPropagation()}> + + + )} ); diff --git a/frontend/src/import_observations/import/ScanOSV.tsx b/frontend/src/import_observations/import/ScanOSV.tsx new file mode 100644 index 000000000..31f7b32af --- /dev/null +++ b/frontend/src/import_observations/import/ScanOSV.tsx @@ -0,0 +1,126 @@ +import UploadIcon from "@mui/icons-material/CloudUpload"; +import { Backdrop, CircularProgress, Dialog, DialogContent, DialogTitle } from "@mui/material"; +import { Fragment, useState } from "react"; +import { ReferenceInput, SimpleForm, WithRecord, useNotify, useRefresh } from "react-admin"; + +import MenuButton from "../../commons/custom_fields/MenuButton"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { getIconAndFontColor } from "../../commons/functions"; +import { AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; +import { httpClient } from "../../commons/ra-data-django-rest-framework"; + +interface ScanOSVProps { + product: any; +} + +const ScanOSV = ({ product }: ScanOSVProps) => { + const [open, setOpen] = useState(false); + const [loading, setLoading] = useState(false); + const refresh = useRefresh(); + const notify = useNotify(); + const handleOpen = () => setOpen(true); + const handleCancel = () => { + setOpen(false); + setLoading(false); + }; + const handleClose = (event: object, reason: string) => { + if (reason && reason == "backdropClick") return; + setOpen(false); + setLoading(false); + }; + + const scanOSV = async (data: any) => { + setLoading(true); + + let url = ""; + if (data.branch) { + url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/products/" + product.id + "/" + data.branch + "/scan_osv/"; + } else { + url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/products/" + product.id + "/scan_osv/"; + } + + httpClient(url, { + method: "POST", + }) + .then((result) => { + const message = + result.json.observations_new + + " new observations\n" + + result.json.observations_updated + + " updated observations\n" + + result.json.observations_resolved + + " resolved observations"; + refresh(); + setLoading(false); + setOpen(false); + notify(message, { + type: "success", + multiLine: 
true, + }); + }) + .catch((error) => { + setLoading(false); + setOpen(false); + notify(error.message, { + type: "warning", + }); + }); + }; + + return ( + + } + /> + + Scan vulnerabilities from OSV + + } + alwaysEnable + /> + } + > + + ( + + {product.has_branches && ( + + + + )} + + )} + /> + + + + {loading ? ( + theme.zIndex.drawer + 1 }} open={open}> + + + ) : null} + + ); +}; + +export default ScanOSV; diff --git a/frontend/src/import_observations/vulnerability_checks/VulnerabilityCheckEmbeddedList.tsx b/frontend/src/import_observations/vulnerability_checks/VulnerabilityCheckEmbeddedList.tsx index 252a2dbd2..79c1f8bf6 100644 --- a/frontend/src/import_observations/vulnerability_checks/VulnerabilityCheckEmbeddedList.tsx +++ b/frontend/src/import_observations/vulnerability_checks/VulnerabilityCheckEmbeddedList.tsx @@ -13,8 +13,7 @@ import { } from "react-admin"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; -import { humanReadableDate } from "../../commons/functions"; -import { feature_license_management } from "../../commons/functions"; +import { feature_license_management, humanReadableDate } from "../../commons/functions"; import { AutocompleteInputMedium } from "../../commons/layout/themes"; import { getSettingListSize } from "../../commons/user_settings/functions"; import { VulnerabilityCheck } from "../types"; @@ -27,7 +26,7 @@ type VulnerabilityCheckEmbeddedListProps = { const VulnerabilityCheckEmbeddedList = ({ product, long_list }: VulnerabilityCheckEmbeddedListProps) => { function listFilters(product: any) { const filters = []; - if (product && product.has_branches) { + if (product?.has_branches) { filters.push( ); } + if (product?.has_services) { + filters.push( + + + + ); + } filters.push(); filters.push(); - filters.push(); + if (product?.has_api_configurations) { + filters.push(); + } return filters; } @@ -63,10 +78,15 @@ const VulnerabilityCheckEmbeddedList = ({ product, long_list }: VulnerabilityChe } function get_observations_url(vulnerability_check: any): string { - if (vulnerability_check.branch == null) { - return `..?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A%22Open%22%2C%22scanner%22%3A%22${vulnerability_check.scanner_name}%22%2C%22upload_filename%22%3A%22${vulnerability_check.filename}%22%2C%22api_configuration_name%22%3A%22${vulnerability_check.api_configuration_name}%22%7D&order=ASC&sort=current_severity`; + let url = "..?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A%22Open"; + if (vulnerability_check.branch !== null) { + url += `%22%2C%22branch%22%3A%22${vulnerability_check.branch}`; + } + if (vulnerability_check.service !== null) { + url += `%22%2C%22origin_service%22%3A%22${vulnerability_check.service}`; } - return `..?displayedFilters=%7B%7D&filter=%7B%22current_status%22%3A%22Open%22%2C%22branch%22%3A%22${vulnerability_check.branch}%22%2C%22scanner%22%3A%22${vulnerability_check.scanner_name}%22%2C%22upload_filename%22%3A%22${vulnerability_check.filename}%22%2C%22api_configuration_name%22%3A%22${vulnerability_check.api_configuration_name}%22%7D&order=ASC&sort=current_severity`; + url += `%22%2C%22scanner%22%3A%22${vulnerability_check.scanner_name}%22%2C%22upload_filename%22%3A%22${vulnerability_check.filename}%22%2C%22api_configuration_name%22%3A%22${vulnerability_check.api_configuration_name}%22%7D&order=ASC&sort=current_severity`; + return url; } return ( @@ -81,10 +101,13 @@ const VulnerabilityCheckEmbeddedList = ({ product, long_list }: VulnerabilityChe rowClick={(id, resource, record) => 
get_observations_url(record)} resource="vulnerability_checks" > - {product && product.has_branches && } + {product?.has_branches && } + {product?.has_services && } {long_list && } - {long_list && } + {long_list && product?.has_api_configurations && ( + + )} diff --git a/frontend/src/licenses/concluded_licenses/ConcludedLicenseEmbeddedList.tsx b/frontend/src/licenses/concluded_licenses/ConcludedLicenseEmbeddedList.tsx new file mode 100644 index 000000000..7d581cdcf --- /dev/null +++ b/frontend/src/licenses/concluded_licenses/ConcludedLicenseEmbeddedList.tsx @@ -0,0 +1,94 @@ +import { + Datagrid, + FilterForm, + FunctionField, + ListContextProvider, + ReferenceInput, + ResourceContextProvider, + TextField, + TextInput, + useListController, +} from "react-admin"; + +import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; +import { humanReadableDate } from "../../commons/functions"; +import { AutocompleteInputMedium } from "../../commons/layout/themes"; +import { getSettingListSize } from "../../commons/user_settings/functions"; +import { AGE_CHOICES } from "../../core/types"; + +const showLicense = (id: any) => { + return "../../../../concluded_licenses/" + id + "/show"; +}; + +const listFilters = [ + + + , + , + + + , + , + , + + + , + , +]; + +const ConcludedLicenseEmbeddedList = () => { + const listContext = useListController({ + filter: {}, + perPage: 25, + resource: "concluded_licenses", + sort: { field: "product_data.name", order: "ASC" }, + disableSyncWithLocation: false, + storeKey: "concluded_licenses.embedded", + }); + + if (listContext.isLoading) { + return
<div>Loading...</div>;
+ + + + + + + + + + label="Age" + sortBy="last_updated" + render={(record) => (record ? humanReadableDate(record.last_updated) : "")} + /> + + +
+
+
+ ); +}; + +export default ConcludedLicenseEmbeddedList; diff --git a/frontend/src/licenses/concluded_licenses/ConcludedLicenseShow.tsx b/frontend/src/licenses/concluded_licenses/ConcludedLicenseShow.tsx new file mode 100644 index 000000000..c8ce22d05 --- /dev/null +++ b/frontend/src/licenses/concluded_licenses/ConcludedLicenseShow.tsx @@ -0,0 +1,101 @@ +import { Paper, Stack, Typography } from "@mui/material"; +import { Fragment } from "react"; +import { + DateField, + DeleteWithConfirmButton, + Labeled, + PrevNextButtons, + ReferenceField, + Show, + TextField, + TopToolbar, + WithRecord, + useRecordContext, +} from "react-admin"; + +import concluded_licenses from "."; +import { PERMISSION_CONCLUDED_LICENSE_DELETE } from "../../access_control/types"; + +const ShowActions = () => { + const concluded_license = useRecordContext(); + + return ( + + + + {concluded_license?.product_data?.permissions?.includes(PERMISSION_CONCLUDED_LICENSE_DELETE) && ( + + )} + + + ); +}; + +const ConcludedLicenseComponent = () => { + return ( + ( + + + + + +   Concluded license + + + `/${reference}/${record.id}/show/licenses`} + sx={{ "& a": { textDecoration: "none" } }} + /> + + + + + {concluded_license.manual_concluded_spdx_license_id && ( + + + + )} + {concluded_license.manual_concluded_license_expression && ( + + + + )} + {concluded_license.manual_concluded_non_spdx_license && ( + + + + )} + + + + + + + {" "} + + + )} + /> + ); +}; + +const ConcludedLicenseShow = () => { + return ( + } component={ConcludedLicenseComponent}> + + + ); +}; + +export default ConcludedLicenseShow; diff --git a/frontend/src/licenses/concluded_licenses/index.ts b/frontend/src/licenses/concluded_licenses/index.ts new file mode 100644 index 000000000..df3acd3ce --- /dev/null +++ b/frontend/src/licenses/concluded_licenses/index.ts @@ -0,0 +1,8 @@ +import ConcludedLicenseIcon from "@mui/icons-material/Verified"; + +import ConcludedLicenseShow from "./ConcludedLicenseShow"; + +export default { + icon: ConcludedLicenseIcon, + show: ConcludedLicenseShow, +}; diff --git a/frontend/src/licenses/license_administration/LicenseAdministration.tsx b/frontend/src/licenses/license_administration/LicenseAdministration.tsx index 20b70ae8f..6b8fe4fe0 100644 --- a/frontend/src/licenses/license_administration/LicenseAdministration.tsx +++ b/frontend/src/licenses/license_administration/LicenseAdministration.tsx @@ -1,9 +1,12 @@ import { Box, Divider, Paper, Tab, Tabs } from "@mui/material"; +import { ReactNode } from "react"; import { Fragment } from "react"; import { Link, matchPath, useLocation } from "react-router-dom"; import administration from "."; import ListHeader from "../../commons/layout/ListHeader"; +import concluded_licenses from "../concluded_licenses"; +import ConcludedLicenseEmbeddedList from "../concluded_licenses/ConcludedLicenseEmbeddedList"; import license_groups from "../license_groups"; import LicenseGroupEmbeddedList from "../license_groups/LicenseGroupEmbeddedList"; import license_policies from "../license_policies"; @@ -24,7 +27,7 @@ function useRouteMatch(patterns: readonly string[]) { } interface TabPanelProps { - children?: React.ReactNode; + children?: ReactNode; index: number; value: number; } @@ -53,7 +56,12 @@ function a11yProps(index: number) { } export default function LicenseAdministration() { - const routeMatch = useRouteMatch(["/license/licenses", "/license/license_groups", "/license/license_policies"]); + const routeMatch = useRouteMatch([ + "/license/licenses", + "/license/license_groups", + 
"/license/license_policies", + "/license/concluded_licenses", + ]); function currentTab(): number { switch (routeMatch?.pattern?.path) { case "/license/licenses": { @@ -65,6 +73,9 @@ export default function LicenseAdministration() { case "/license/license_policies": { return 2; } + case "/license/concluded_licenses": { + return 3; + } default: { return 0; } @@ -77,7 +88,7 @@ export default function LicenseAdministration() { } to="/license/licenses" component={Link} @@ -100,6 +111,14 @@ export default function LicenseAdministration() { {...a11yProps(2)} // nosemgrep: typescript.react.best-practice.react-props-spreading.react-props-spreading // nosemgrep because the props are well defined in the import /> + } + to="/license/concluded_licenses" + component={Link} + {...a11yProps(3)} // nosemgrep: typescript.react.best-practice.react-props-spreading.react-props-spreading + // nosemgrep because the props are well defined in the import + /> @@ -111,6 +130,9 @@ export default function LicenseAdministration() { + + + ); diff --git a/frontend/src/licenses/license_component_evidences/LicenseComponentEvidenceShow.tsx b/frontend/src/licenses/license_component_evidences/LicenseComponentEvidenceShow.tsx index 99a561c2a..467859eba 100644 --- a/frontend/src/licenses/license_component_evidences/LicenseComponentEvidenceShow.tsx +++ b/frontend/src/licenses/license_component_evidences/LicenseComponentEvidenceShow.tsx @@ -12,8 +12,9 @@ import { useRecordContext, } from "react-admin"; +import license_component_evidences from "."; import { useStyles } from "../../commons/layout/themes"; -import { getSettingTheme } from "../../commons/user_settings/functions"; +import { getResolvedSettingTheme } from "../../commons/user_settings/functions"; const ShowActions = () => { const evidence = useRecordContext(); @@ -35,7 +36,10 @@ const LicenseComponentEvidenceShow = () => { return ( }> - License Component Evidence + + +   License Component Evidence + { collapseStringsAfterLength={false} enableClipboard={false} className={classes.displayFontSize} - theme={getSettingTheme() as JsonViewerTheme} + theme={getResolvedSettingTheme() as JsonViewerTheme} sx={{ padding: 1 }} /> diff --git a/frontend/src/licenses/license_components/ConcludedLicense.tsx b/frontend/src/licenses/license_components/ConcludedLicense.tsx new file mode 100644 index 000000000..6fd62f0ef --- /dev/null +++ b/frontend/src/licenses/license_components/ConcludedLicense.tsx @@ -0,0 +1,118 @@ +import PlaylistAddCheckIcon from "@mui/icons-material/PlaylistAddCheck"; +import { Dialog, DialogContent, DialogTitle } from "@mui/material"; +import { Fragment, useRef, useState } from "react"; +import { ReferenceInput, SimpleForm, useNotify, useRefresh } from "react-admin"; + +import SmallButton from "../../commons/custom_fields/SmallButton"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { validate_255 } from "../../commons/custom_validators"; +import { AutocompleteInputExtraWide, TextInputExtraWide } from "../../commons/layout/themes"; +import { httpClient } from "../../commons/ra-data-django-rest-framework"; + +const ConcludedLicense = () => { + const dialogRef = useRef(null); + const [open, setOpen] = useState(false); + const refresh = useRefresh(); + const notify = useNotify(); + + const concludedLicenseUpdate = async (data: any) => { + if (!data.manual_concluded_license_expression) { + data.manual_concluded_license_expression = ""; + } + if (!data.manual_concluded_non_spdx_license) { + 
data.manual_concluded_non_spdx_license = ""; + } + + const patch = { + manual_concluded_spdx_license: data.manual_concluded_spdx_license, + manual_concluded_license_expression: data.manual_concluded_license_expression, + manual_concluded_non_spdx_license: data.manual_concluded_non_spdx_license, + }; + + httpClient(window.__RUNTIME_CONFIG__.API_BASE_URL + "/license_components/" + data.id + "/concluded_license/", { + method: "PATCH", + body: JSON.stringify(patch), + }) + .then(() => { + refresh(); + notify("Concluded license updated", { + type: "success", + }); + setOpen(false); + }) + .catch((error) => { + notify(error.message, { + type: "warning", + }); + }); + }; + + const handleClose = (event: object, reason: string) => { + if (reason && reason == "backdropClick") return; + setOpen(false); + }; + + const handleCancel = () => setOpen(false); + const handleOpen = () => setOpen(true); + + const validateFields = (values: any) => { + const errors: any = {}; + + // check if only one field is set + const fields = [ + values.manual_concluded_spdx_license, + values.manual_concluded_license_expression, + values.manual_concluded_non_spdx_license, + ]; + const filledFields = fields.filter(Boolean); + if (filledFields.length > 1) { + if (values.manual_concluded_spdx_license) { + errors.manual_concluded_spdx_license = "Only one field must be set"; + } + if (values.manual_concluded_license_expression) { + errors.manual_concluded_license_expression = "Only one field must be set"; + } + if (values.manual_concluded_non_spdx_license) { + errors.manual_concluded_non_spdx_license = "Only one field must be set"; + } + } + + return errors; + }; + + return ( + + } /> + + Add / edit concluded license + + } + validate={validateFields} + > + + + + + + + + + + ); +}; + +export default ConcludedLicense; diff --git a/frontend/src/licenses/license_components/LicenseComponentBulkDeleteButton.tsx b/frontend/src/licenses/license_components/LicenseComponentBulkDeleteButton.tsx index 28aa30e11..c94ba12e4 100644 --- a/frontend/src/licenses/license_components/LicenseComponentBulkDeleteButton.tsx +++ b/frontend/src/licenses/license_components/LicenseComponentBulkDeleteButton.tsx @@ -15,7 +15,7 @@ const LicenseComponentBulkDeleteButton = (props: LicenseComponentBulkDeleteButto const refresh = useRefresh(); const [loading, setLoading] = useState(false); const notify = useNotify(); - const unselectAll = useUnselectAll("license_components"); + const unselectAll = useUnselectAll("license_components", "license_components.embedded"); const handleClick = () => setOpen(true); const handleDialogClose = () => setOpen(false); diff --git a/frontend/src/licenses/license_components/LicenseComponentEmbeddedList.tsx b/frontend/src/licenses/license_components/LicenseComponentEmbeddedList.tsx index baa5e3397..3daf103fd 100644 --- a/frontend/src/licenses/license_components/LicenseComponentEmbeddedList.tsx +++ b/frontend/src/licenses/license_components/LicenseComponentEmbeddedList.tsx @@ -1,6 +1,5 @@ -import { Fragment } from "react"; +import { Fragment, useEffect, useState } from "react"; import { - AutocompleteInput, Datagrid, FilterForm, FunctionField, @@ -17,11 +16,8 @@ import { import { PERMISSION_COMPONENT_LICENSE_DELETE } from "../../access_control/types"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; import { EvaluationResultField } from "../../commons/custom_fields/EvaluationResultField"; -import { humanReadableDate } from "../../commons/functions"; import { AutocompleteInputMedium } from 
"../../commons/layout/themes"; import { getSettingListSize } from "../../commons/user_settings/functions"; -import { PURL_TYPE_CHOICES } from "../../core/types"; -import { AGE_CHOICES } from "../../core/types"; import { EVALUATION_RESULT_CHOICES } from "../types"; import LicenseComponentBulkDeleteButton from "./LicenseComponentBulkDeleteButton"; @@ -29,6 +25,7 @@ type LicenseComponentEmbeddedListProps = { product: any; expand?: boolean; component_purl_type?: string; + origin_service?: number; }; const BulkActionButtons = (product: any) => ( @@ -40,53 +37,75 @@ const BulkActionButtons = (product: any) => ( ); const licenseNameStyle = (type: string): string => { - if (type === "" || type === "Non-SPDX") { + if (type === "" || type === "Non-SPDX" || type === "Multiple") { return "italic"; } return "normal"; }; -const LicenseComponentEmbeddedList = ({ product, expand, component_purl_type }: LicenseComponentEmbeddedListProps) => { +const LicenseComponentEmbeddedList = ({ + product, + expand, + component_purl_type, + origin_service, +}: LicenseComponentEmbeddedListProps) => { + const [initialExpand, setInitialExpand] = useState(true); + const showLicenseComponent = (id: any) => { return "../../../../license_components/" + id + "/show"; }; function listFilters() { const filters = []; - if (product && product.has_branches) { + if (product?.has_branches) { filters.push( ); } - filters.push(); filters.push( + , - ); - filters.push(); - filters.push( - , + , + + > + + ); - filters.push(); - + if (product?.has_services) { + filters.push( + + + + ); + } + if (product?.has_concluded_comments) { + filters.push(); + } return filters; } @@ -95,24 +114,32 @@ const LicenseComponentEmbeddedList = ({ product, expand, component_purl_type }: const record = useRecordContext(); if (expand) { - if (record && record.branch_name) { - filter = { ...filter, branch_name: record.branch_name }; + if (initialExpand) { + localStorage.removeItem("RaStore.license_components.embedded"); + setInitialExpand(false); + } + if (record?.branch_name) { + filter = { ...filter, branch_name_exact: record.branch_name }; } - if (record && record.license_name) { - filter = { ...filter, license_name_exact: record.license_name }; + if (record?.effective_license_name) { + filter = { ...filter, effective_license_name_exact: record.effective_license_name }; } - if (record && record.evaluation_result) { + if (record?.evaluation_result) { filter = { ...filter, evaluation_result: record.evaluation_result }; } if (component_purl_type) { filter = { ...filter, component_purl_type: component_purl_type }; } + if (origin_service) { + filter = { ...filter, origin_service: origin_service }; + } if (record) { const storedFilters = { branch_name: record.branch_name, - license_name: record.license_name, + effective_license_name: record.effective_license_name, evaluation_result: record.evaluation_result, component_purl_type: component_purl_type, + origin_service: origin_service, }; localStorage.setItem("license_component_expand_filters", JSON.stringify({ storedFilters })); } @@ -131,6 +158,13 @@ const LicenseComponentEmbeddedList = ({ product, expand, component_purl_type }: storeKey: "license_components.embedded", }); + useEffect(() => { + const storage = localStorage.getItem("RaStore.license_components.embedded"); + if (storage) { + localStorage.setItem("RaStore.license_components.overview", storage); + } + }, [listContext.filterValues, listContext.sort]); + if (listContext.isLoading) { return
<div>Loading...</div>
; } @@ -144,21 +178,22 @@ const LicenseComponentEmbeddedList = ({ product, expand, component_purl_type }: size={getSettingListSize()} rowClick={showLicenseComponent} bulkActionButtons={ - product && - product.permissions.includes(PERMISSION_COMPONENT_LICENSE_DELETE) && ( + product?.permissions.includes(PERMISSION_COMPONENT_LICENSE_DELETE) && ( ) } resource="license_components" > - {!expand && product && product.has_branches && ( + {!expand && product?.has_branches && ( )} ( - {record.license_name} + + {record.effective_license_name} + )} /> {!expand && ( @@ -168,13 +203,9 @@ const LicenseComponentEmbeddedList = ({ product, expand, component_purl_type }: sortable={true} /> )} - - - (record ? humanReadableDate(record.last_change) : "")} - /> + + {product?.has_services && } + {product?.has_concluded_comments && } ( diff --git a/frontend/src/licenses/license_components/LicenseComponentOverview.tsx b/frontend/src/licenses/license_components/LicenseComponentOverview.tsx index 2d0122e64..0aac9dca9 100644 --- a/frontend/src/licenses/license_components/LicenseComponentOverview.tsx +++ b/frontend/src/licenses/license_components/LicenseComponentOverview.tsx @@ -1,7 +1,6 @@ import { Paper } from "@mui/material"; import { useEffect, useState } from "react"; import { - AutocompleteInput, Datagrid, FilterForm, FunctionField, @@ -19,7 +18,6 @@ import { EvaluationResultField } from "../../commons/custom_fields/EvaluationRes import { AutocompleteInputMedium } from "../../commons/layout/themes"; import { httpClient } from "../../commons/ra-data-django-rest-framework"; import { getSettingListSize } from "../../commons/user_settings/functions"; -import { PURL_TYPE_CHOICES } from "../../core/types"; import { getElevation } from "../../metrics/functions"; import { EVALUATION_RESULT_CHOICES } from "../types"; import LicenseComponentEmbeddedList from "./LicenseComponentEmbeddedList"; @@ -29,7 +27,7 @@ type LicenseComponentOverviewProps = { }; const licenseNameStyle = (type: string): string => { - if (type === "" || type === "Non-SPDX") { + if (type === "" || type === "Non-SPDX" || type === "Multiple") { return "italic"; } return "normal"; @@ -43,21 +41,21 @@ const LicenseComponentOverview = ({ product }: LicenseComponentOverviewProps) => function listFilters(product: any) { const filters = []; - if (product && product.has_branches) { + if (product?.has_branches) { filters.push( ); } - filters.push(); + filters.push(); filters.push( /> ); filters.push( - + > + +
); + if (product?.has_services) { + filters.push( + + + + ); + } return filters; } @@ -109,16 +123,18 @@ const LicenseComponentOverview = ({ product }: LicenseComponentOverviewProps) => const filterStorage: { [key: string]: any } = {}; const filter = { branch: listContext.filterValues.branch, - license_name: listContext.filterValues.license_name, + effective_license_name: listContext.filterValues.effective_license_name, evaluation_result: listContext.filterValues.evaluation_result, component_purl_type: listContext.filterValues.component_purl_type, + origin_service: listContext.filterValues.origin_service, }; - filterStorage["filter"] = filter; + filterStorage.filter = filter; if (listContext.sort.field) { - filterStorage["sort"] = listContext.sort.field; - filterStorage["order"] = listContext.sort.order; + filterStorage.sort = listContext.sort.field; + filterStorage.order = listContext.sort.order; } localStorage.setItem("RaStore.license_components.overview", JSON.stringify(filterStorage)); + localStorage.setItem("RaStore.license_components.embedded", JSON.stringify(filterStorage)); } function get_data() { @@ -132,8 +148,8 @@ const LicenseComponentOverview = ({ product }: LicenseComponentOverviewProps) => if (filter.branch) { url += "&branch=" + filter.branch; } - if (filter.license_name) { - url += "&license_name=" + encodeURIComponent(filter.license_name); + if (filter.effective_license_name) { + url += "&effective_license_name=" + encodeURIComponent(filter.effective_license_name); } if (filter.evaluation_result) { url += "&evaluation_result=" + encodeURIComponent(filter.evaluation_result); @@ -141,6 +157,9 @@ const LicenseComponentOverview = ({ product }: LicenseComponentOverviewProps) => if (filter.component_purl_type) { url += "&component_purl_type=" + encodeURIComponent(filter.component_purl_type); } + if (filter.origin_service) { + url += "&origin_service=" + encodeURIComponent(filter.origin_service); + } if (listContext.sort.field) { url += "&ordering=" + (listContext.sort.order === "ASC" ? 
"" : "-") + listContext.sort.field; @@ -186,17 +205,20 @@ const LicenseComponentOverview = ({ product }: LicenseComponentOverviewProps) => product={product} expand={true} component_purl_type={listContext.filterValues.component_purl_type} + origin_service={listContext.filterValues.origin_service} /> } expandSingle > - {product && product.has_branches && } + {product?.has_branches && } ( - {record.license_name} + + {record.effective_license_name} + )} /> diff --git a/frontend/src/licenses/license_components/LicenseComponentShow.tsx b/frontend/src/licenses/license_components/LicenseComponentShow.tsx index 66c9f2eb0..c6ca39d60 100644 --- a/frontend/src/licenses/license_components/LicenseComponentShow.tsx +++ b/frontend/src/licenses/license_components/LicenseComponentShow.tsx @@ -1,22 +1,12 @@ -import { Box, Paper, Stack, Typography } from "@mui/material"; +import { Box, Paper, Stack } from "@mui/material"; import { Fragment } from "react"; -import { - Labeled, - PrevNextButtons, - ReferenceField, - Show, - TextField, - TopToolbar, - WithRecord, - useRecordContext, -} from "react-admin"; +import { PrevNextButtons, Show, TopToolbar, WithRecord, useRecordContext } from "react-admin"; -import { EvaluationResultField } from "../../commons/custom_fields/EvaluationResultField"; -import TextUrlField from "../../commons/custom_fields/TextUrlField"; -import { get_component_purl_url } from "../../commons/functions"; -import { useStyles } from "../../commons/layout/themes"; -import MermaidDependencies from "../../core/observations/Mermaid_Dependencies"; +import { PERMISSION_COMPONENT_LICENSE_EDIT } from "../../access_control/types"; +import ComponentShowComponent from "../../core/components/ComponentShowComponent"; +import ConcludedLicense from "./ConcludedLicense"; import LicenseComponentShowAside from "./LicenseComponentShowAside"; +import LicenseComponentShowLicense from "./LicenseComponentShowLicense"; const ShowActions = () => { const license_component = useRecordContext(); @@ -25,19 +15,28 @@ const ShowActions = () => { // eslint-disable-next-line @typescript-eslint/consistent-indexed-object-style const filter: { [key: string]: any } = {}; if (license_component) { - filter["product"] = Number(license_component.product); + filter.product = Number(license_component.product); } const license_component_expand_filters = localStorage.getItem("license_component_expand_filters"); const storedFilters = license_component_expand_filters ? 
JSON.parse(license_component_expand_filters) : {}; if (storedFilters.storedFilters) { if (storedFilters.storedFilters.branch_name) { - filter["branch_name"] = storedFilters.storedFilters.branch_name; + filter.branch_name_exact = storedFilters.storedFilters.branch_name; } - if (storedFilters.storedFilters.license_name) { - filter["license_name_exact"] = storedFilters.storedFilters.license_name; + if (storedFilters.storedFilters.effective_license_name) { + filter.effective_license_name_exact = storedFilters.storedFilters.effective_license_name; } if (storedFilters.storedFilters.evaluation_result) { - filter["evaluation_result"] = storedFilters.storedFilters.evaluation_result; + filter.evaluation_result = storedFilters.storedFilters.evaluation_result; + } + } else { + if ( + localStorage.getItem("RaStore.license_components.embedded") === null && + license_component?.branch_name !== null && + license_component?.branch_name !== undefined && + license_component?.branch_name !== "" + ) { + filter.branch_name_exact = license_component.branch_name; } } return filter; @@ -45,142 +44,32 @@ const ShowActions = () => { return ( - {license_component && ( - - )} + + {license_component && ( + + )} + {license_component?.permissions?.includes(PERMISSION_COMPONENT_LICENSE_EDIT) && } + ); }; export const LicenseComponentComponent = () => { - const { classes } = useStyles(); - return ( ( - - License - - - {component.license && ( - - - - - - - - - - - )} - {component.license_expression && ( - - - - )} - {component.non_spdx_license && ( - - - - )} - {!component.license && !component.license_expression && !component.non_spdx_license && ( - - - - )} - - - - + - - Component - - {component.component_name != "" && ( - - - - )} - {component.component_version != "" && ( - - - - )} - - {component.component_purl != "" && - get_component_purl_url( - component.component_name, - component.component_version, - component.component_purl_type, - component.component_purl_namespace - ) == null && ( - - - - )} - {component.component_purl != "" && - get_component_purl_url( - component.component_name, - component.component_version, - component.component_purl_type, - component.component_purl_namespace - ) != null && ( - - - - )} - {component.component_cpe != "" && ( - - - - )} - {component.component_dependencies && component.component_dependencies != "" && ( - - )} - + )} diff --git a/frontend/src/licenses/license_components/LicenseComponentShowAside.tsx b/frontend/src/licenses/license_components/LicenseComponentShowAside.tsx index 447d27676..26e49219e 100644 --- a/frontend/src/licenses/license_components/LicenseComponentShowAside.tsx +++ b/frontend/src/licenses/license_components/LicenseComponentShowAside.tsx @@ -5,7 +5,7 @@ import { Link } from "react-router-dom"; import TextUrlField from "../../commons/custom_fields/TextUrlField"; import { useLinkStyles } from "../../commons/layout/themes"; -import { getSettingTheme } from "../../commons/user_settings/functions"; +import { getResolvedSettingTheme } from "../../commons/user_settings/functions"; const LicenseComponentShowAside = () => { return ( @@ -48,6 +48,11 @@ const MetaData = () => { )} + {component.origin_service_name && ( + + + + )} {component.upload_filename != "" && ( @@ -72,7 +77,7 @@ const MetaData = () => { const EmptyDatagridHeader = () => ; const Evidences = () => { - const { classes } = useLinkStyles({ setting_theme: getSettingTheme() }); + const { classes } = useLinkStyles({ setting_theme: getResolvedSettingTheme() }); return ( ( diff --git 
a/frontend/src/licenses/license_components/LicenseComponentShowLicense.tsx b/frontend/src/licenses/license_components/LicenseComponentShowLicense.tsx new file mode 100644 index 000000000..e88da1e1b --- /dev/null +++ b/frontend/src/licenses/license_components/LicenseComponentShowLicense.tsx @@ -0,0 +1,201 @@ +import { Stack, Typography } from "@mui/material"; +import { Labeled, RecordContextProvider, ReferenceField, TextField } from "react-admin"; + +import license_components from "."; +import { EvaluationResultField } from "../../commons/custom_fields/EvaluationResultField"; +import { useStyles } from "../../commons/layout/themes"; + +type LicenseComponentShowLicenseProps = { + licenseComponent: any; + direction: "row" | "column"; +}; + +const LicenseComponentShowLicense = ({ licenseComponent, direction }: LicenseComponentShowLicenseProps) => { + const { classes } = useStyles(); + + let spacing = 8; + if (direction == "column") { + spacing = 2; + classes.fontBigBold = ""; + } + + return ( + + {direction === "row" && ( + + +   License + + )} + {direction === "column" && ( + + License + + )} + + + {licenseComponent.imported_declared_license_name === "No license information" && + licenseComponent.imported_concluded_license_name === "No license information" && + licenseComponent.manual_concluded_license_name === "No license information" && ( + + + + )} + {licenseComponent.imported_declared_spdx_license && ( + + + + + + )} + {licenseComponent.imported_declared_license_expression && ( + + + + )} + {licenseComponent.imported_declared_non_spdx_license && ( + + + + )} + {licenseComponent.imported_declared_multiple_licenses && ( + + + + )} + {!licenseComponent.imported_declared_spdx_license && + !licenseComponent.imported_declared_license_expression && + !licenseComponent.imported_declared_non_spdx_license && + !licenseComponent.imported_declared_multiple_licenses && + licenseComponent.imported_declared_license_name !== "No license information" && ( + + + + )} + {licenseComponent.imported_concluded_spdx_license && ( + + + + + + )} + {licenseComponent.imported_concluded_license_expression && ( + + + + )} + {licenseComponent.imported_concluded_non_spdx_license && ( + + + + )} + {licenseComponent.imported_concluded_multiple_licenses && ( + + + + )} + {!licenseComponent.imported_concluded_spdx_license && + !licenseComponent.imported_concluded_license_expression && + !licenseComponent.imported_concluded_non_spdx_license && + !licenseComponent.imported_concluded_multiple_licenses && + licenseComponent.imported_concluded_license_name !== "No license information" && ( + + + + )} + + {licenseComponent.manual_concluded_spdx_license && ( + + + + + + )} + {licenseComponent.manual_concluded_license_expression && ( + + + + )} + {licenseComponent.manual_concluded_non_spdx_license && ( + + + + )} + {!licenseComponent.manual_concluded_spdx_license && + !licenseComponent.manual_concluded_license_expression && + !licenseComponent.manual_concluded_non_spdx_license && + licenseComponent.manual_concluded_license_name !== "No license information" && ( + + + + )} + {licenseComponent.manual_concluded_comment && ( + + + + )} + + + + + + + + ); +}; + +export default LicenseComponentShowLicense; diff --git a/frontend/src/licenses/license_components/ProductShowLicenseComponents.tsx b/frontend/src/licenses/license_components/ProductShowLicenseComponents.tsx index 174ba5227..f2da23e84 100644 --- a/frontend/src/licenses/license_components/ProductShowLicenseComponents.tsx +++ 
b/frontend/src/licenses/license_components/ProductShowLicenseComponents.tsx @@ -56,7 +56,7 @@ const ProductShowLicenseComponents = ({ product }: ProductShowLicenseComponentsP helperText={false} sx={{ width: "fit-content", margin: 0 }} /> - {product && product.license_policy && ( + {product?.license_policy && ( )} - {product && product.permissions.includes(PERMISSION_PRODUCT_EDIT) && ( - - )} + {product?.permissions.includes(PERMISSION_PRODUCT_EDIT) && } {product && !product.permissions.includes(PERMISSION_PRODUCT_EDIT) && ( )} diff --git a/frontend/src/licenses/license_components/index.ts b/frontend/src/licenses/license_components/index.ts index 90c390013..d290f06a7 100644 --- a/frontend/src/licenses/license_components/index.ts +++ b/frontend/src/licenses/license_components/index.ts @@ -1,5 +1,8 @@ +import GradingIcon from "@mui/icons-material/Grading"; + import LicenseComponentShow from "./LicenseComponentShow"; export default { show: LicenseComponentShow, + icon: GradingIcon, }; diff --git a/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberAdd.tsx b/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberAdd.tsx index 9e2a47bed..8e3438a8d 100644 --- a/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberAdd.tsx +++ b/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberAdd.tsx @@ -108,8 +108,9 @@ const LicenseGroupAuthorizationGroupMemberAdd = ({ id }: LicenseGroupAuthorizati /> setIsManager(e.target.checked)} /> diff --git a/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberEdit.tsx b/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberEdit.tsx index 4f543030e..d8abf7d13 100644 --- a/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberEdit.tsx +++ b/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberEdit.tsx @@ -1,10 +1,9 @@ import { Dialog, DialogContent, DialogTitle } from "@mui/material"; import { Fragment, useState } from "react"; -import { BooleanInput, SaveButton, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; +import { BooleanInput, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import EditButton from "../../commons/custom_fields/EditButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { TextInputWide } from "../../commons/layout/themes"; const LicenseGroupAuthorizationGroupMemberEdit = () => { @@ -48,20 +47,13 @@ const LicenseGroupAuthorizationGroupMemberEdit = () => { setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); - return ( Edit authorization group - }> + }> diff --git a/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberEmbeddedList.tsx b/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberEmbeddedList.tsx index 08155335b..fde15fc9c 100644 --- a/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberEmbeddedList.tsx +++ 
b/frontend/src/licenses/license_group_authorization_group_members/LicenseGroupAuthorizationGroupMemberEmbeddedList.tsx @@ -4,7 +4,6 @@ import { BooleanField, Datagrid, FilterForm, - Identifier, ListContextProvider, NullableBooleanInput, ResourceContextProvider, @@ -13,8 +12,8 @@ import { useListController, } from "react-admin"; +import { AuthorizationGroupNameURLField } from "../../commons/custom_fields/AuthorizationGroupNameURLField"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; -import TextUrlField from "../../commons/custom_fields/TextUrlField"; import { is_superuser } from "../../commons/functions"; import { getSettingListSize } from "../../commons/user_settings/functions"; import LicenseGroupAuthorizationGroupMemberAdd from "./LicenseGroupAuthorizationGroupMemberAdd"; @@ -28,10 +27,6 @@ function listFilters() { ]; } -const showAuthorizationGroup = (id: Identifier) => { - return "#/authorization_groups/" + id + "/show"; -}; - type LicenseGroupAuthorizationGroupMemberEmbeddedListProps = { license_group: any; }; @@ -68,17 +63,9 @@ const LicenseGroupAuthorizationGroupMemberEmbeddedList = ({ bulkActionButtons={false} resource="license_group_authorization_group_members" > - ( - - )} + {(is_superuser() || license_group.is_manager) && ( diff --git a/frontend/src/licenses/license_group_licenses/LicenseGroupLicenseAdd.tsx b/frontend/src/licenses/license_group_licenses/LicenseGroupLicenseAdd.tsx index 3f26b9241..5c7fc63bb 100644 --- a/frontend/src/licenses/license_group_licenses/LicenseGroupLicenseAdd.tsx +++ b/frontend/src/licenses/license_group_licenses/LicenseGroupLicenseAdd.tsx @@ -84,11 +84,11 @@ const LicenseGroupLicenseAdd = ({ id }: LicenseGroupLicenseAddProps) => { setLicense(e)} diff --git a/frontend/src/licenses/license_group_licenses/LicenseGroupLicenseEmbeddedList.tsx b/frontend/src/licenses/license_group_licenses/LicenseGroupLicenseEmbeddedList.tsx index 2b425cc56..eda69e3a0 100644 --- a/frontend/src/licenses/license_group_licenses/LicenseGroupLicenseEmbeddedList.tsx +++ b/frontend/src/licenses/license_group_licenses/LicenseGroupLicenseEmbeddedList.tsx @@ -2,6 +2,7 @@ import { Fragment } from "react"; import { BooleanField, Datagrid, + FieldProps, FilterForm, Identifier, ListContextProvider, @@ -11,6 +12,7 @@ import { TextInput, WithRecord, useListController, + useRecordContext, } from "react-admin"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; @@ -29,6 +31,11 @@ function listFilters() { ]; } +export const LicenseIDURLField = (props: FieldProps) => { + const record = useRecordContext(props); + return record ? 
: null; +}; + const showLicense = (id: Identifier) => { return "#/licenses/" + id + "/show"; }; @@ -66,16 +73,7 @@ const LicenseGroupLicenseEmbeddedList = ({ license_group }: LicenseGroupLicenseE bulkActionButtons={false} resource="users" > - ( - - )} - /> + diff --git a/frontend/src/licenses/license_group_members/LicenseGroupMemberAdd.tsx b/frontend/src/licenses/license_group_members/LicenseGroupMemberAdd.tsx index 323db3857..5b6d112da 100644 --- a/frontend/src/licenses/license_group_members/LicenseGroupMemberAdd.tsx +++ b/frontend/src/licenses/license_group_members/LicenseGroupMemberAdd.tsx @@ -108,8 +108,9 @@ const LicenseGroupMemberAdd = ({ id }: LicenseGroupMemberAddProps) => { /> setIsManager(e.target.checked)} /> diff --git a/frontend/src/licenses/license_group_members/LicenseGroupMemberEdit.tsx b/frontend/src/licenses/license_group_members/LicenseGroupMemberEdit.tsx index 1292eedc7..3414f8ab0 100644 --- a/frontend/src/licenses/license_group_members/LicenseGroupMemberEdit.tsx +++ b/frontend/src/licenses/license_group_members/LicenseGroupMemberEdit.tsx @@ -1,10 +1,9 @@ import { Dialog, DialogContent, DialogTitle } from "@mui/material"; import { Fragment, useState } from "react"; -import { BooleanInput, SaveButton, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; +import { BooleanInput, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import EditButton from "../../commons/custom_fields/EditButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { TextInputWide } from "../../commons/layout/themes"; const LicenseGroupMemberEdit = () => { @@ -48,20 +47,13 @@ const LicenseGroupMemberEdit = () => { setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); - return ( Edit user - }> + }> diff --git a/frontend/src/licenses/license_group_members/LicenseGroupMemberEmbeddedList.tsx b/frontend/src/licenses/license_group_members/LicenseGroupMemberEmbeddedList.tsx index 97e9a099c..9ed28bab4 100644 --- a/frontend/src/licenses/license_group_members/LicenseGroupMemberEmbeddedList.tsx +++ b/frontend/src/licenses/license_group_members/LicenseGroupMemberEmbeddedList.tsx @@ -4,7 +4,6 @@ import { BooleanField, Datagrid, FilterForm, - Identifier, ListContextProvider, NullableBooleanInput, ResourceContextProvider, @@ -14,7 +13,7 @@ import { } from "react-admin"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; -import TextUrlField from "../../commons/custom_fields/TextUrlField"; +import { UserFullNameURLField } from "../../commons/custom_fields/UserFullNameURLField"; import { is_superuser } from "../../commons/functions"; import { getSettingListSize } from "../../commons/user_settings/functions"; import LicenseGroupMemberAdd from "./LicenseGroupMemberAdd"; @@ -29,10 +28,6 @@ function listFilters() { ]; } -const showUser = (id: Identifier) => { - return "#/users/" + id + "/show"; -}; - type LicenseGroupMemberEmbeddedListProps = { license_group: any; }; @@ -65,26 +60,7 @@ const LicenseGroupMemberEmbeddedList = ({ license_group }: LicenseGroupMemberEmb bulkActionButtons={false} resource="users" > - ( - - )} - /> - ( - - )} - /> + {(is_superuser() || license_group.is_manager) && ( { - const [open, setOpen] = useState(false); - const [loading, setLoading] = useState(false); - const refresh = useRefresh(); - const notify = useNotify(); - const handleClick = () => 
setOpen(true); - const handleDialogClose = () => setOpen(false); - - const importScanCodeLicenseDB = async () => { - setLoading(true); - const url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/license_groups/import_scancode_licensedb/"; - httpClient(url, { - method: "POST", - }) - .then(() => { - refresh(); - setLoading(false); - notify("ScanCode LicenseDB imported", { type: "success" }); - }) - .catch((error) => { - setLoading(false); - notify(error.message, { type: "warning" }); - }); - - setOpen(false); - }; - - return ( - <> -
); diff --git a/frontend/src/licenses/license_policies/LicensePolicyCopy.tsx b/frontend/src/licenses/license_policies/LicensePolicyCopy.tsx index 32f033fd5..aca46cded 100644 --- a/frontend/src/licenses/license_policies/LicensePolicyCopy.tsx +++ b/frontend/src/licenses/license_policies/LicensePolicyCopy.tsx @@ -1,12 +1,11 @@ import LibraryAddIcon from "@mui/icons-material/LibraryAdd"; import { Dialog, DialogContent, DialogTitle } from "@mui/material"; import { Fragment, useState } from "react"; -import { CreateBase, SaveButton, SimpleForm, useNotify, useRefresh } from "react-admin"; +import { CreateBase, SimpleForm, useNotify, useRefresh } from "react-admin"; import { useNavigate } from "react-router"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import SmallButton from "../../commons/custom_fields/SmallButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_required } from "../../commons/custom_validators"; import { TextInputWide } from "../../commons/layout/themes"; import { httpClient } from "../../commons/ra-data-django-rest-framework"; @@ -27,13 +26,6 @@ const LicensePolicyCopy = ({ license_policy }: LicensePolicyCopyProps) => { setOpen(false); }; - const CustomToolbar = () => ( - - - } /> - - ); - const copyLicensePolicy = (data: any) => { const url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/license_policies/" + license_policy.id + "/copy/"; const body = JSON.stringify({ name: data.new_name }); @@ -58,7 +50,16 @@ const LicensePolicyCopy = ({ license_policy }: LicensePolicyCopyProps) => { Copy license policy - }> + } + /> + } + > diff --git a/frontend/src/licenses/license_policies/LicensePolicyCreate.tsx b/frontend/src/licenses/license_policies/LicensePolicyCreate.tsx index ce7acaad4..61be4d7eb 100644 --- a/frontend/src/licenses/license_policies/LicensePolicyCreate.tsx +++ b/frontend/src/licenses/license_policies/LicensePolicyCreate.tsx @@ -1,34 +1,29 @@ import { Typography } from "@mui/material"; +import { useState } from "react"; import { BooleanInput, Create, ReferenceInput, SimpleForm } from "react-admin"; -import { validate_255, validate_2048, validate_required_255 } from "../../commons/custom_validators"; +import license_policies from "."; +import MarkdownEdit from "../../commons/custom_fields/MarkdownEdit"; +import { validate_255, validate_required_255 } from "../../commons/custom_validators"; import { AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; const LicensePolicyCreate = () => { + const [description, setDescription] = useState(""); const transform = (data: any) => { - if (!data.description) { - data.description = ""; - } - if (!data.ignore_component_types) { - data.ignore_component_types = ""; - } + data.description = description; + data.ignore_component_types ??= ""; return data; }; return ( - - License Policy + + +   License Policy - + { return ( - + ); }; +interface LicensePolicyEditFormProps { + setDescription: (value: string) => void; +} + +const LicensePolicyEditForm = ({ setDescription }: LicensePolicyEditFormProps) => { + const license_policy = useRecordContext(); + const [descriptionSet, setDescriptionSet] = useState(false); + + if (!descriptionSet && license_policy) { + setDescription(license_policy.description); + setDescriptionSet(true); + } + + return ( + }> + + +   License Policy + + + + ( + + {!license_policy.is_parent && ( + + + + )} + + )} + /> + + + + ); +}; const LicensePolicyEdit = () => 
{ + const [description, setDescription] = useState(""); + const transform = (data: any) => { - if (!data.description) { - data.description = ""; - } - if (!data.ignore_component_types) { - data.ignore_component_types = ""; - } + data.description = description; + data.ignore_component_types ??= ""; return data; }; return ( - }> - - License Policy - - - - ( - - {!license_policy.is_parent && ( - - - - )} - - )} - /> - - - + ); }; diff --git a/frontend/src/licenses/license_policies/LicensePolicyShow.tsx b/frontend/src/licenses/license_policies/LicensePolicyShow.tsx index e57b4e191..fedc88e51 100644 --- a/frontend/src/licenses/license_policies/LicensePolicyShow.tsx +++ b/frontend/src/licenses/license_policies/LicensePolicyShow.tsx @@ -1,4 +1,5 @@ -import { Box, Paper, Stack, Typography } from "@mui/material"; +import ExpandMoreIcon from "@mui/icons-material/ExpandMore"; +import { Accordion, AccordionDetails, AccordionSummary, Box, Paper, Stack, Typography } from "@mui/material"; import { Fragment } from "react"; import { BooleanField, @@ -13,6 +14,7 @@ import { useRecordContext, } from "react-admin"; +import license_policies from "."; import MarkdownField from "../../commons/custom_fields/MarkdownField"; import { is_external, is_superuser } from "../../commons/functions"; import { useStyles } from "../../commons/layout/themes"; @@ -43,7 +45,7 @@ const ShowActions = () => { {license_policy && (!is_external() || is_superuser()) && ( )} - {((license_policy && license_policy.is_manager) || is_superuser()) && } + {(license_policy?.is_manager || is_superuser()) && } ); @@ -57,8 +59,9 @@ const LicensePolicyComponent = () => { render={(license_policy) => ( - - License Policy + + +   License Policy @@ -97,34 +100,42 @@ const LicensePolicyComponent = () => { - - - Users - - - - - - Authorization Groups - - - {license_policy.has_product_groups && ( - - - Product Groups using this license policy - - - + + }> + Product Groups using this license policy + + + + + )} {license_policy.has_products && ( - - - Products using this license policy - - - + + }> + Products using this license policy + + + + + )} + + }> + Users + + + + + + + }> + Authorization Groups + + + + + )} /> diff --git a/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberAdd.tsx b/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberAdd.tsx index ac0c7d512..319412cdc 100644 --- a/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberAdd.tsx +++ b/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberAdd.tsx @@ -108,8 +108,9 @@ const LicensePolicyAuthorizationGroupMemberAdd = ({ id }: LicensePolicyAuthoriza /> setIsManager(e.target.checked)} /> diff --git a/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberEdit.tsx b/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberEdit.tsx index a8f7120d2..8b31e5b35 100644 --- a/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberEdit.tsx +++ b/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberEdit.tsx @@ -1,10 +1,9 @@ import { Dialog, DialogContent, DialogTitle } from "@mui/material"; import { Fragment, useState } from "react"; -import { BooleanInput, SaveButton, SimpleForm, useNotify, useRefresh, useUpdate } from 
"react-admin"; +import { BooleanInput, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import EditButton from "../../commons/custom_fields/EditButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { TextInputWide } from "../../commons/layout/themes"; const LicensePolicyAuthorizationGroupMemberEdit = () => { @@ -48,20 +47,13 @@ const LicensePolicyAuthorizationGroupMemberEdit = () => { setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); - return ( Edit authorization group - }> + }> diff --git a/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberEmbeddedList.tsx b/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberEmbeddedList.tsx index 1c81e6177..4cc1212d9 100644 --- a/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberEmbeddedList.tsx +++ b/frontend/src/licenses/license_policy_authorization_group_members/LicensePolicyAuthorizationGroupMemberEmbeddedList.tsx @@ -4,7 +4,6 @@ import { BooleanField, Datagrid, FilterForm, - Identifier, ListContextProvider, NullableBooleanInput, ResourceContextProvider, @@ -13,8 +12,8 @@ import { useListController, } from "react-admin"; +import { AuthorizationGroupNameURLField } from "../../commons/custom_fields/AuthorizationGroupNameURLField"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; -import TextUrlField from "../../commons/custom_fields/TextUrlField"; import { is_superuser } from "../../commons/functions"; import { getSettingListSize } from "../../commons/user_settings/functions"; import LicensePolicyAuthorizationGroupMemberAdd from "./LicensePolicyAuthorizationGroupMemberAdd"; @@ -28,10 +27,6 @@ function listFilters() { ]; } -const showAuthorizationGroup = (id: Identifier) => { - return "#/authorization_groups/" + id + "/show"; -}; - type LicensePolicyAuthorizationGroupMemberEmbeddedListProps = { license_policy: any; }; @@ -68,17 +63,9 @@ const LicensePolicyAuthorizationGroupMemberEmbeddedList = ({ bulkActionButtons={false} resource="license_policy_authorization_group_members" > - ( - - )} + {(is_superuser() || license_policy.is_manager) && ( diff --git a/frontend/src/licenses/license_policy_items/LicensePolicyItemAdd.tsx b/frontend/src/licenses/license_policy_items/LicensePolicyItemAdd.tsx index ceb63e292..4abf037f8 100644 --- a/frontend/src/licenses/license_policy_items/LicensePolicyItemAdd.tsx +++ b/frontend/src/licenses/license_policy_items/LicensePolicyItemAdd.tsx @@ -30,11 +30,11 @@ const LicensePolicyItemAdd = ({ id }: LicensePolicyItemAddProps) => { setOpen(false); }; - const [license_group, setLicenseGroup] = useState(); + const [licenseGroup, setLicenseGroup] = useState(); const [license, setLicense] = useState(); - const [license_expression, setLicenseExpression] = useState(); - const [non_spdx_license, setNonSPDXLicense] = useState(); - const [evaluation_result, setEvaluationResult] = useState(); + const [licenseExpression, setLicenseExpression] = useState(); + const [nonSPDXLicense, setNonSPDXLicense] = useState(); + const [evaluationResult, setEvaluationResult] = useState(); const [comment, setComment] = useState(); const resetState = () => { @@ -61,11 +61,11 @@ const LicensePolicyItemAdd = ({ id }: LicensePolicyItemAddProps) => { const setData = () => { 
const data = { - license_group: license_group, + license_group: licenseGroup, license: license, - license_expression: license_expression, - non_spdx_license: non_spdx_license, - evaluation_result: evaluation_result, + license_expression: licenseExpression, + non_spdx_license: nonSPDXLicense, + evaluation_result: evaluationResult, comment: comment, }; return data; @@ -130,11 +130,14 @@ const LicensePolicyItemAdd = ({ id }: LicensePolicyItemAddProps) => { - setLicense(e)} /> + setLicense(e)} + /> ( - - - - - ); return ( Edit license policy item - }> + }> - + , - , + , , , { + const record = useRecordContext(props); + return record ? ( + + ) : null; +}; + const showLicenseGroup = (id: Identifier) => { return "#/license_groups/" + id + "/show"; }; +export const LicenseIDURLField = (props: FieldProps) => { + const record = useRecordContext(props); + return record ? : null; +}; + const showLicense = (id: Identifier) => { return "#/licenses/" + id + "/show"; }; @@ -86,26 +104,8 @@ const LicensePolicyItemEmbeddedList = ({ license_policy }: LicensePolicyItemEmbe bulkActionButtons={false} resource="license_policy_item" > - ( - - )} - /> - ( - - )} - /> + + diff --git a/frontend/src/licenses/license_policy_members/LicensePolicyMemberAdd.tsx b/frontend/src/licenses/license_policy_members/LicensePolicyMemberAdd.tsx index e28732b3e..3d657af8f 100644 --- a/frontend/src/licenses/license_policy_members/LicensePolicyMemberAdd.tsx +++ b/frontend/src/licenses/license_policy_members/LicensePolicyMemberAdd.tsx @@ -30,7 +30,7 @@ const LicensePolicyMemberAdd = ({ id }: LicensePolicyMemberAddProps) => { }; const [user, setUser] = useState(); - const [is_manager, setIsManager] = useState(false); + const [isManager, setIsManager] = useState(false); const resetState = () => { setUser(undefined); setIsManager(false); @@ -43,7 +43,7 @@ const LicensePolicyMemberAdd = ({ id }: LicensePolicyMemberAddProps) => { e.preventDefault(); // necessary to prevent default SaveButton submit logic const data = { user: user, - is_manager: is_manager, + is_manager: isManager, }; add_user(data, false); }; @@ -52,7 +52,7 @@ const LicensePolicyMemberAdd = ({ id }: LicensePolicyMemberAddProps) => { e.preventDefault(); // necessary to prevent default SaveButton submit logic const data = { user: user, - is_manager: is_manager, + is_manager: isManager, }; add_user(data, true); }; @@ -109,8 +109,9 @@ const LicensePolicyMemberAdd = ({ id }: LicensePolicyMemberAddProps) => { /> setIsManager(e.target.checked)} /> diff --git a/frontend/src/licenses/license_policy_members/LicensePolicyMemberEdit.tsx b/frontend/src/licenses/license_policy_members/LicensePolicyMemberEdit.tsx index a98d16e39..49fcd2bcb 100644 --- a/frontend/src/licenses/license_policy_members/LicensePolicyMemberEdit.tsx +++ b/frontend/src/licenses/license_policy_members/LicensePolicyMemberEdit.tsx @@ -1,10 +1,9 @@ import { Dialog, DialogContent, DialogTitle } from "@mui/material"; import { Fragment, useState } from "react"; -import { BooleanInput, SaveButton, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; +import { BooleanInput, SimpleForm, useNotify, useRefresh, useUpdate } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import EditButton from "../../commons/custom_fields/EditButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { TextInputWide } from "../../commons/layout/themes"; const LicensePolicyMemberEdit = () => 
{ @@ -48,19 +47,13 @@ const LicensePolicyMemberEdit = () => { setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); return ( Edit user - }> + }> diff --git a/frontend/src/licenses/license_policy_members/LicensePolicyMemberEmbeddedList.tsx b/frontend/src/licenses/license_policy_members/LicensePolicyMemberEmbeddedList.tsx index cd291563a..f2451dfbb 100644 --- a/frontend/src/licenses/license_policy_members/LicensePolicyMemberEmbeddedList.tsx +++ b/frontend/src/licenses/license_policy_members/LicensePolicyMemberEmbeddedList.tsx @@ -4,7 +4,6 @@ import { BooleanField, Datagrid, FilterForm, - Identifier, ListContextProvider, NullableBooleanInput, ResourceContextProvider, @@ -14,7 +13,7 @@ import { } from "react-admin"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; -import TextUrlField from "../../commons/custom_fields/TextUrlField"; +import { UserFullNameURLField } from "../../commons/custom_fields/UserFullNameURLField"; import { is_superuser } from "../../commons/functions"; import { getSettingListSize } from "../../commons/user_settings/functions"; import LicensePolicyMemberAdd from "./LicensePolicyMemberAdd"; @@ -29,10 +28,6 @@ function listFilters() { ]; } -const showUser = (id: Identifier) => { - return "#/users/" + id + "/show"; -}; - type LicensePolicyMemberEmbeddedListProps = { license_policy: any; }; @@ -65,26 +60,7 @@ const LicensePolicyMemberEmbeddedList = ({ license_policy }: LicensePolicyMember bulkActionButtons={false} resource="users" > - ( - - )} - /> - ( - - )} - /> + {(is_superuser() || license_policy.is_manager) && ( { - License + + +   SPDX License + @@ -44,6 +49,7 @@ const LicenseComponent = () => { text={license.reference} url={license.reference} label="Reference" + new_tab={true} /> )} @@ -51,24 +57,28 @@ const LicenseComponent = () => { {" "} {license.is_in_license_group && ( - - - License Groups containing this license - - } /> - + + }> + License Groups containing this license + + + } /> + + )} {license.is_in_license_policy && ( - - - License Policies containing this license - - ( - - )} - /> - + + }> + License Policies containing this license + + + ( + + )} + /> + + )} )} diff --git a/frontend/src/metrics/MetricsSeveritiesCurrent.tsx b/frontend/src/metrics/MetricsSeveritiesCurrent.tsx index c452b28f7..4318612a8 100644 --- a/frontend/src/metrics/MetricsSeveritiesCurrent.tsx +++ b/frontend/src/metrics/MetricsSeveritiesCurrent.tsx @@ -37,7 +37,7 @@ const MetricsSeveritiesCurrent = (props: MetricsSeveritiesCurrentProps) => { ], datasets: [ { - label: "Severities of open observations", + label: "Severities of active observations", data: data, backgroundColor: [ get_severity_color(OBSERVATION_SEVERITY_CRITICAL), @@ -68,12 +68,12 @@ const MetricsSeveritiesCurrent = (props: MetricsSeveritiesCurrentProps) => { }) .then((result) => { const new_data = [ - result.json.open_critical, - result.json.open_high, - result.json.open_medium, - result.json.open_low, - result.json.open_none, - result.json.open_unknown, + result.json.active_critical, + result.json.active_high, + result.json.active_medium, + result.json.active_low, + result.json.active_none, + result.json.active_unknown, ]; setData((data) => data.concat(new_data)); }) @@ -115,7 +115,10 @@ const MetricsSeveritiesCurrent = (props: MetricsSeveritiesCurrentProps) => { circular: true, color: getGridColor(), }, + min: 0, + suggestedMax: 5, ticks: { + precision: 0, backdropColor: getBackgroundColor(), color: getFontColor(), }, @@ -128,7 +131,7 @@ const MetricsSeveritiesCurrent = (props: 
MetricsSeveritiesCurrentProps) => { plugins: { title: { display: true, - text: "Severities of open observations (current)", + text: "Severities of active observations (current)", color: getFontColor(), }, legend: { diff --git a/frontend/src/metrics/MetricsSeveritiesTimeLine.tsx b/frontend/src/metrics/MetricsSeveritiesTimeLine.tsx index c6889e5d2..a95699d1f 100644 --- a/frontend/src/metrics/MetricsSeveritiesTimeLine.tsx +++ b/frontend/src/metrics/MetricsSeveritiesTimeLine.tsx @@ -16,6 +16,7 @@ import { Line } from "react-chartjs-2"; import { get_severity_color } from "../commons/functions"; import { httpClient } from "../commons/ra-data-django-rest-framework"; +import { getSettingsMetricsTimespanInDays } from "../commons/user_settings/functions"; import { OBSERVATION_SEVERITY_CRITICAL, OBSERVATION_SEVERITY_HIGH, @@ -36,15 +37,10 @@ const MetricsSeveritiesTimeline = (props: MetricsSeveritiesTimelineProps) => { const [loading, setLoading] = useState(false); const notify = useNotify(); - const days = [ - new Date(Date.now() - 6 * 24 * 60 * 60 * 1000).toLocaleDateString(), - new Date(Date.now() - 5 * 24 * 60 * 60 * 1000).toLocaleDateString(), - new Date(Date.now() - 4 * 24 * 60 * 60 * 1000).toLocaleDateString(), - new Date(Date.now() - 3 * 24 * 60 * 60 * 1000).toLocaleDateString(), - new Date(Date.now() - 2 * 24 * 60 * 60 * 1000).toLocaleDateString(), - new Date(Date.now() - 1 * 24 * 60 * 60 * 1000).toLocaleDateString(), - new Date(Date.now()).toLocaleDateString(), - ]; + const days = []; + for (let i = getSettingsMetricsTimespanInDays() - 1; i >= 0; i--) { + days.push(new Date(Date.now() - i * 24 * 60 * 60 * 1000).toLocaleDateString()); + } function get_metrics(date: Date, metrics_data: any) { const date_string = date.toISOString().split("T")[0]; @@ -53,12 +49,12 @@ const MetricsSeveritiesTimeline = (props: MetricsSeveritiesTimelineProps) => { return metrics; } else { return { - open_critical: 0, - open_high: 0, - open_medium: 0, - open_low: 0, - open_none: 0, - open_unknown: 0, + active_critical: 0, + active_high: 0, + active_medium: 0, + active_low: 0, + active_none: 0, + active_unknown: 0, }; } } @@ -75,7 +71,11 @@ const MetricsSeveritiesTimeline = (props: MetricsSeveritiesTimelineProps) => { function get_data() { setLoading(true); - let url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/metrics/product_metrics_timeline/?age=Past%207%20days"; + let url = + window.__RUNTIME_CONFIG__.API_BASE_URL + + "/metrics/product_metrics_timeline/?age=Past%20" + + getSettingsMetricsTimespanInDays() + + "%20days"; if (props.product_id) { url += "&product_id=" + props.product_id; } @@ -91,61 +91,16 @@ const MetricsSeveritiesTimeline = (props: MetricsSeveritiesTimelineProps) => { const none_observations = []; const unknown_observations = []; - let metrics = get_metrics(new Date(Date.now() - 6 * 24 * 60 * 60 * 1000), result.json); - critical_observations.push(metrics.open_critical); - high_observations.push(metrics.open_high); - medium_observations.push(metrics.open_medium); - low_observations.push(metrics.open_low); - none_observations.push(metrics.open_none); - unknown_observations.push(metrics.open_unknown); - - metrics = get_metrics(new Date(Date.now() - 5 * 24 * 60 * 60 * 1000), result.json); - critical_observations.push(metrics.open_critical); - high_observations.push(metrics.open_high); - medium_observations.push(metrics.open_medium); - low_observations.push(metrics.open_low); - none_observations.push(metrics.open_none); - unknown_observations.push(metrics.open_unknown); - - metrics = get_metrics(new 
Date(Date.now() - 4 * 24 * 60 * 60 * 1000), result.json); - critical_observations.push(metrics.open_critical); - high_observations.push(metrics.open_high); - medium_observations.push(metrics.open_medium); - low_observations.push(metrics.open_low); - none_observations.push(metrics.open_none); - unknown_observations.push(metrics.open_unknown); - - metrics = get_metrics(new Date(Date.now() - 3 * 24 * 60 * 60 * 1000), result.json); - critical_observations.push(metrics.open_critical); - high_observations.push(metrics.open_high); - medium_observations.push(metrics.open_medium); - low_observations.push(metrics.open_low); - none_observations.push(metrics.open_none); - unknown_observations.push(metrics.open_unknown); - - metrics = get_metrics(new Date(Date.now() - 2 * 24 * 60 * 60 * 1000), result.json); - critical_observations.push(metrics.open_critical); - high_observations.push(metrics.open_high); - medium_observations.push(metrics.open_medium); - low_observations.push(metrics.open_low); - none_observations.push(metrics.open_none); - unknown_observations.push(metrics.open_unknown); - - metrics = get_metrics(new Date(Date.now() - 1 * 24 * 60 * 60 * 1000), result.json); - critical_observations.push(metrics.open_critical); - high_observations.push(metrics.open_high); - medium_observations.push(metrics.open_medium); - low_observations.push(metrics.open_low); - none_observations.push(metrics.open_none); - unknown_observations.push(metrics.open_unknown); - - metrics = get_metrics(new Date(Date.now()), result.json); - critical_observations.push(metrics.open_critical); - high_observations.push(metrics.open_high); - medium_observations.push(metrics.open_medium); - low_observations.push(metrics.open_low); - none_observations.push(metrics.open_none); - unknown_observations.push(metrics.open_unknown); + let metrics = null; + for (let i = getSettingsMetricsTimespanInDays() - 1; i >= 0; i--) { + metrics = get_metrics(new Date(Date.now() - i * 24 * 60 * 60 * 1000), result.json); + critical_observations.push(metrics.active_critical); + high_observations.push(metrics.active_high); + medium_observations.push(metrics.active_medium); + low_observations.push(metrics.active_low); + none_observations.push(metrics.active_none); + unknown_observations.push(metrics.active_unknown); + } const data_sets = [ { @@ -233,6 +188,11 @@ const MetricsSeveritiesTimeline = (props: MetricsSeveritiesTimelineProps) => { options={{ scales: { y: { + min: 0, + suggestedMax: 5, + ticks: { + precision: 0, + }, stacked: true, grid: { color: getGridColor(), @@ -249,7 +209,10 @@ const MetricsSeveritiesTimeline = (props: MetricsSeveritiesTimelineProps) => { plugins: { title: { display: true, - text: "Severities of open observations (last 7 days)", + text: + "Severities of active observations (last " + + getSettingsMetricsTimespanInDays() + + " days)", color: getFontColor(), }, legend: { diff --git a/frontend/src/metrics/MetricsStatusCurrent.tsx b/frontend/src/metrics/MetricsStatusCurrent.tsx index 729b674b6..168000a20 100644 --- a/frontend/src/metrics/MetricsStatusCurrent.tsx +++ b/frontend/src/metrics/MetricsStatusCurrent.tsx @@ -6,6 +6,7 @@ import { PolarArea } from "react-chartjs-2"; import { httpClient } from "../commons/ra-data-django-rest-framework"; import { + OBSERVATION_STATUS_AFFECTED, OBSERVATION_STATUS_DUPLICATE, OBSERVATION_STATUS_FALSE_POSITIVE, OBSERVATION_STATUS_IN_REVIEW, @@ -30,10 +31,11 @@ const MetricsStatusCurrent = (props: MetricsStatusCurrentProps) => { const chart_data = { labels: [ OBSERVATION_STATUS_OPEN, + 
OBSERVATION_STATUS_AFFECTED, + OBSERVATION_STATUS_IN_REVIEW, OBSERVATION_STATUS_RESOLVED, OBSERVATION_STATUS_DUPLICATE, OBSERVATION_STATUS_FALSE_POSITIVE, - OBSERVATION_STATUS_IN_REVIEW, OBSERVATION_STATUS_NOT_AFFECTED, OBSERVATION_STATUS_NOT_SECURITY, OBSERVATION_STATUS_RISK_ACCEPTED, @@ -43,14 +45,15 @@ const MetricsStatusCurrent = (props: MetricsStatusCurrentProps) => { label: "Status of observations", data: data, backgroundColor: [ - "#1f2c33", - "#3d5766", - "#79adcc", - "#bcb7b6", - "#ffc09f", - "#ffd799", - "#ffee93", - "#fcf5c7", + "#642915", + "#963e20", + "#c7522a", + "#e5c185", + "#fbf2c4", + "#74a892", + "#008585", + "#006464", + "#004343", ], }, ], @@ -74,10 +77,11 @@ const MetricsStatusCurrent = (props: MetricsStatusCurrentProps) => { .then((result) => { const new_data = [ result.json.open, + result.json.affected, + result.json.in_review, result.json.resolved, result.json.duplicate, result.json.false_positive, - result.json.in_review, result.json.not_affected, result.json.not_security, result.json.risk_accepted, @@ -122,7 +126,10 @@ const MetricsStatusCurrent = (props: MetricsStatusCurrentProps) => { circular: true, color: getGridColor(), }, + min: 0, + suggestedMax: 5, ticks: { + precision: 0, backdropColor: getBackgroundColor(), color: getFontColor(), }, diff --git a/frontend/src/metrics/functions.ts b/frontend/src/metrics/functions.ts index 79362d3dc..3e87ddec3 100644 --- a/frontend/src/metrics/functions.ts +++ b/frontend/src/metrics/functions.ts @@ -1,7 +1,7 @@ -import { getSettingTheme } from "../commons/user_settings/functions"; +import { getResolvedSettingTheme } from "../commons/user_settings/functions"; export function getGridColor() { - if (getSettingTheme() == "dark") { + if (getResolvedSettingTheme() == "dark") { return "#666666"; } else { return "#e5e5e5"; @@ -9,7 +9,7 @@ export function getGridColor() { } export function getBackgroundColor() { - if (getSettingTheme() == "dark") { + if (getResolvedSettingTheme() == "dark") { return "#282828"; } else { return "white"; @@ -17,7 +17,7 @@ export function getBackgroundColor() { } export function getFontColor() { - if (getSettingTheme() == "dark") { + if (getResolvedSettingTheme() == "dark") { return "#bcbcbc"; } else { return "#666666"; @@ -29,7 +29,7 @@ export function getElevation(on_dashboard?: boolean) { return 1; } - if (getSettingTheme() == "dark") { + if (getResolvedSettingTheme() == "dark") { return 4; } else { return 1; diff --git a/frontend/src/commons/notifications/NotificationBulkDeleteButton.tsx b/frontend/src/notifications/NotificationBulkMarkAsViewedButton.tsx similarity index 68% rename from frontend/src/commons/notifications/NotificationBulkDeleteButton.tsx rename to frontend/src/notifications/NotificationBulkMarkAsViewedButton.tsx index a90e16056..1daca3523 100644 --- a/frontend/src/commons/notifications/NotificationBulkDeleteButton.tsx +++ b/frontend/src/notifications/NotificationBulkMarkAsViewedButton.tsx @@ -1,23 +1,25 @@ +import ChecklistIcon from "@mui/icons-material/Checklist"; import { Backdrop, CircularProgress } from "@mui/material"; import { useState } from "react"; import { Confirm, useListContext, useNotify, useRefresh, useUnselectAll } from "react-admin"; -import { httpClient } from "../../commons/ra-data-django-rest-framework"; -import RemoveButton from "../custom_fields/RemoveButton"; +import SmallButton from "../commons/custom_fields/SmallButton"; +import { httpClient } from "../commons/ra-data-django-rest-framework"; +import { update_notification_count } from "./notification_count"; 
-const NotificationBulkDeleteButton = () => { +const NotificationBulkMarkAsViewedButton = () => { const [open, setOpen] = useState(false); const { selectedIds } = useListContext(); const refresh = useRefresh(); const [loading, setLoading] = useState(false); const notify = useNotify(); - const unselectAll = useUnselectAll("notifications"); + const unselectAll = useUnselectAll("notifications", "notifications.list"); const handleClick = () => setOpen(true); const handleDialogClose = () => setOpen(false); const handleConfirm = async () => { setLoading(true); - const url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/notifications/bulk_delete/"; + const url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/notifications/bulk_mark_as_viewed/"; const delete_data = { notifications: selectedIds, }; @@ -30,8 +32,9 @@ const NotificationBulkDeleteButton = () => { refresh(); setOpen(false); setLoading(false); + update_notification_count(); unselectAll(); - notify("Notifications deleted", { + notify("Notifications marked as viewed", { type: "success", }); }) @@ -48,11 +51,11 @@ const NotificationBulkDeleteButton = () => { return ( <> - + } title="Mark as viewed" onClick={handleClick} /> @@ -65,4 +68,4 @@ const NotificationBulkDeleteButton = () => { ); }; -export default NotificationBulkDeleteButton; +export default NotificationBulkMarkAsViewedButton; diff --git a/frontend/src/notifications/NotificationList.tsx b/frontend/src/notifications/NotificationList.tsx new file mode 100644 index 000000000..93d54efcb --- /dev/null +++ b/frontend/src/notifications/NotificationList.tsx @@ -0,0 +1,101 @@ +import { Fragment } from "react"; +import { + AutocompleteInput, + BooleanInput, + ChipField, + Datagrid, + DateField, + FunctionField, + List, + ReferenceInput, + TextField, + TextInput, + WithListContext, +} from "react-admin"; + +import notifications from "."; +import { CustomPagination } from "../commons/custom_fields/CustomPagination"; +import { has_attribute } from "../commons/functions"; +import ListHeader from "../commons/layout/ListHeader"; +import { AutocompleteInputMedium } from "../commons/layout/themes"; +import { getSettingListSize } from "../commons/user_settings/functions"; +import NotificationBulkMarkAsViewedButton from "./NotificationBulkMarkAsViewedButton"; +import { TYPE_CHOICES } from "./types"; + +const messageShortened = (message: string | null) => { + if (message && message.length > 255) { + return message.substring(0, 255) + "..."; + } + return message; +}; + +const listFilters = [ + , + , + , + , + + + , + + + , + , +]; + +const BulkActionButtons = () => ; + +const NotificationList = () => { + return ( + + + } + filters={listFilters} + filterDefaultValues={{ exclude_already_viewed: true }} + sort={{ field: "created", order: "DESC" }} + disableSyncWithLocation={false} + storeKey="notifications.list" + actions={false} + > + ( + }> + + + + {has_attribute("message", data, sort) && ( + messageShortened(record.message)} + sortable={false} + sx={{ wordBreak: "break-word" }} + /> + )} + {has_attribute("function", data, sort) && } + {has_attribute("product_name", data, sort) && ( + + )} + {has_attribute("observation_title", data, sort) && ( + + )} + {has_attribute("user_full_name", data, sort) && ( + + )} + + + )} + /> + + + ); +}; + +export default NotificationList; diff --git a/frontend/src/notifications/NotificationShow.tsx b/frontend/src/notifications/NotificationShow.tsx new file mode 100644 index 000000000..6264907a8 --- /dev/null +++ b/frontend/src/notifications/NotificationShow.tsx @@ -0,0 +1,83 @@ 
+import { Typography } from "@mui/material"; +import { useEffect } from "react"; +import { + DateField, + PrevNextButtons, + ReferenceField, + Show, + SimpleShowLayout, + TextField, + TopToolbar, + WithRecord, + useGetRecordId, +} from "react-admin"; + +import notifications from "."; +import { httpClient } from "../commons/ra-data-django-rest-framework"; +import { update_notification_count } from "./notification_count"; + +const ShowActions = () => { + return ( + + + + ); +}; + +const NotificationShow = () => { + const recordId = useGetRecordId(); + + useEffect(() => { + const url = window.__RUNTIME_CONFIG__.API_BASE_URL + "/notifications/" + recordId + "/mark_as_viewed/"; + httpClient(url, { + method: "POST", + }) + .then(() => { + update_notification_count(); + }) + .catch((error) => { + console.warn("Cannot mark notification as viewed: ", error.message); + }); + }, [recordId]); + + return ( + }> + ( + + + +   Notification + + + + + {notification?.message && } + {notification?.function && } + {notification?.arguments && } + {notification?.product && ( + + )} + {notification?.observation && ( + + )} + {notification?.user_full_name && } + + )} + /> + + ); +}; + +export default NotificationShow; diff --git a/frontend/src/commons/notifications/index.ts b/frontend/src/notifications/index.ts similarity index 100% rename from frontend/src/commons/notifications/index.ts rename to frontend/src/notifications/index.ts diff --git a/frontend/src/notifications/notification_count.ts b/frontend/src/notifications/notification_count.ts new file mode 100644 index 000000000..917472bbe --- /dev/null +++ b/frontend/src/notifications/notification_count.ts @@ -0,0 +1,20 @@ +import { httpClient } from "../commons/ra-data-django-rest-framework"; + +export async function update_notification_count() { + const url = + window.__RUNTIME_CONFIG__.API_BASE_URL + "/notifications/?exclude_already_viewed=true&page=1&page_size=1"; + httpClient(url, { + method: "GET", + }) + .then((result) => { + const count = result.json.count; + localStorage.setItem("notification_count", count); + }) + .catch((error) => { + console.warn("Cannot update notification count: ", error.message); + }); +} + +export function get_notification_count() { + return localStorage.getItem("notification_count") || ""; +} diff --git a/frontend/src/notifications/types.ts b/frontend/src/notifications/types.ts new file mode 100644 index 000000000..9730960bb --- /dev/null +++ b/frontend/src/notifications/types.ts @@ -0,0 +1,19 @@ +import { Identifier, RaRecord } from "react-admin"; + +export const TYPE_CHOICES = [ + { id: "Exception", name: "Exception" }, + { id: "Security gate", name: "Security gate" }, + { id: "Task", name: "Task" }, +]; + +export interface Notification extends RaRecord { + id: Identifier; + type: string; + name: string; + created: Date; + message: string; + user: Identifier; + observation: Identifier; + function: string; + arguments: string; +} diff --git a/frontend/src/rules/RuleApproval.tsx b/frontend/src/rules/RuleApproval.tsx index 3bb31c8e2..99c4176c6 100644 --- a/frontend/src/rules/RuleApproval.tsx +++ b/frontend/src/rules/RuleApproval.tsx @@ -1,11 +1,10 @@ import ApprovalIcon from "@mui/icons-material/Approval"; import { Dialog, DialogContent, DialogTitle } from "@mui/material"; import { Fragment, useState } from "react"; -import { SaveButton, SimpleForm, useNotify, useRefresh } from "react-admin"; +import { SimpleForm, useNotify, useRefresh } from "react-admin"; -import CancelButton from "../commons/custom_fields/CancelButton"; 
import SmallButton from "../commons/custom_fields/SmallButton"; -import Toolbar from "../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../commons/custom_fields/ToolbarCancelSave"; import { validate_required, validate_required_255 } from "../commons/custom_validators"; import { AutocompleteInputMedium, TextInputWide } from "../commons/layout/themes"; import { httpClient } from "../commons/ra-data-django-rest-framework"; @@ -53,13 +52,6 @@ const RuleApproval = (props: RuleApprovalProps) => { const handleCancel = () => setOpen(false); const handleOpen = () => setOpen(true); - const CustomToolbar = () => ( - - - - - ); - return ( } /> @@ -69,7 +61,7 @@ const RuleApproval = (props: RuleApprovalProps) => {   Rule approval - }> + }> { + const dialogRef = useRef(null); + const [open, setOpen] = useState(false); + const notify = useNotify(); + const [data, setData] = useState([]); + const [count, setCount] = useState(0); + const [loading, setLoading] = useState(true); + + const simulateRule = () => { + setLoading(true); + const rules_provider = product === undefined ? "general_rules" : "product_rules"; + httpClient(window.__RUNTIME_CONFIG__.API_BASE_URL + "/" + rules_provider + "/" + rule.id + "/simulate/", { + method: "POST", + }) + .then((result: any) => { + setCount(result.json.count); + setData(result.json.results); + }) + .catch((error) => { + setOpen(false); + notify(error.message, { + type: "warning", + }); + }); + setLoading(false); + }; + + const handleOpen = () => { + setOpen(true); + localStorage.removeItem("RaStore.rule_simulation.datagrid.expanded"); + simulateRule(); + }; + + const handleClose = (event: object, reason: string) => { + if (reason && reason == "backdropClick") return; + setOpen(false); + }; + + const handleOk = () => setOpen(false); + + const OKButton = () => ( + + ); + + const listContext = useList({ + data, + isLoading: loading, + }); + + return ( + + } /> + + Affected observations of rule {rule.name} + + {count !== data.length && ( + + Showing {data.length} of {count} observations. + + )} + + + } + expandSingle + > + + {(product === undefined || product.is_product_group) && ( + + )} + + + + + + + + + + + + + {loading ? 
( + theme.zIndex.drawer + 1 }} open={open}> + + + ) : null} + + ); +}; + +export default RuleSimulation; diff --git a/frontend/src/rules/functions.ts b/frontend/src/rules/functions.ts deleted file mode 100644 index a3d3f2878..000000000 --- a/frontend/src/rules/functions.ts +++ /dev/null @@ -1,23 +0,0 @@ -export const validateRuleForm = (values: any) => { - const errors: any = {}; - - if (!values.name) { - errors.name = "Title is required"; - } - - if (!values.description) { - errors.description = "Description is required"; - } - - if (!values.new_severity && !values.new_status) { - errors.new_severity = "Either New severity or New status must be set"; - errors.new_status = "Either New severity or New status must be set"; - } - - if (!values.parser && !values.scanner_prefix) { - errors.parser = "Either Parser or Scanner prefix must be set"; - errors.scanner_prefix = "Either Parser or Scanner prefix must be set"; - } - - return errors; -}; diff --git a/frontend/src/rules/functions.tsx b/frontend/src/rules/functions.tsx new file mode 100644 index 000000000..fad738747 --- /dev/null +++ b/frontend/src/rules/functions.tsx @@ -0,0 +1,611 @@ +import { Box, Divider, Paper, Stack, Typography } from "@mui/material"; +import { RefObject } from "react"; +import { Fragment, useState } from "react"; +import { + BooleanField, + BooleanInput, + ChipField, + DateField, + Labeled, + ReferenceField, + ReferenceInput, + TextField, + useRecordContext, +} from "react-admin"; +import { useWatch } from "react-hook-form"; +import { Prism as SyntaxHighlighter } from "react-syntax-highlighter"; +// import { PrismLight as SyntaxHighlighter } from "react-syntax-highlighter"; +// import rego from "react-syntax-highlighter/dist/esm/languages/prism/rego"; +import { oneDark, oneLight } from "react-syntax-highlighter/dist/esm/styles/prism"; + +import MarkdownEdit from "../commons/custom_fields/MarkdownEdit"; +import MarkdownField from "../commons/custom_fields/MarkdownField"; +import TextUrlField from "../commons/custom_fields/TextUrlField"; +import { + validate_255, + validate_513, + validate_2048, + validate_required, + validate_required_255, +} from "../commons/custom_validators"; +import { + feature_general_rules_need_approval_enabled, + feature_vex_enabled, + justificationIsEnabledForStatus, + settings_vex_justification_style, +} from "../commons/functions"; +import { + AutocompleteInputMedium, + AutocompleteInputWide, + TextInputExtraWide, + TextInputWide, + useStyles, +} from "../commons/layout/themes"; +import { VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX, VEX_JUSTIFICATION_TYPE_CYCLONEDX } from "../commons/types"; +import { getResolvedSettingTheme } from "../commons/user_settings/functions"; +import { + OBSERVATION_CYCLONEDX_VEX_JUSTIFICATION_CHOICES, + OBSERVATION_SEVERITY_CHOICES, + OBSERVATION_STATUS_CHOICES, + OBSERVATION_VEX_JUSTIFICATION_CHOICES, +} from "../core/types"; +import general_rules from "./general_rules"; +import product_rules from "./product_rules"; +import { RULE_TYPE_CHOICES, RULE_TYPE_FIELDS, RULE_TYPE_REGO } from "./types"; + +// SyntaxHighlighter.registerLanguage("rego", rego); + +export function getRegoTheme() { + const theme = getResolvedSettingTheme(); + if (theme === "dark") { + return oneDark; + } else { + return oneLight; + } +} + +export const validateRuleForm = (values: any) => { + const errors: any = {}; + + if (!values.name) { + errors.name = "Name is required"; + } + + if (values.type === RULE_TYPE_FIELDS) { + if (!values.new_severity && !values.new_status) { + errors.new_severity = 
"Either New severity or New status must be set"; + errors.new_status = "Either New severity or New status must be set"; + } + + if (!values.parser && !values.scanner_prefix) { + errors.parser = "Either Parser or Scanner prefix must be set"; + errors.scanner_prefix = "Either Parser or Scanner prefix must be set"; + } + } + + if (values.type === RULE_TYPE_REGO) { + if (!values.rego_module) { + errors.rego_module = "Rego module is required"; + } + } + + console.log(errors); + + return errors; +}; + +function generateProductURL(product_id: number, is_product_group: boolean): string { + if (is_product_group) { + return "#/product_groups/" + product_id + "/show/rules"; + } + return "#/products/" + product_id + "/show/rules"; +} + +function getProductLabel(product_data: any): string { + if (product_data.is_product_group) { + return "Product group"; + } + return "Product"; +} + +export const RuleShowComponent = ({ rule }: any) => { + const { classes } = useStyles(); + + return ( + + + {rule.product_data && ( + + +   Product Rule + + )} + {!rule.product_data && ( + + +   General Rule + + )} + + {rule.product_data && ( + + + + )} + + + + {rule.description && ( + + + + )} + + + + + + {rule.new_severity && ( + + + + )} + {rule.new_status && ( + + + + )} + {feature_vex_enabled() && rule.new_vex_justification && ( + + + + )} + + {rule.rego_module && ( + + + {rule.rego_module} + + + )} + + + + + {rule.user_full_name && ( + + + + )} + + + + {rule && (rule.parser || rule.scanner_prefix || rule.title || rule.description_observation) && ( + + + Observation + + + {rule.parser && ( + + + + )} + {rule.scanner_prefix && ( + + + + )} + {rule.title && ( + + + + )} + {rule.description_observation && ( + + + + )} + + + )} + + {rule && + (rule.origin_component_name_version || + rule.origin_component_purl || + rule.origin_docker_image_name_tag || + rule.origin_endpoint_url || + rule.origin_service_name || + rule.origin_source_file || + rule.origin_cloud_qualified_resource || + rule.origin_kubernetes_qualified_resource) && ( + + + Origins + + + {rule.origin_component_name_version && ( + + + + )} + {rule.origin_component_purl && ( + + + + )} + {rule.origin_docker_image_name_tag && ( + + + + )} + {rule.origin_endpoint_url && ( + + + + )} + {rule.origin_service_name && ( + + + + )} + {rule.origin_source_file && ( + + + + )} + {rule.origin_cloud_qualified_resource && ( + + + + )} + {rule.origin_kubernetes_qualified_resource && ( + + + + )} + + + )} + + {((rule.product_data && + (rule.product_data.product_rules_need_approval || + rule.product_data.product_group_product_rules_need_approval)) || + (!rule.product_data && feature_general_rules_need_approval_enabled())) && ( + + + Approval + + + + + + {rule.approval_user_full_name && ( + + + + )} + {rule.approval_remark && ( + + + + )} + {rule.approval_date && ( + + + + )} + + + )} + + ); +}; + +export const non_duplicate_transform = (data: any, description: string) => { + data.description = description; + data.type ??= ""; + + if (data.type === RULE_TYPE_FIELDS) { + data.title ??= ""; + data.description_observation ??= ""; + data.new_severity ??= ""; + data.new_status ??= ""; + if (!justificationIsEnabledForStatus(data.new_status) || data.new_vex_justification == null) { + data.new_vex_justification = ""; + } + + data.scanner_prefix ??= ""; + + data.origin_component_name_version ??= ""; + data.origin_component_purl ??= ""; + data.origin_docker_image_name_tag ??= ""; + data.origin_endpoint_url ??= ""; + data.origin_service_name ??= ""; + data.origin_source_file ??= ""; + 
data.origin_cloud_qualified_resource ??= ""; + data.origin_kubernetes_qualified_resource ??= ""; + + data.rego_module = ""; + } + + if (data.type === RULE_TYPE_REGO) { + data.title = ""; + data.description_observation = ""; + data.new_severity = ""; + data.new_status = ""; + data.new_vex_justification = ""; + + data.scanner_prefix = ""; + + data.origin_component_name_version = ""; + data.origin_component_purl = ""; + data.origin_docker_image_name_tag = ""; + data.origin_endpoint_url = ""; + data.origin_service_name = ""; + data.origin_source_file = ""; + data.origin_cloud_qualified_resource = ""; + data.origin_kubernetes_qualified_resource = ""; + + data.rego_module ??= ""; + } + + return data; +}; + +interface SeverityStatusInputProps { + initialStatus: string; +} + +const SeverityStatusInput = ({ initialStatus }: SeverityStatusInputProps) => { + const [status, setStatus] = useState(initialStatus); + const justificationEnabled = justificationIsEnabledForStatus(status); + const type = useWatch({ name: "type" }); + + if (type === RULE_TYPE_FIELDS) { + return ( + + + setStatus(e)} + /> + {justificationEnabled && settings_vex_justification_style() === VEX_JUSTIFICATION_TYPE_CSAF_OPENVEX && ( + + )} + {justificationEnabled && settings_vex_justification_style() === VEX_JUSTIFICATION_TYPE_CYCLONEDX && ( + + )} + + ); + } else { + return null; + } +}; + +const RegoModuleInput = () => { + const type = useWatch({ name: "type" }); + + if (type === RULE_TYPE_REGO) { + return ( + + + + + ); + } else { + return null; + } +}; + +const FieldsInput = () => { + const type = useWatch({ name: "type" }); + + if (type === RULE_TYPE_FIELDS) { + return ( + + + + + Observation + + + + + + + + + + + + + + Origins + + + + + + + + + + + + + ); + } else { + return null; + } +}; + +interface RuleCreateEditComponentProps { + product: any; + initialStatus: string; + initialDescription: string; + setDescription: (value: string) => void; + dialogRef?: RefObject | null; +} + +export const RuleCreateEditComponent = ({ + product, + initialStatus, + initialDescription, + setDescription, + dialogRef = null, +}: RuleCreateEditComponentProps) => { + return ( + + {product && ( + + +   Product Rule + + )} + {!product && ( + + +   General Rule + + )} + + {product && ( + + )} + + + + + + + + + + + + + ); +}; + +interface RuleEditComponentProps { + product: any; + initialStatus: string; + setDescription: (value: string) => void; +} + +export const RuleEditComponent = ({ product, initialStatus, setDescription }: RuleEditComponentProps) => { + const rule = useRecordContext(); + const [descriptionSet, setDescriptionSet] = useState(false); + const [initialDescription, setInitialDescription] = useState(""); + + if (!descriptionSet && rule) { + setInitialDescription(rule.description || ""); + setDescription(rule.description); + setDescriptionSet(true); + } + + return ( + + ); +}; diff --git a/frontend/src/rules/general_rules/GeneralRuleCreate.tsx b/frontend/src/rules/general_rules/GeneralRuleCreate.tsx index 3435618d8..64e8b6c96 100644 --- a/frontend/src/rules/general_rules/GeneralRuleCreate.tsx +++ b/frontend/src/rules/general_rules/GeneralRuleCreate.tsx @@ -1,171 +1,25 @@ -import { Divider, Stack, Typography } from "@mui/material"; import { useState } from "react"; -import { BooleanInput, Create, ReferenceInput, SimpleForm } from "react-admin"; +import { Create, SimpleForm } from "react-admin"; -import { - validate_255, - validate_513, - validate_2048, - validate_required_255, - validate_required_2048, -} from 
"../../commons/custom_validators"; -import { justificationIsEnabledForStatus } from "../../commons/functions"; -import { AutocompleteInputMedium, AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; -import { - OBSERVATION_SEVERITY_CHOICES, - OBSERVATION_STATUS_CHOICES, - OBSERVATION_STATUS_OPEN, - OBSERVATION_VEX_JUSTIFICATION_CHOICES, -} from "../../core/types"; -import { validateRuleForm } from "../functions"; +import { OBSERVATION_STATUS_OPEN } from "../../core/types"; +import { RuleCreateEditComponent, non_duplicate_transform, validateRuleForm } from "../functions"; const GeneralRuleCreate = () => { - const [status, setStatus] = useState(OBSERVATION_STATUS_OPEN); - const justificationEnabled = justificationIsEnabledForStatus(status); + const [description, setDescription] = useState(""); const transform = (data: any) => { - if (data.scanner_prefix == null) { - data.scanner_prefix = ""; - } - if (data.title == null) { - data.title = ""; - } - if (data.description_observation == null) { - data.description_observation = ""; - } - if (data.origin_component_name_version == null) { - data.origin_component_name_version = ""; - } - if (data.origin_docker_image_name_tag == null) { - data.origin_docker_image_name_tag = ""; - } - if (data.origin_endpoint_url == null) { - data.origin_endpoint_url = ""; - } - if (data.origin_service_name == null) { - data.origin_service_name = ""; - } - if (data.origin_source_file == null) { - data.origin_source_file = ""; - } - if (data.origin_cloud_qualified_resource == null) { - data.origin_cloud_qualified_resource = ""; - } - if (data.origin_kubernetes_qualified_resource == null) { - data.origin_kubernetes_qualified_resource = ""; - } - if (data.new_severity == null) { - data.new_severity = ""; - } - if (data.new_status == null) { - data.new_status = ""; - } - if (!justificationEnabled || data.new_vex_justification == null) { - data.new_vex_justification = ""; - } - return data; + return non_duplicate_transform(data, description); }; return ( - - General Rule - - - - - - setStatus(e)} - /> - {justificationEnabled && ( - - )} - - - - - Observation - - - - - - - - - - - - Origins - - - - - - - - - - + {" "} ); diff --git a/frontend/src/rules/general_rules/GeneralRuleEdit.tsx b/frontend/src/rules/general_rules/GeneralRuleEdit.tsx index b24cecce7..b51699b3b 100644 --- a/frontend/src/rules/general_rules/GeneralRuleEdit.tsx +++ b/frontend/src/rules/general_rules/GeneralRuleEdit.tsx @@ -1,196 +1,39 @@ -import { Divider, Stack, Typography } from "@mui/material"; import { useState } from "react"; -import { - BooleanInput, - DeleteButton, - Edit, - ReferenceInput, - SaveButton, - SimpleForm, - Toolbar, - useRecordContext, -} from "react-admin"; +import { DeleteButton, Edit, SaveButton, SimpleForm, Toolbar, WithRecord } from "react-admin"; -import { - validate_255, - validate_513, - validate_2048, - validate_required_255, - validate_required_2048, -} from "../../commons/custom_validators"; -import { justificationIsEnabledForStatus } from "../../commons/functions"; -import { AutocompleteInputMedium, AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; -import { - OBSERVATION_SEVERITY_CHOICES, - OBSERVATION_STATUS_CHOICES, - OBSERVATION_VEX_JUSTIFICATION_CHOICES, -} from "../../core/types"; -import { validateRuleForm } from "../functions"; +import { RuleEditComponent, non_duplicate_transform, validateRuleForm } from "../functions"; const CustomToolbar = () => { return ( - + ); }; + const GeneralRuleEdit = () => { + const 
[description, setDescription] = useState(""); + const transform = (data: any) => { - if (data.scanner_prefix == null) { - data.scanner_prefix = ""; - } - if (data.title == null) { - data.title = ""; - } - if (data.description_observation == null) { - data.description_observation = ""; - } - if (data.origin_component_name_version == null) { - data.origin_component_name_version = ""; - } - if (data.origin_docker_image_name_tag == null) { - data.origin_docker_image_name_tag = ""; - } - if (data.origin_endpoint_url == null) { - data.origin_endpoint_url = ""; - } - if (data.origin_service_name == null) { - data.origin_service_name = ""; - } - if (data.origin_source_file == null) { - data.origin_source_file = ""; - } - if (data.origin_cloud_qualified_resource == null) { - data.origin_cloud_qualified_resource = ""; - } - if (data.origin_kubernetes_qualified_resource == null) { - data.origin_kubernetes_qualified_resource = ""; - } - if (data.new_severity == null) { - data.new_severity = ""; - } - if (data.new_status == null) { - data.new_status = ""; - } - if (!justificationIsEnabledForStatus(data.new_status) || !data.new_vex_justification) { - data.new_vex_justification = ""; - } - return data; + return non_duplicate_transform(data, description); }; return ( - + } validate={validateRuleForm}> + ( + + )} + /> + ); }; -const GeneralRuleEditForm = () => { - const generalRule = useRecordContext(); - const [status, setStatus] = useState(generalRule ? generalRule.new_status : ""); - const justificationEnabled = justificationIsEnabledForStatus(status); - - return ( - } validate={validateRuleForm}> - - General Rule - - - - - - setStatus(e)} - /> - {justificationEnabled && ( - - )} - - - - - Observation - - - - - - - - - - - - Origins - - - - - - - - - - - - ); -}; export default GeneralRuleEdit; diff --git a/frontend/src/rules/general_rules/GeneralRuleList.tsx b/frontend/src/rules/general_rules/GeneralRuleList.tsx index 41f1431d6..cfefde143 100644 --- a/frontend/src/rules/general_rules/GeneralRuleList.tsx +++ b/frontend/src/rules/general_rules/GeneralRuleList.tsx @@ -5,21 +5,25 @@ import { ChipField, CreateButton, Datagrid, + FieldProps, List, ReferenceField, ReferenceInput, TextField, TextInput, TopToolbar, + WithRecord, + useRecordContext, } from "react-admin"; import general_rules from "."; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; -import { is_superuser } from "../../commons/functions"; -import { feature_general_rules_need_approval_enabled } from "../../commons/functions"; +import TextUrlField from "../../commons/custom_fields/TextUrlField"; +import { feature_general_rules_need_approval_enabled, is_superuser } from "../../commons/functions"; import ListHeader from "../../commons/layout/ListHeader"; import { AutocompleteInputMedium } from "../../commons/layout/themes"; import { getSettingListSize } from "../../commons/user_settings/functions"; +import RuleSimulation from "../RuleSimulation"; import { RULE_STATUS_CHOICES } from "../types"; const listFilters = [ @@ -39,6 +43,15 @@ if (feature_general_rules_need_approval_enabled()) { ); } +const RuleNameURLField = (props: FieldProps) => { + const record = useRecordContext(props); + return record ? 
: null; +}; + +function get_rule_url(rule_id: number): string { + return `#/general_rules/${rule_id}/show`; +} + const BulkActionButtons = () => { return {is_superuser() && }; }; @@ -62,14 +75,15 @@ const GeneralRuleList = () => { > } > - - - + + {feature_general_rules_need_approval_enabled() && } + + { /> + {is_superuser() && } />} diff --git a/frontend/src/rules/general_rules/GeneralRuleShow.tsx b/frontend/src/rules/general_rules/GeneralRuleShow.tsx index 37fe8a4ac..382513068 100644 --- a/frontend/src/rules/general_rules/GeneralRuleShow.tsx +++ b/frontend/src/rules/general_rules/GeneralRuleShow.tsx @@ -1,26 +1,11 @@ -import { Box, Paper, Stack, Typography } from "@mui/material"; +import { Stack } from "@mui/material"; import { Fragment } from "react"; -import { - BooleanField, - ChipField, - DateField, - EditButton, - Labeled, - PrevNextButtons, - ReferenceField, - Show, - TextField, - TopToolbar, - WithRecord, - useRecordContext, -} from "react-admin"; +import { EditButton, PrevNextButtons, Show, TopToolbar, WithRecord, useRecordContext } from "react-admin"; -import MarkdownField from "../../commons/custom_fields/MarkdownField"; -import { feature_vex_enabled } from "../../commons/functions"; -import { is_superuser } from "../../commons/functions"; -import { feature_general_rules_need_approval_enabled } from "../../commons/functions"; -import { useStyles } from "../../commons/layout/themes"; +import { feature_general_rules_need_approval_enabled, is_superuser } from "../../commons/functions"; import RuleApproval from "../RuleApproval"; +import RuleSimulation from "../RuleSimulation"; +import { RuleShowComponent } from "../functions"; import { RULE_STATUS_NEEDS_APPROVAL } from "../types"; const ShowActions = () => { @@ -29,10 +14,10 @@ const ShowActions = () => { - {rule && - rule.approval_status == RULE_STATUS_NEEDS_APPROVAL && + {rule?.approval_status == RULE_STATUS_NEEDS_APPROVAL && feature_general_rules_need_approval_enabled() && is_superuser() && } + {rule && is_superuser() && } {is_superuser() && } @@ -40,173 +25,7 @@ const ShowActions = () => { }; const GeneralRuleComponent = () => { - const { classes } = useStyles(); - - return ( - ( - - - - General Rule - - - - - - {rule.description && ( - - - - )} - - {rule.new_severity && ( - - - - )} - {rule.new_status && ( - - - - )} - {feature_vex_enabled() && rule.new_vex_justification && ( - - - - )} - - - - {rule.user_full_name && ( - - - - )} - - - - - - Observation - - - {rule.parser && ( - - - - )} - {rule.scanner_prefix && ( - - - - )} - {rule.title && ( - - - - )} - {rule.description_observation && ( - - - - )} - - - - {rule && - (rule.origin_component_name_version || - rule.origin_docker_image_name_tag || - rule.origin_endpoint_url || - rule.origin_service_name || - rule.origin_source_file || - rule.origin_cloud_qualified_resource || - rule.origin_kubernetes_qualified_resource) && ( - - - Origins - - - {rule.origin_component_name_version && ( - - - - )} - {rule.origin_docker_image_name_tag && ( - - - - )} - {rule.origin_endpoint_url && ( - - - - )} - {rule.origin_service_name && ( - - - - )} - {rule.origin_source_file && ( - - - - )} - {rule.origin_cloud_qualified_resource && ( - - - - )} - {rule.origin_kubernetes_qualified_resource && ( - - - - )} - - - )} - - {feature_general_rules_need_approval_enabled() && ( - - - Approval - - - - - - {rule.approval_user_full_name && ( - - - - )} - {rule.approval_remark && ( - - - - )} - {rule.approval_date && ( - - - - )} - - - )} - - )} - /> - ); + return } />; }; const GeneralRuleShow = () => 
{ diff --git a/frontend/src/rules/product_rules/ProductRuleCreate.tsx b/frontend/src/rules/product_rules/ProductRuleCreate.tsx index fb2de7f21..5e200c23a 100644 --- a/frontend/src/rules/product_rules/ProductRuleCreate.tsx +++ b/frontend/src/rules/product_rules/ProductRuleCreate.tsx @@ -1,47 +1,23 @@ -import { Dialog, DialogContent, DialogTitle, Divider, Typography } from "@mui/material"; -import { Fragment, useState } from "react"; -import { - BooleanInput, - CreateBase, - ReferenceInput, - SaveButton, - SimpleForm, - useCreate, - useNotify, - useRefresh, -} from "react-admin"; +import { Dialog, DialogContent, DialogTitle } from "@mui/material"; +import { Fragment, useRef, useState } from "react"; +import { CreateBase, SimpleForm, useCreate, useNotify, useRefresh } from "react-admin"; import AddButton from "../../commons/custom_fields/AddButton"; -import CancelButton from "../../commons/custom_fields/CancelButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; -import { - validate_255, - validate_513, - validate_2048, - validate_required_255, - validate_required_2048, -} from "../../commons/custom_validators"; -import { justificationIsEnabledForStatus } from "../../commons/functions"; -import { AutocompleteInputMedium, AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; -import { - OBSERVATION_SEVERITY_CHOICES, - OBSERVATION_STATUS_CHOICES, - OBSERVATION_STATUS_OPEN, - OBSERVATION_VEX_JUSTIFICATION_CHOICES, -} from "../../core/types"; -import { validateRuleForm } from "../functions"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { OBSERVATION_STATUS_OPEN } from "../../core/types"; +import { RuleCreateEditComponent, non_duplicate_transform, validateRuleForm } from "../functions"; export type ProductRuleCreateProps = { - id: any; + product: any; }; -const ProductRuleCreate = ({ id }: ProductRuleCreateProps) => { +const ProductRuleCreate = ({ product }: ProductRuleCreateProps) => { + const dialogRef = useRef(null); + const [description, setDescription] = useState(""); const [open, setOpen] = useState(false); const refresh = useRefresh(); const notify = useNotify(); const [create] = useCreate(); - const [status, setStatus] = useState(OBSERVATION_STATUS_OPEN); - const justificationEnabled = justificationIsEnabledForStatus(status); const handleOpen = () => setOpen(true); const handleCancel = () => setOpen(false); @@ -50,55 +26,9 @@ const ProductRuleCreate = ({ id }: ProductRuleCreateProps) => { setOpen(false); }; - const CustomToolbar = () => ( - - - - - ); - const create_product_rule = (data: any) => { - data.product = id; - - if (data.scanner_prefix == null) { - data.scanner_prefix = ""; - } - if (data.title == null) { - data.title = ""; - } - if (data.description_observation == null) { - data.description_observation = ""; - } - if (data.origin_component_name_version == null) { - data.origin_component_name_version = ""; - } - if (data.origin_docker_image_name_tag == null) { - data.origin_docker_image_name_tag = ""; - } - if (data.origin_endpoint_url == null) { - data.origin_endpoint_url = ""; - } - if (data.origin_service_name == null) { - data.origin_service_name = ""; - } - if (data.origin_source_file == null) { - data.origin_source_file = ""; - } - if (data.origin_cloud_qualified_resource == null) { - data.origin_cloud_qualified_resource = ""; - } - if (data.origin_kubernetes_qualified_resource == null) { - data.origin_kubernetes_qualified_resource = ""; - } - if (data.new_severity == null) { - 
data.new_severity = ""; - } - if (data.new_status == null) { - data.new_status = ""; - } - if (!justificationEnabled || !data.new_vex_justification) { - data.new_vex_justification = ""; - } + data.product = product.id; + data = non_duplicate_transform(data, description); create( "product_rules", @@ -107,120 +37,33 @@ const ProductRuleCreate = ({ id }: ProductRuleCreateProps) => { onSuccess: () => { refresh(); notify("Product rule added", { type: "success" }); + setOpen(false); }, onError: (error: any) => { notify(error.message, { type: "warning" }); }, } ); - setOpen(false); }; return ( - + Add product rule } + toolbar={} validate={validateRuleForm} > - - Rule - - - - - setStatus(e)} - /> - {justificationEnabled && ( - - )} - - - - - - Observation - - - - - - - - - - - - Origins - - - - - - - - diff --git a/frontend/src/rules/product_rules/ProductRuleEdit.tsx b/frontend/src/rules/product_rules/ProductRuleEdit.tsx index f8cf20346..5a07f87ae 100644 --- a/frontend/src/rules/product_rules/ProductRuleEdit.tsx +++ b/frontend/src/rules/product_rules/ProductRuleEdit.tsx @@ -1,40 +1,16 @@ -import { Divider, Stack, Typography } from "@mui/material"; import { useState } from "react"; -import { - BooleanInput, - DeleteButton, - Edit, - ReferenceInput, - SaveButton, - SimpleForm, - Toolbar, - useRecordContext, -} from "react-admin"; +import { DeleteButton, Edit, SaveButton, SimpleForm, Toolbar, WithRecord, useRecordContext } from "react-admin"; import { PERMISSION_PRODUCT_RULE_DELETE } from "../../access_control/types"; -import { - validate_255, - validate_513, - validate_2048, - validate_required_255, - validate_required_2048, -} from "../../commons/custom_validators"; -import { justificationIsEnabledForStatus } from "../../commons/functions"; -import { AutocompleteInputMedium, AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; -import { - OBSERVATION_SEVERITY_CHOICES, - OBSERVATION_STATUS_CHOICES, - OBSERVATION_VEX_JUSTIFICATION_CHOICES, -} from "../../core/types"; -import { validateRuleForm } from "../functions"; +import { RuleEditComponent, non_duplicate_transform, validateRuleForm } from "../functions"; const CustomToolbar = () => { const rule = useRecordContext(); return ( - - {rule && rule.product_data.permissions.includes(PERMISSION_PRODUCT_RULE_DELETE) && ( + + {rule?.product_data.permissions.includes(PERMISSION_PRODUCT_RULE_DELETE) && ( { ); }; const ProductRuleEdit = () => { + const [description, setDescription] = useState(""); + const transform = (data: any) => { - if (data.scanner_prefix == null) { - data.scanner_prefix = ""; - } - if (data.title == null) { - data.title = ""; - } - if (data.description_observation == null) { - data.description_observation = ""; - } - if (data.origin_component_name_version == null) { - data.origin_component_name_version = ""; - } - if (data.origin_docker_image_name_tag == null) { - data.origin_docker_image_name_tag = ""; - } - if (data.origin_endpoint_url == null) { - data.origin_endpoint_url = ""; - } - if (data.origin_service_name == null) { - data.origin_service_name = ""; - } - if (data.origin_source_file == null) { - data.origin_source_file = ""; - } - if (data.origin_cloud_qualified_resource == null) { - data.origin_cloud_qualified_resource = ""; - } - if (data.origin_kubernetes_qualified_resource == null) { - data.origin_kubernetes_qualified_resource = ""; - } - if (data.new_severity == null) { - data.new_severity = ""; - } - if (data.new_status == null) { - data.new_status = ""; - } - if 
(!justificationIsEnabledForStatus(data.new_status) || !data.new_vex_justification) { - data.new_vex_justification = ""; - } - return data; + return non_duplicate_transform(data, description); }; return ( - + } validate={validateRuleForm}> + ( + + )} + /> + ); }; -const ProductRuleEditForm = () => { - const productRule = useRecordContext(); - const [status, setStatus] = useState(productRule ? productRule.new_status : ""); - const justificationEnabled = justificationIsEnabledForStatus(status); - - return ( - } validate={validateRuleForm}> - - Product Rule - - - - - - - setStatus(e)} - /> - {justificationEnabled && ( - - )} - - - - - Observation - - - - - - - - - - - - Origins - - - - - - - - - - - - ); -}; export default ProductRuleEdit; diff --git a/frontend/src/rules/product_rules/ProductRuleEmbeddedList.tsx b/frontend/src/rules/product_rules/ProductRuleEmbeddedList.tsx index b9f2441cc..0d538e025 100644 --- a/frontend/src/rules/product_rules/ProductRuleEmbeddedList.tsx +++ b/frontend/src/rules/product_rules/ProductRuleEmbeddedList.tsx @@ -2,6 +2,7 @@ import { BooleanField, ChipField, Datagrid, + FieldProps, FilterForm, ListContextProvider, ReferenceField, @@ -9,12 +10,16 @@ import { ResourceContextProvider, TextField, TextInput, + WithRecord, useListController, + useRecordContext, } from "react-admin"; import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; +import TextUrlField from "../../commons/custom_fields/TextUrlField"; import { AutocompleteInputMedium } from "../../commons/layout/themes"; import { getSettingListSize } from "../../commons/user_settings/functions"; +import RuleSimulation from "../RuleSimulation"; import { RULE_STATUS_CHOICES } from "../types"; function listFilters(product: any) { @@ -37,6 +42,15 @@ function listFilters(product: any) { return filters; } +const RuleNameURLField = (props: FieldProps) => { + const record = useRecordContext(props); + return record ? : null; +}; + +function get_rule_url(rule_id: number): string { + return `#/product_rules/${rule_id}/show`; +} + type ProductRuleEmbeddedListProps = { product: any; }; @@ -55,10 +69,6 @@ const ProductRuleEmbeddedList = ({ product }: ProductRuleEmbeddedListProps) => { return
Loading...
; } - const ShowProductRule = (id: any) => { - return "../../../../product_rules/" + id + "/show"; - }; - localStorage.setItem("productruleembeddedlist", "true"); localStorage.removeItem("productruleapprovallist"); @@ -71,18 +81,19 @@ const ProductRuleEmbeddedList = ({ product }: ProductRuleEmbeddedListProps) => { size={getSettingListSize()} sx={{ width: "100%" }} bulkActionButtons={false} - rowClick={ShowProductRule} + rowClick={false} resource="product_rules" > - - - + + {product && (product.product_rules_need_approval || product.product_group_product_rules_need_approval) && ( )} + + { /> + } /> diff --git a/frontend/src/rules/product_rules/ProductRuleShow.tsx b/frontend/src/rules/product_rules/ProductRuleShow.tsx index 5a64e4f93..5c18156a8 100644 --- a/frontend/src/rules/product_rules/ProductRuleShow.tsx +++ b/frontend/src/rules/product_rules/ProductRuleShow.tsx @@ -1,28 +1,12 @@ -import { Box, Paper, Stack, Typography } from "@mui/material"; +import { Stack } from "@mui/material"; import { Fragment } from "react"; -import { - BooleanField, - ChipField, - DateField, - EditButton, - Labeled, - PrevNextButtons, - ReferenceField, - Show, - SortPayload, - TextField, - TopToolbar, - WithRecord, - useRecordContext, -} from "react-admin"; +import { EditButton, PrevNextButtons, Show, SortPayload, TopToolbar, WithRecord, useRecordContext } from "react-admin"; import { PERMISSION_PRODUCT_RULE_APPROVAL, PERMISSION_PRODUCT_RULE_EDIT } from "../../access_control/types"; -import MarkdownField from "../../commons/custom_fields/MarkdownField"; -import TextUrlField from "../../commons/custom_fields/TextUrlField"; -import { feature_vex_enabled } from "../../commons/functions"; -import { useStyles } from "../../commons/layout/themes"; import { ASSESSMENT_STATUS_NEEDS_APPROVAL } from "../../core/types"; import RuleApproval from "../RuleApproval"; +import RuleSimulation from "../RuleSimulation"; +import { RuleShowComponent } from "../functions"; import { RULE_STATUS_NEEDS_APPROVAL } from "../types"; const ShowActions = () => { @@ -49,210 +33,21 @@ const ShowActions = () => { {rule && filter && sort && storeKey && ( )} - {rule && - rule.approval_status == RULE_STATUS_NEEDS_APPROVAL && - (rule.product_data.product_rules_need_approval || - rule.product_data.product_group_product_rules_need_approval) && - rule.product_data.permissions.includes(PERMISSION_PRODUCT_RULE_APPROVAL) && ( + {rule?.approval_status == RULE_STATUS_NEEDS_APPROVAL && + (rule?.product_data.product_rules_need_approval || + rule?.product_data.product_group_product_rules_need_approval) && + rule?.product_data.permissions.includes(PERMISSION_PRODUCT_RULE_APPROVAL) && ( )} - {rule && rule.product_data.permissions.includes(PERMISSION_PRODUCT_RULE_EDIT) && } + {rule && } + {rule?.product_data.permissions.includes(PERMISSION_PRODUCT_RULE_EDIT) && } ); }; -function generateProductURL(product_id: number, is_product_group: boolean): string { - if (is_product_group) { - return "#/product_groups/" + product_id + "/show/rules"; - } - return "#/products/" + product_id + "/show/rules"; -} - -function getProductLabel(product_data: any): string { - if (product_data.is_product_group) { - return "Product group"; - } - return "Product"; -} - const ProductRuleComponent = () => { - const { classes } = useStyles(); - - return ( - ( - - - - Product Rule - - - - - - - - - {rule.description && ( - - - - )} - - {rule.new_severity && ( - - - - )} - {rule.new_status && ( - - - - )} - {feature_vex_enabled() && rule.new_vex_justification && ( - - - - )} - - - - 
{rule.user_full_name && ( - - - - )} - - - - - - Observation - - - {rule.parser && ( - - - - )} - {rule.scanner_prefix && ( - - - - )} - {rule.title && ( - - - - )} - {rule.description_observation && ( - - - - )} - - - - {rule && - (rule.origin_component_name_version || - rule.origin_docker_image_name_tag || - rule.origin_endpoint_url || - rule.origin_service_name || - rule.origin_source_file || - rule.origin_cloud_qualified_resource || - rule.origin_kubernetes_qualified_resource) && ( - - - Origins - - - {rule.origin_component_name_version && ( - - - - )} - {rule.origin_docker_image_name_tag && ( - - - - )} - {rule.origin_endpoint_url && ( - - - - )} - {rule.origin_service_name && ( - - - - )} - {rule.origin_source_file && ( - - - - )} - {rule.origin_cloud_qualified_resource && ( - - - - )} - {rule.origin_kubernetes_qualified_resource && ( - - - - )} - - - )} - - {rule && - (rule.product_data.product_rules_need_approval || - rule.product_data.product_group_product_rules_need_approval) && ( - - - Approval - - - - - - {rule.approval_user_full_name && ( - - - - )} - {rule.approval_remark && ( - - - - )} - {rule.approval_date && ( - - - - )} - - - )} - - )} - /> - ); + return } />; }; const GeneralRuleShow = () => { diff --git a/frontend/src/rules/types.ts b/frontend/src/rules/types.ts index b44129b62..03c927b4a 100644 --- a/frontend/src/rules/types.ts +++ b/frontend/src/rules/types.ts @@ -9,6 +9,7 @@ export interface GeneralRule extends RaRecord { title: string; description_observation: string; origin_component_name_version: string; + origin_component_purl: string; origin_docker_image_name_tag: string; origin_endpoint_url: string; origin_service_name: string; @@ -27,6 +28,7 @@ export interface ProductRule extends RaRecord { scanner_prefix: string; title: string; origin_component_name_version: string; + origin_component_purl: string; origin_docker_image_name_tag: string; origin_endpoint_url: string; origin_service_name: string; @@ -53,3 +55,11 @@ export const RULE_STATUS_CHOICES_APPROVAL = [ { id: RULE_STATUS_APPROVED, name: RULE_STATUS_APPROVED }, { id: RULE_STATUS_REJECTED, name: RULE_STATUS_REJECTED }, ]; + +export const RULE_TYPE_FIELDS = "Fields"; +export const RULE_TYPE_REGO = "Rego"; + +export const RULE_TYPE_CHOICES = [ + { id: RULE_TYPE_FIELDS, name: RULE_TYPE_FIELDS }, + { id: RULE_TYPE_REGO, name: RULE_TYPE_REGO }, +]; diff --git a/frontend/src/types/globals.ts b/frontend/src/types/globals.ts index 2a474ca3c..219ca7cb9 100644 --- a/frontend/src/types/globals.ts +++ b/frontend/src/types/globals.ts @@ -10,6 +10,7 @@ declare global { OIDC_REDIRECT_URI: string; OIDC_POST_LOGOUT_REDIRECT_URI: string; OIDC_SCOPE: string; + OIDC_PROMPT: string; }; } } diff --git a/frontend/src/vex/csaf/CSAFCreate.tsx b/frontend/src/vex/csaf/CSAFCreate.tsx index 17b3ad39c..a53e3de26 100644 --- a/frontend/src/vex/csaf/CSAFCreate.tsx +++ b/frontend/src/vex/csaf/CSAFCreate.tsx @@ -6,7 +6,6 @@ import { CreateBase, FormDataConsumer, ReferenceInput, - SaveButton, SimpleForm, SimpleFormIterator, useNotify, @@ -15,8 +14,7 @@ import { import axios_instance from "../../access_control/auth_provider/axios_instance"; import AddButton from "../../commons/custom_fields/AddButton"; -import CancelButton from "../../commons/custom_fields/CancelButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_255, validate_required, validate_required_255 } from "../../commons/custom_validators"; import { 
AutocompleteInputMedium, AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; import { CSAF_PUBLISHER_CATEGORY_CHOICES, CSAF_TLP_LABEL_CHOICES, CSAF_TRACKING_STATUS_CHOICES } from "../types"; @@ -37,22 +35,15 @@ const CSAFCreate = () => { setLoading(false); }; - const CustomToolbar = () => ( - - - } /> - - ); - const create_csaf = async (data: any) => { setLoading(true); data.vulnerability_names = data.vulnerability_names.map((v: any) => v.name); data.vulnerability_names = data.vulnerability_names.filter((v: any) => v != null); - if (data.branch_names) { - data.branch_names = data.branch_names.map((v: any) => v.name); - data.branch_names = data.branch_names.filter((v: any) => v != null); + if (data.branches) { + data.branches = data.branches.map((v: any) => v.branch); + data.branches = data.branches.filter((v: any) => v != null); } const url = "vex/csaf_document/create/"; @@ -95,7 +86,16 @@ const CSAFCreate = () => { Create CSAF document - }> + } + /> + } + > CSAF @@ -115,10 +115,9 @@ const CSAFCreate = () => { {({ formData }) => formData.product && ( - + - - {/* { alwaysOn > - */} + ) diff --git a/frontend/src/vex/csaf/CSAFShow.tsx b/frontend/src/vex/csaf/CSAFShow.tsx index 469f23215..38011cd39 100644 --- a/frontend/src/vex/csaf/CSAFShow.tsx +++ b/frontend/src/vex/csaf/CSAFShow.tsx @@ -16,6 +16,7 @@ import { useRecordContext, } from "react-admin"; +import csafs from "."; import { delete_permission, update_permission } from "../functions"; import CSAFUpdate from "./CSAFUpdate"; @@ -42,11 +43,12 @@ const CSAFComponent = () => { render={(csaf) => ( - - Exported CSAF document + + +   Exported CSAF document - {csaf && csaf.product_data && csaf.product_data.name && ( + {csaf?.product_data?.name && ( { /> )} - {csaf && csaf.vulnerability_names && ( + {csaf?.vulnerability_names && ( { )} - {csaf && csaf.branch_names && ( + {csaf?.branch_names && ( { setLoading(false); }; - const CustomToolbar = () => ( - - - } alwaysEnable /> - - ); - const update_csaf = async (data: any) => { setLoading(true); @@ -76,7 +68,17 @@ const CSAFUpdate = () => { Update CSAF document - }> + } + alwaysEnable + /> + } + > Document diff --git a/frontend/src/vex/cyclonedx/CycloneDXCreate.tsx b/frontend/src/vex/cyclonedx/CycloneDXCreate.tsx new file mode 100644 index 000000000..da90315a8 --- /dev/null +++ b/frontend/src/vex/cyclonedx/CycloneDXCreate.tsx @@ -0,0 +1,160 @@ +import AddIcon from "@mui/icons-material/Add"; +import { Backdrop, CircularProgress, Dialog, DialogContent, DialogTitle, Divider, Typography } from "@mui/material"; +import { Fragment, useState } from "react"; +import { + ArrayInput, + CreateBase, + FormDataConsumer, + ReferenceInput, + SimpleForm, + SimpleFormIterator, + useNotify, + useRefresh, +} from "react-admin"; + +import axios_instance from "../../access_control/auth_provider/axios_instance"; +import AddButton from "../../commons/custom_fields/AddButton"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { validate_255, validate_required_255 } from "../../commons/custom_validators"; +import { AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; + +const CycloneDXCreate = () => { + const [open, setOpen] = useState(false); + const [loading, setLoading] = useState(false); + const refresh = useRefresh(); + const notify = useNotify(); + const handleOpen = () => setOpen(true); + const handleCancel = () => { + setOpen(false); + setLoading(false); + }; + const handleClose = (event: object, reason: string) => { + if (reason 
&& reason == "backdropClick") return; + setOpen(false); + setLoading(false); + }; + + const create_cyclonedx = async (data: any) => { + setLoading(true); + + data.vulnerability_names = data.vulnerability_names.map((v: any) => v.name); + data.vulnerability_names = data.vulnerability_names.filter((v: any) => v != null); + + if (data.branches) { + data.branches = data.branches.map((v: any) => v.branch); + data.branches = data.branches.filter((v: any) => v != null); + } + + data.author ??= ""; + data.manufacturer ??= ""; + + const url = "vex/cyclonedx_document/create/"; + axios_instance + .post(url, data, { responseType: "blob" }) + .then(function (response) { + if (response.status == 204) { + setLoading(false); + notify("No vulnerabilities found to create CycloneDX document", { + type: "warning", + }); + } else { + const blob = new Blob([response.data], { type: "application/json" }); + const url = window.URL.createObjectURL(blob); + const link = document.createElement("a"); + link.href = url; + link.download = response.headers["content-disposition"].split("filename=")[1]; + link.click(); + + refresh(); + setLoading(false); + notify("CycloneDX document created", { + type: "success", + }); + } + setOpen(false); + }) + .catch(async function (error) { + setLoading(false); + notify(await error.response.data.text(), { + type: "warning", + }); + }); + }; + + return ( + + + + Create CycloneDX document + + + } + /> + } + > + + CycloneDX + + + + + + + + + + + {({ formData }) => + formData.product && ( + + + + + + + + ) + } + + + + Document + + + + + + + + + {loading ? ( + theme.zIndex.drawer + 1 }} open={open}> + + + ) : null} + + ); +}; + +export default CycloneDXCreate; diff --git a/frontend/src/vex/cyclonedx/CycloneDXList.tsx b/frontend/src/vex/cyclonedx/CycloneDXList.tsx new file mode 100644 index 000000000..0ecff0be5 --- /dev/null +++ b/frontend/src/vex/cyclonedx/CycloneDXList.tsx @@ -0,0 +1,100 @@ +import { Fragment } from "react"; +import { + ChipField, + Datagrid, + FunctionField, + List, + NumberField, + ReferenceInput, + ReferenceManyField, + SingleFieldList, + TextField, + TextInput, + TopToolbar, +} from "react-admin"; + +import { CustomPagination } from "../../commons/custom_fields/CustomPagination"; +import { humanReadableDate } from "../../commons/functions"; +import ListHeader from "../../commons/layout/ListHeader"; +import { AutocompleteInputMedium } from "../../commons/layout/themes"; +import { getSettingListSize } from "../../commons/user_settings/functions"; +import cyclonedx from "../../vex/cyclonedx"; +import { CycloneDX } from "../../vex/types"; +import CycloneDXCreate from "./CycloneDXCreate"; + +const listFilters = [ + + + , + , +]; + +const ListActions = () => ( + + + +); + +const CycloneDXList = () => { + return ( + + + } + filters={listFilters} + sort={{ field: "first_issued", order: "DESC" }} + actions={} + disableSyncWithLocation={false} + storeKey="cyclonedx.list" + empty={false} + > + + + + + + + + + + + + + + + + + + + label="First issued" + sortBy="first_issued" + render={(record) => (record ? humanReadableDate(record.first_issued) : "")} + /> + + label="Last updated" + sortBy="last_updated" + render={(record) => (record ? 
humanReadableDate(record.last_updated) : "")} + /> + + + + + ); +}; + +export default CycloneDXList; diff --git a/frontend/src/vex/cyclonedx/CycloneDXShow.tsx b/frontend/src/vex/cyclonedx/CycloneDXShow.tsx new file mode 100644 index 000000000..496a1d74c --- /dev/null +++ b/frontend/src/vex/cyclonedx/CycloneDXShow.tsx @@ -0,0 +1,151 @@ +import { Box, Paper, Stack, Typography } from "@mui/material"; +import { + ChipField, + DateField, + DeleteWithConfirmButton, + Labeled, + PrevNextButtons, + ReferenceField, + ReferenceManyField, + Show, + SingleFieldList, + TextField, + TopToolbar, + WithRecord, + useRecordContext, +} from "react-admin"; +import { Fragment } from "react/jsx-runtime"; + +import cyclonedxs from "."; +import { delete_permission, update_permission } from "../functions"; +import CycloneDXUpdate from "./CycloneDXUpdate"; + +const ShowActions = () => { + const cyclonedx = useRecordContext(); + return ( + + + + {update_permission(cyclonedx) && } + {delete_permission(cyclonedx) && } + + + ); +}; + +const CycloneDXComponent = () => { + return ( + ( + + + + + +   Exported CycloneDX document + + {cyclonedx?.product_data?.name && ( + + + + )} + {cyclonedx?.vulnerability_names && ( + + + + + + + + )} + {cyclonedx?.branch_names && ( + + + + + + + + )} + + + + + + + + ( + + + Document + {" "} + + + + + + + + + + {cyclonedx.author && ( + + + + )} + {cyclonedx.manufacturer && ( + + + + )} + + )} + /> + + + + + + Tracking + + + + + + + + + + + )} + /> + ); +}; +const CycloneDXShow = () => { + return ( + } component={CycloneDXComponent}> + + + ); +}; + +export default CycloneDXShow; diff --git a/frontend/src/vex/cyclonedx/CycloneDXUpdate.tsx b/frontend/src/vex/cyclonedx/CycloneDXUpdate.tsx new file mode 100644 index 000000000..e2b03aa14 --- /dev/null +++ b/frontend/src/vex/cyclonedx/CycloneDXUpdate.tsx @@ -0,0 +1,101 @@ +import EditIcon from "@mui/icons-material/Edit"; +import { Backdrop, CircularProgress, Dialog, DialogContent, DialogTitle, Typography } from "@mui/material"; +import { Fragment, useState } from "react"; +import { SimpleForm, useNotify, useRefresh } from "react-admin"; + +import axios_instance from "../../access_control/auth_provider/axios_instance"; +import EditButton from "../../commons/custom_fields/EditButton"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { validate_255 } from "../../commons/custom_validators"; +import { TextInputWide } from "../../commons/layout/themes"; + +const CycloneDXUpdate = () => { + const [open, setOpen] = useState(false); + const [loading, setLoading] = useState(false); + const refresh = useRefresh(); + const notify = useNotify(); + const handleOpen = () => setOpen(true); + const handleCancel = () => { + setOpen(false); + setLoading(false); + }; + const handleClose = (event: object, reason: string) => { + if (reason && reason == "backdropClick") return; + setOpen(false); + setLoading(false); + }; + + const update_cyclonedx = async (data: any) => { + setLoading(true); + + data.author ??= ""; + data.manufacturer ??= ""; + + const url = "vex/cyclonedx_document/update/" + data.document_id_prefix + "/" + data.document_base_id + "/"; + axios_instance + .post(url, data, { responseType: "blob" }) + .then(function (response) { + if (response.status == 204) { + setLoading(false); + notify("No changes in CycloneDX document", { + type: "warning", + }); + } else { + const blob = new Blob([response.data], { type: "application/json" }); + const url = window.URL.createObjectURL(blob); + const link = 
document.createElement("a"); + link.href = url; + link.download = response.headers["content-disposition"].split("filename=")[1]; + link.click(); + + refresh(); + setLoading(false); + notify("CycloneDX document updated", { + type: "success", + }); + } + setOpen(false); + }) + .catch(async function (error) { + setLoading(false); + notify(await error.response.data.text(), { + type: "warning", + }); + }); + }; + + return ( + + + + Update CycloneDX document + + } + alwaysEnable + /> + } + > + + Document + + + + + + + {loading ? ( + theme.zIndex.drawer + 1 }} open={open}> + + + ) : null} + + ); +}; + +export default CycloneDXUpdate; diff --git a/frontend/src/vex/cyclonedx/index.ts b/frontend/src/vex/cyclonedx/index.ts new file mode 100644 index 000000000..892c35a68 --- /dev/null +++ b/frontend/src/vex/cyclonedx/index.ts @@ -0,0 +1,10 @@ +import CycloneDXIcon from "@mui/icons-material/Security"; + +import CycloneDXList from "./CycloneDXList"; +import CycloneDXShow from "./CycloneDXShow"; + +export default { + list: CycloneDXList, + show: CycloneDXShow, + icon: CycloneDXIcon, +}; diff --git a/frontend/src/vex/functions.ts b/frontend/src/vex/functions.ts index 952c669df..d3cc421a2 100644 --- a/frontend/src/vex/functions.ts +++ b/frontend/src/vex/functions.ts @@ -1,22 +1,10 @@ import { PERMISSION_VEX_DELETE, PERMISSION_VEX_EDIT } from "../access_control/types"; import { is_superuser } from "../commons/functions"; -export const update_permission = (csaf: any | null) => { - return ( - csaf && - ((csaf.product_data && - csaf.product_data.permissions && - csaf.product_data.permissions.includes(PERMISSION_VEX_EDIT)) || - is_superuser()) - ); +export const update_permission = (csaf: any) => { + return csaf && (csaf?.product_data?.permissions?.includes(PERMISSION_VEX_EDIT) || is_superuser()); }; -export const delete_permission = (csaf: any | null) => { - return ( - csaf && - ((csaf.product_data && - csaf.product_data.permissions && - csaf.product_data.permissions.includes(PERMISSION_VEX_DELETE)) || - is_superuser()) - ); +export const delete_permission = (csaf: any) => { + return csaf && (csaf?.product_data?.permissions?.includes(PERMISSION_VEX_DELETE) || is_superuser()); }; diff --git a/frontend/src/vex/openvex/OpenVEXCreate.tsx b/frontend/src/vex/openvex/OpenVEXCreate.tsx index 55ba2d9c4..7d7de91cd 100644 --- a/frontend/src/vex/openvex/OpenVEXCreate.tsx +++ b/frontend/src/vex/openvex/OpenVEXCreate.tsx @@ -6,7 +6,6 @@ import { CreateBase, FormDataConsumer, ReferenceInput, - SaveButton, SimpleForm, SimpleFormIterator, useNotify, @@ -15,8 +14,7 @@ import { import axios_instance from "../../access_control/auth_provider/axios_instance"; import AddButton from "../../commons/custom_fields/AddButton"; -import CancelButton from "../../commons/custom_fields/CancelButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; import { validate_255, validate_required_255 } from "../../commons/custom_validators"; import { AutocompleteInputWide, TextInputWide } from "../../commons/layout/themes"; @@ -36,22 +34,15 @@ const OpenVEXCreate = () => { setLoading(false); }; - const CustomToolbar = () => ( - - - } /> - - ); - const create_openvex = async (data: any) => { setLoading(true); data.vulnerability_names = data.vulnerability_names.map((v: any) => v.name); data.vulnerability_names = data.vulnerability_names.filter((v: any) => v != null); - if (data.branch_names) { - data.branch_names = data.branch_names.map((v: any) => v.name); - 
data.branch_names = data.branch_names.filter((v: any) => v != null); + if (data.branches) { + data.branches = data.branches.map((v: any) => v.branch); + data.branches = data.branches.filter((v: any) => v != null); } const url = "vex/openvex_document/create/"; @@ -73,7 +64,7 @@ const OpenVEXCreate = () => { refresh(); setLoading(false); - notify("CASF document created", { + notify("OpenVEX document created", { type: "success", }); } @@ -94,7 +85,16 @@ const OpenVEXCreate = () => { Create OpenVEX document - }> + } + /> + } + > OpenVEX @@ -114,10 +114,9 @@ const OpenVEXCreate = () => { {({ formData }) => formData.product && ( - + - - {/* { alwaysOn > - */} + ) diff --git a/frontend/src/vex/openvex/OpenVEXShow.tsx b/frontend/src/vex/openvex/OpenVEXShow.tsx index 6d901d591..bc69b7929 100644 --- a/frontend/src/vex/openvex/OpenVEXShow.tsx +++ b/frontend/src/vex/openvex/OpenVEXShow.tsx @@ -16,6 +16,7 @@ import { } from "react-admin"; import { Fragment } from "react/jsx-runtime"; +import openvexs from "."; import { delete_permission, update_permission } from "../functions"; import OpenVEXUpdate from "./OpenVEXUpdate"; @@ -39,10 +40,11 @@ const OpenVEXComponent = () => { - - Exported OpenVEX document + + +   Exported OpenVEX document - {openvex && openvex.product_data && openvex.product_data.name && ( + {openvex?.product_data?.name && ( { /> )} - {openvex && openvex.vulnerability_names && ( + {openvex?.vulnerability_names && ( { )} - {openvex && openvex.branch_names && ( + {openvex?.branch_names && ( { setLoading(false); }; - const CustomToolbar = () => ( - - - } alwaysEnable /> - - ); - const update_openvex = async (data: any) => { setLoading(true); @@ -55,7 +47,7 @@ const OpenVEXUpdate = () => { refresh(); setLoading(false); - notify("CASF document updated", { + notify("OpenVEX document updated", { type: "success", }); } @@ -75,7 +67,17 @@ const OpenVEXUpdate = () => { Update OpenVEX document - }> + } + alwaysEnable + /> + } + > Document diff --git a/frontend/src/vex/types.ts b/frontend/src/vex/types.ts index f26fd9314..0cc266db6 100644 --- a/frontend/src/vex/types.ts +++ b/frontend/src/vex/types.ts @@ -56,6 +56,19 @@ export interface OpenVEX extends RaRecord { last_updated: string; } +export interface CycloneDX extends RaRecord { + id: Identifier; + user: Identifier; + product: Identifier; + vulnerablity_names: string; + document_id_prefix: string; + version: number; + content_hash: string; + author: string; + first_issued: string; + last_updated: string; +} + export const VEX_STATUS_CHOICES = [ { id: "not_affected", name: "not_affected" }, { id: "affected", name: "affected" }, diff --git a/frontend/src/vex/vex_counters/VEXCounterShow.tsx b/frontend/src/vex/vex_counters/VEXCounterShow.tsx index 3d49b737e..4df068b64 100644 --- a/frontend/src/vex/vex_counters/VEXCounterShow.tsx +++ b/frontend/src/vex/vex_counters/VEXCounterShow.tsx @@ -1,6 +1,7 @@ import { Stack, Typography } from "@mui/material"; import { EditButton, NumberField, PrevNextButtons, Show, SimpleShowLayout, TextField, TopToolbar } from "react-admin"; +import vex_counters from "."; import { is_superuser } from "../../commons/functions"; const ShowActions = () => { @@ -22,8 +23,9 @@ const VEXCounterShow = () => { return ( }> - - VEX Counter + + +   VEX Counter diff --git a/frontend/src/vex/vex_documents/VEXDocumentImport.tsx b/frontend/src/vex/vex_documents/VEXDocumentImport.tsx index 31fe3b09a..a3e2adde3 100644 --- a/frontend/src/vex/vex_documents/VEXDocumentImport.tsx +++ b/frontend/src/vex/vex_documents/VEXDocumentImport.tsx @@ -1,28 
+1,18 @@ import UploadIcon from "@mui/icons-material/Upload"; import { Backdrop, CircularProgress, Dialog, DialogContent, DialogTitle } from "@mui/material"; -import { ChangeEvent, Fragment, useState } from "react"; -import { SaveButton, SimpleForm, useNotify, useRefresh } from "react-admin"; -import { makeStyles } from "tss-react/mui"; +import { Fragment, useState } from "react"; +import { FileField, FileInput, SimpleForm, useNotify, useRefresh } from "react-admin"; -import CancelButton from "../../commons/custom_fields/CancelButton"; import SmallButton from "../../commons/custom_fields/SmallButton"; -import Toolbar from "../../commons/custom_fields/Toolbar"; +import { ToolbarCancelSave } from "../../commons/custom_fields/ToolbarCancelSave"; +import { validate_required } from "../../commons/custom_validators"; import { httpClient } from "../../commons/ra-data-django-rest-framework"; const VEXDocumentImport = () => { - const useStyles = makeStyles()({ - input: { - marginTop: "2em", - marginBottom: "2em", - }, - }); - const [open, setOpen] = useState(false); const [loading, setLoading] = useState(false); const refresh = useRefresh(); const notify = useNotify(); - const [fileSelected, setFileSelected] = useState(); - const { classes } = useStyles(); const handleOpen = () => setOpen(true); const handleCancel = () => { setOpen(false); @@ -34,61 +24,57 @@ const VEXDocumentImport = () => { setLoading(false); }; - const handleFileChange = function (e: ChangeEvent) { - const fileList = e.target.files; - if (!fileList) return; - setFileSelected(fileList[0]); - }; - - const vexImport = async () => { - if (fileSelected) { - setLoading(true); + const vexImport = async (data: any) => { + setLoading(true); - const formData = new FormData(); - formData.append("file", fileSelected, fileSelected.name); + const formData = new FormData(); + formData.append("file", data.file.rawFile, data.file.title); - httpClient(window.__RUNTIME_CONFIG__.API_BASE_URL + "/vex/vex_import/", { - method: "POST", - body: formData, + httpClient(window.__RUNTIME_CONFIG__.API_BASE_URL + "/vex/vex_import/", { + method: "POST", + body: formData, + }) + .then(() => { + refresh(); + setLoading(false); + setOpen(false); + notify("VEX document imported", { + type: "success", + }); }) - .then(() => { - refresh(); - setLoading(false); - setOpen(false); - notify("VEX document imported", { - type: "success", - }); - }) - .catch((error) => { - setLoading(false); - setOpen(false); - notify(error.message, { - type: "warning", - }); + .catch((error) => { + setLoading(false); + setOpen(false); + notify(error.message, { + type: "warning", }); - } + }); }; - const CustomToolbar = () => ( - - - } alwaysEnable /> - - ); return ( } /> Import VEX document - }> - + } + /> + } + > + + + diff --git a/frontend/src/vex/vex_documents/VEXDocumentShow.tsx b/frontend/src/vex/vex_documents/VEXDocumentShow.tsx index 21418266f..28bd744c1 100644 --- a/frontend/src/vex/vex_documents/VEXDocumentShow.tsx +++ b/frontend/src/vex/vex_documents/VEXDocumentShow.tsx @@ -12,6 +12,7 @@ import { WithRecord, } from "react-admin"; +import vex_documents from "."; import { useStyles } from "../../commons/layout/themes"; import VEXStatementEmbeddedList from "../vex_statements/VEXStatementEmbeddedList"; @@ -38,8 +39,9 @@ const VEXDocumentComponent = () => { render={(vex_document) => ( - - Imported VEX document + + +   Imported VEX document @@ -65,7 +67,7 @@ const VEXDocumentComponent = () => { - {vex_document && vex_document.role && ( + {vex_document?.role && ( diff --git 
a/frontend/src/vex/vex_statements/VEXStatementEmbeddedList.tsx b/frontend/src/vex/vex_statements/VEXStatementEmbeddedList.tsx index aa22296ae..cf69a14b9 100644 --- a/frontend/src/vex/vex_statements/VEXStatementEmbeddedList.tsx +++ b/frontend/src/vex/vex_statements/VEXStatementEmbeddedList.tsx @@ -58,6 +58,7 @@ const VEXStatementEmbeddedList = ({ vex_document }: VEXStatementEmbeddedListProp + diff --git a/frontend/src/vex/vex_statements/VEXStatementShow.tsx b/frontend/src/vex/vex_statements/VEXStatementShow.tsx index 27a05972f..621b9cf17 100644 --- a/frontend/src/vex/vex_statements/VEXStatementShow.tsx +++ b/frontend/src/vex/vex_statements/VEXStatementShow.tsx @@ -11,6 +11,7 @@ import { useRecordContext, } from "react-admin"; +import vex_statements from "."; import { useStyles } from "../../commons/layout/themes"; const ShowActions = () => { @@ -38,8 +39,9 @@ const VEXStatementShow = () => { ( - - Imported VEX Statement + + +   Imported VEX Statement { {vex_statement.remediation && } {vex_statement.product_purl && } {vex_statement.component_purl && } + {vex_statement.component_cyclonedx_bom_link && ( + + )} )} /> diff --git a/frontend/src/vex/vex_statements/index.ts b/frontend/src/vex/vex_statements/index.ts index bf709a320..52789b103 100644 --- a/frontend/src/vex/vex_statements/index.ts +++ b/frontend/src/vex/vex_statements/index.ts @@ -1,5 +1,8 @@ +import VEXStatementIcon from "@mui/icons-material/Security"; + import VEXStatementShow from "./VEXStatementShow"; export default { show: VEXStatementShow, + icon: VEXStatementIcon, }; diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index c83a2369e..ed77f1ced 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -14,8 +14,8 @@ "strict": true, "forceConsistentCasingInFileNames": true, "noFallthroughCasesInSwitch": true, - "module": "esnext", - "moduleResolution": "node", + "module": "nodenext", + "moduleResolution": "nodenext", "resolveJsonModule": true, "isolatedModules": true, "noEmit": true, diff --git a/keycloak/h2/keycloakdb.mv.db.dist b/keycloak/h2/keycloakdb.mv.db.dist deleted file mode 100644 index 1bc8960a7..000000000 Binary files a/keycloak/h2/keycloakdb.mv.db.dist and /dev/null differ diff --git a/keycloak/realm-secobserve.json b/keycloak/realm-secobserve.json new file mode 100644 index 000000000..9c24e8b1a --- /dev/null +++ b/keycloak/realm-secobserve.json @@ -0,0 +1,2726 @@ +{ + "id": "f08914aa-d4a2-4828-ad14-faefca776dee", + "realm": "secobserve", + "notBefore": 0, + "defaultSignatureAlgorithm": "RS256", + "revokeRefreshToken": false, + "refreshTokenMaxReuse": 0, + "accessTokenLifespan": 300, + "accessTokenLifespanForImplicitFlow": 900, + "ssoSessionIdleTimeout": 1800, + "ssoSessionMaxLifespan": 36000, + "ssoSessionIdleTimeoutRememberMe": 0, + "ssoSessionMaxLifespanRememberMe": 0, + "offlineSessionIdleTimeout": 2592000, + "offlineSessionMaxLifespanEnabled": false, + "offlineSessionMaxLifespan": 5184000, + "clientSessionIdleTimeout": 0, + "clientSessionMaxLifespan": 0, + "clientOfflineSessionIdleTimeout": 0, + "clientOfflineSessionMaxLifespan": 0, + "accessCodeLifespan": 60, + "accessCodeLifespanUserAction": 300, + "accessCodeLifespanLogin": 1800, + "actionTokenGeneratedByAdminLifespan": 43200, + "actionTokenGeneratedByUserLifespan": 300, + "oauth2DeviceCodeLifespan": 600, + "oauth2DevicePollingInterval": 5, + "enabled": true, + "sslRequired": "external", + "registrationAllowed": false, + "registrationEmailAsUsername": false, + "rememberMe": false, + "verifyEmail": false, + "loginWithEmailAllowed": true, + 
"duplicateEmailsAllowed": false, + "resetPasswordAllowed": false, + "editUsernameAllowed": false, + "bruteForceProtected": false, + "permanentLockout": false, + "maxTemporaryLockouts": 0, + "bruteForceStrategy": "MULTIPLE", + "maxFailureWaitSeconds": 900, + "minimumQuickLoginWaitSeconds": 60, + "waitIncrementSeconds": 60, + "quickLoginCheckMilliSeconds": 1000, + "maxDeltaTimeSeconds": 43200, + "failureFactor": 30, + "roles": { + "realm": [ + { + "id": "326f00d0-4b5e-4483-bc60-f30a30e4eda6", + "name": "uma_authorization", + "description": "${role_uma_authorization}", + "composite": false, + "clientRole": false, + "containerId": "f08914aa-d4a2-4828-ad14-faefca776dee", + "attributes": {} + }, + { + "id": "91c3a755-620f-41d0-81f1-f24761a4938f", + "name": "offline_access", + "description": "${role_offline-access}", + "composite": false, + "clientRole": false, + "containerId": "f08914aa-d4a2-4828-ad14-faefca776dee", + "attributes": {} + }, + { + "id": "8d87b0a8-a8dd-4fa5-b0bd-cc0bf88ab326", + "name": "default-roles-secobserve", + "description": "${role_default-roles}", + "composite": true, + "composites": { + "realm": [ + "offline_access", + "uma_authorization" + ], + "client": { + "account": [ + "view-profile", + "manage-account" + ] + } + }, + "clientRole": false, + "containerId": "f08914aa-d4a2-4828-ad14-faefca776dee", + "attributes": {} + } + ], + "client": { + "realm-management": [ + { + "id": "6685ceae-149c-459d-a2bf-3f2fe5ec2508", + "name": "query-realms", + "description": "${role_query-realms}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "eb6e009f-57ff-45d3-bfd4-2adf43c983b9", + "name": "query-users", + "description": "${role_query-users}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "5895d106-876c-49b8-9c40-25743d4751ef", + "name": "view-events", + "description": "${role_view-events}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "49763662-b91f-4e40-aac3-d7be64473a45", + "name": "view-realm", + "description": "${role_view-realm}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "77e7688f-fa85-4dd0-ab12-3011330a4b08", + "name": "query-groups", + "description": "${role_query-groups}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "6ca2fb84-de4e-45e0-8ded-ce08d69f2882", + "name": "manage-clients", + "description": "${role_manage-clients}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "bab860bb-72d5-461f-97ae-8bc86ae55a3e", + "name": "view-clients", + "description": "${role_view-clients}", + "composite": true, + "composites": { + "client": { + "realm-management": [ + "query-clients" + ] + } + }, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "3b50136f-ad59-484a-89ff-6744ca498e2f", + "name": "manage-realm", + "description": "${role_manage-realm}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "fb65e474-e120-408e-bda7-5dd3a31eea02", + "name": "manage-users", + "description": 
"${role_manage-users}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "210946ce-4db0-4c22-a4cc-473738da9f90", + "name": "manage-identity-providers", + "description": "${role_manage-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "cf87201e-f8c7-4401-acae-4a87bc6bb36d", + "name": "view-identity-providers", + "description": "${role_view-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "79e684d9-4de9-4c6c-8394-cfb5ff19021a", + "name": "view-users", + "description": "${role_view-users}", + "composite": true, + "composites": { + "client": { + "realm-management": [ + "query-users", + "query-groups" + ] + } + }, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "c570e7ed-f7a3-4706-8762-528099b79135", + "name": "manage-authorization", + "description": "${role_manage-authorization}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "21b09fe3-e477-49d6-a775-6c01a161f6c7", + "name": "query-clients", + "description": "${role_query-clients}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "8075f734-f7de-4e5d-9218-c85cd81b6f3c", + "name": "realm-admin", + "description": "${role_realm-admin}", + "composite": true, + "composites": { + "client": { + "realm-management": [ + "view-events", + "query-realms", + "query-users", + "view-realm", + "query-groups", + "manage-clients", + "view-clients", + "manage-realm", + "manage-users", + "manage-identity-providers", + "view-identity-providers", + "view-users", + "manage-authorization", + "query-clients", + "impersonation", + "create-client", + "view-authorization", + "manage-events" + ] + } + }, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "0f2d3e2c-9827-4827-b801-5e2a35558337", + "name": "impersonation", + "description": "${role_impersonation}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "2a443e60-d71f-45c3-b2c1-250e19894eb0", + "name": "create-client", + "description": "${role_create-client}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "8ed8ae1f-881d-4382-9185-5832dd578188", + "name": "view-authorization", + "description": "${role_view-authorization}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + }, + { + "id": "d74781f1-380c-4a93-9a83-1618bd23b0af", + "name": "manage-events", + "description": "${role_manage-events}", + "composite": false, + "clientRole": true, + "containerId": "cf0276ff-0524-4668-979d-3e55b159467b", + "attributes": {} + } + ], + "security-admin-console": [], + "admin-cli": [], + "account-console": [], + "broker": [ + { + "id": "e0b65fd6-ba19-4b6c-98f2-3e568a5f8539", + "name": "read-token", + "description": "${role_read-token}", + "composite": false, + "clientRole": true, + "containerId": "b625abca-9e98-445c-a4b2-7d9212866e8d", + "attributes": {} + } + ], + "account": [ + { 
+ "id": "b702f8f8-c13b-4086-b735-6fc5f76cb47f", + "name": "view-applications", + "description": "${role_view-applications}", + "composite": false, + "clientRole": true, + "containerId": "26fb21af-8468-439f-9921-4cd61bf294a6", + "attributes": {} + }, + { + "id": "b854437a-cded-44ba-a649-ebc819e04283", + "name": "view-groups", + "description": "${role_view-groups}", + "composite": false, + "clientRole": true, + "containerId": "26fb21af-8468-439f-9921-4cd61bf294a6", + "attributes": {} + }, + { + "id": "6a07898f-de28-49f7-8c86-91fdd10a03a1", + "name": "view-profile", + "description": "${role_view-profile}", + "composite": false, + "clientRole": true, + "containerId": "26fb21af-8468-439f-9921-4cd61bf294a6", + "attributes": {} + }, + { + "id": "543141c8-35ee-46f5-aafe-bb1c52adbc40", + "name": "manage-account", + "description": "${role_manage-account}", + "composite": true, + "composites": { + "client": { + "account": [ + "manage-account-links" + ] + } + }, + "clientRole": true, + "containerId": "26fb21af-8468-439f-9921-4cd61bf294a6", + "attributes": {} + }, + { + "id": "2d11b00d-05e7-4c7c-8f9c-c9004c74e4d5", + "name": "manage-consent", + "description": "${role_manage-consent}", + "composite": true, + "composites": { + "client": { + "account": [ + "view-consent" + ] + } + }, + "clientRole": true, + "containerId": "26fb21af-8468-439f-9921-4cd61bf294a6", + "attributes": {} + }, + { + "id": "8ba6e69a-c2c4-4637-a4a2-bcf1539af46a", + "name": "delete-account", + "description": "${role_delete-account}", + "composite": false, + "clientRole": true, + "containerId": "26fb21af-8468-439f-9921-4cd61bf294a6", + "attributes": {} + }, + { + "id": "a2fc1e4c-6257-4b3d-8755-5ea23e09afb2", + "name": "manage-account-links", + "description": "${role_manage-account-links}", + "composite": false, + "clientRole": true, + "containerId": "26fb21af-8468-439f-9921-4cd61bf294a6", + "attributes": {} + }, + { + "id": "ebbc353f-e381-4ffb-ad21-d2452bc05e0c", + "name": "view-consent", + "description": "${role_view-consent}", + "composite": false, + "clientRole": true, + "containerId": "26fb21af-8468-439f-9921-4cd61bf294a6", + "attributes": {} + } + ], + "secobserve": [] + } + }, + "users": [ + { + "id": "ed043497-eb82-47ab-a96f-5ea027fe36be", + "username": "keycloak_user", + "firstName": "Keycloak", + "lastName": "User", + "email": "keycloak_user@secobserve.org", + "emailVerified": true, + "credentials": [ + { + "type": "password", + "value": "keycloak" + } + ], + "groups": [ + "/keycloak-group" + ], + "userProfileMetadata": { + "attributes": [ + { + "name": "username", + "displayName": "${username}", + "required": true, + "readOnly": true, + "validators": { + "username-prohibited-characters": { + "ignore.empty.value": true + }, + "multivalued": { + "max": "1" + }, + "length": { + "max": 255, + "ignore.empty.value": true, + "min": 3 + }, + "up-username-not-idn-homograph": { + "ignore.empty.value": true + } + }, + "multivalued": false + }, + { + "name": "email", + "displayName": "${email}", + "required": false, + "readOnly": false, + "validators": { + "multivalued": { + "max": "1" + }, + "length": { + "max": 255, + "ignore.empty.value": true + }, + "email": { + "ignore.empty.value": true + } + }, + "multivalued": false + }, + { + "name": "firstName", + "displayName": "${firstName}", + "required": false, + "readOnly": false, + "validators": { + "person-name-prohibited-characters": { + "ignore.empty.value": true + }, + "multivalued": { + "max": "1" + }, + "length": { + "max": 255, + "ignore.empty.value": true + } + }, + 
"multivalued": false + }, + { + "name": "lastName", + "displayName": "${lastName}", + "required": false, + "readOnly": false, + "validators": { + "person-name-prohibited-characters": { + "ignore.empty.value": true + }, + "multivalued": { + "max": "1" + }, + "length": { + "max": 255, + "ignore.empty.value": true + } + }, + "multivalued": false + } + ], + "groups": [ + { + "name": "user-metadata", + "displayHeader": "User metadata", + "displayDescription": "Attributes, which refer to user metadata" + } + ] + }, + "enabled": true, + "createdTimestamp": 1759517340844, + "totp": false, + "disableableCredentialTypes": [], + "requiredActions": [], + "notBefore": 0, + "access": { + "manage": true + } + } + ], + "groups": [ + { + "id": "5c9ab5ff-5788-4bee-a23f-7cfda924723e", + "name": "keycloak-group", + "path": "/keycloak-group", + "subGroups": [], + "attributes": {}, + "realmRoles": [], + "clientRoles": {} + } + ], + "defaultRole": { + "id": "8d87b0a8-a8dd-4fa5-b0bd-cc0bf88ab326", + "name": "default-roles-secobserve", + "description": "${role_default-roles}", + "composite": true, + "clientRole": false, + "containerId": "f08914aa-d4a2-4828-ad14-faefca776dee" + }, + "requiredCredentials": [ + "password" + ], + "otpPolicyType": "totp", + "otpPolicyAlgorithm": "HmacSHA1", + "otpPolicyInitialCounter": 0, + "otpPolicyDigits": 6, + "otpPolicyLookAheadWindow": 1, + "otpPolicyPeriod": 30, + "otpPolicyCodeReusable": false, + "otpSupportedApplications": [ + "totpAppFreeOTPName", + "totpAppGoogleName", + "totpAppMicrosoftAuthenticatorName" + ], + "localizationTexts": {}, + "webAuthnPolicyRpEntityName": "keycloak", + "webAuthnPolicySignatureAlgorithms": [ + "ES256", + "RS256" + ], + "webAuthnPolicyRpId": "", + "webAuthnPolicyAttestationConveyancePreference": "not specified", + "webAuthnPolicyAuthenticatorAttachment": "not specified", + "webAuthnPolicyRequireResidentKey": "not specified", + "webAuthnPolicyUserVerificationRequirement": "not specified", + "webAuthnPolicyCreateTimeout": 0, + "webAuthnPolicyAvoidSameAuthenticatorRegister": false, + "webAuthnPolicyAcceptableAaguids": [], + "webAuthnPolicyExtraOrigins": [], + "webAuthnPolicyPasswordlessRpEntityName": "keycloak", + "webAuthnPolicyPasswordlessSignatureAlgorithms": [ + "ES256", + "RS256" + ], + "webAuthnPolicyPasswordlessRpId": "", + "webAuthnPolicyPasswordlessAttestationConveyancePreference": "not specified", + "webAuthnPolicyPasswordlessAuthenticatorAttachment": "not specified", + "webAuthnPolicyPasswordlessRequireResidentKey": "Yes", + "webAuthnPolicyPasswordlessUserVerificationRequirement": "required", + "webAuthnPolicyPasswordlessCreateTimeout": 0, + "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister": false, + "webAuthnPolicyPasswordlessAcceptableAaguids": [], + "webAuthnPolicyPasswordlessExtraOrigins": [], + "scopeMappings": [ + { + "clientScope": "offline_access", + "roles": [ + "offline_access" + ] + } + ], + "clientScopeMappings": { + "account": [ + { + "client": "account-console", + "roles": [ + "manage-account", + "view-groups" + ] + } + ] + }, + "clients": [ + { + "id": "26fb21af-8468-439f-9921-4cd61bf294a6", + "clientId": "account", + "name": "${client_account}", + "rootUrl": "${authBaseUrl}", + "baseUrl": "/realms/secobserve/account/", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [ + "/realms/secobserve/account/*" + ], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + 
"standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "realm_client": "false", + "post.logout.redirect.uris": "+" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "basic", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "organization", + "microprofile-jwt" + ] + }, + { + "id": "fa84e388-9943-40b9-9884-e10be843e6ad", + "clientId": "account-console", + "name": "${client_account-console}", + "rootUrl": "${authBaseUrl}", + "baseUrl": "/realms/secobserve/account/", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [ + "/realms/secobserve/account/*" + ], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "realm_client": "false", + "post.logout.redirect.uris": "+", + "pkce.code.challenge.method": "S256" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "protocolMappers": [ + { + "id": "53d86e99-3699-4ce4-a141-efdbc75c0e67", + "name": "audience resolve", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-resolve-mapper", + "consentRequired": false, + "config": {} + } + ], + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "basic", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "organization", + "microprofile-jwt" + ] + }, + { + "id": "a85b766c-212f-475c-a702-c34375ad3b0a", + "clientId": "admin-cli", + "name": "${client_admin-cli}", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": false, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "realm_client": "false", + "client.use.lightweight.access.token.enabled": "true", + "post.logout.redirect.uris": "+" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "basic", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "organization", + "microprofile-jwt" + ] + }, + { + "id": "b625abca-9e98-445c-a4b2-7d9212866e8d", + "clientId": "broker", + "name": "${client_broker}", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": true, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + 
"serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "realm_client": "true", + "post.logout.redirect.uris": "+" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "basic", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "organization", + "microprofile-jwt" + ] + }, + { + "id": "cf0276ff-0524-4668-979d-3e55b159467b", + "clientId": "realm-management", + "name": "${client_realm-management}", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": true, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "realm_client": "true", + "post.logout.redirect.uris": "+" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "basic", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "organization", + "microprofile-jwt" + ] + }, + { + "id": "1db20e10-28e0-43a5-88f0-93fdce894fdb", + "clientId": "secobserve", + "name": "", + "description": "", + "rootUrl": "http://localhost:3000", + "adminUrl": "http://localhost:3000", + "baseUrl": "http://localhost:3000", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [ + "http://localhost:3000/*" + ], + "webOrigins": [ + "+" + ], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": true, + "protocol": "openid-connect", + "attributes": { + "realm_client": "false", + "oidc.ciba.grant.enabled": "false", + "client.secret.creation.time": "1759517215", + "backchannel.logout.session.required": "true", + "standard.token.exchange.enabled": "false", + "login_theme": "keycloak.v2", + "post.logout.redirect.uris": "http://localhost:3000/*", + "frontchannel.logout.session.required": "true", + "oauth2.device.authorization.grant.enabled": "false", + "display.on.consent.screen": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "dpop.bound.access.tokens": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": -1, + "protocolMappers": [ + { + "id": "a6f4f2c5-311b-42ed-a54c-22da643fb4da", + "name": "Group mapper", + "protocol": "openid-connect", + "protocolMapper": "oidc-group-membership-mapper", + "consentRequired": false, + "config": { + "full.path": "false", + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "multivalued": "true", + "id.token.claim": "true", + "lightweight.claim": "false", + "access.token.claim": "true", + "claim.name": "groups" + } + } + ], + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "basic", + "email" + ], + "optionalClientScopes": [ 
+ "address", + "phone", + "offline_access", + "organization", + "microprofile-jwt" + ] + }, + { + "id": "236a22b1-7296-462a-ba55-2fe510695104", + "clientId": "security-admin-console", + "name": "${client_security-admin-console}", + "rootUrl": "${authAdminUrl}", + "baseUrl": "/admin/secobserve/console/", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [ + "/admin/secobserve/console/*" + ], + "webOrigins": [ + "+" + ], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "realm_client": "false", + "client.use.lightweight.access.token.enabled": "true", + "post.logout.redirect.uris": "+", + "pkce.code.challenge.method": "S256" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": 0, + "protocolMappers": [ + { + "id": "546fff4b-6bcd-42ef-8e4a-3ccf642d5e9c", + "name": "locale", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "locale", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "locale", + "jsonType.label": "String" + } + } + ], + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "basic", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "organization", + "microprofile-jwt" + ] + } + ], + "clientScopes": [ + { + "id": "46286fba-0634-4d4e-ae2e-de200b0d463d", + "name": "saml_organization", + "description": "Organization Membership", + "protocol": "saml", + "attributes": { + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "86dae2da-0138-4079-829c-ebd87b28c2d5", + "name": "organization", + "protocol": "saml", + "protocolMapper": "saml-organization-membership-mapper", + "consentRequired": false, + "config": {} + } + ] + }, + { + "id": "11b5e115-7717-46b5-9223-e3b3faeb43a2", + "name": "profile", + "description": "OpenID Connect built-in scope: profile", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "consent.screen.text": "${profileScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "3ab9c9a1-eebb-422f-8f55-bacc11f06673", + "name": "username", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "preferred_username", + "jsonType.label": "String" + } + }, + { + "id": "c7d11607-87f4-479a-b116-a8536a4ba8d4", + "name": "gender", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "gender", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "gender", + "jsonType.label": "String" + } + }, + { + "id": "c0fe98b6-487e-44a8-a163-6d086484ac9e", + "name": "locale", + "protocol": 
"openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "locale", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "locale", + "jsonType.label": "String" + } + }, + { + "id": "5049e160-01fd-452d-b930-4de12d88c7ad", + "name": "picture", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "picture", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "picture", + "jsonType.label": "String" + } + }, + { + "id": "2bfba323-c18a-4d39-ae79-737b2a0e4b11", + "name": "zoneinfo", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "zoneinfo", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "zoneinfo", + "jsonType.label": "String" + } + }, + { + "id": "59553355-fa73-4d81-9f94-29d92f94992a", + "name": "middle name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "middleName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "middle_name", + "jsonType.label": "String" + } + }, + { + "id": "353a0c24-9200-4ee7-895a-aa75e76e4a27", + "name": "website", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "website", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "website", + "jsonType.label": "String" + } + }, + { + "id": "0d503047-146f-4dd6-8937-ea7aa641b0ed", + "name": "full name", + "protocol": "openid-connect", + "protocolMapper": "oidc-full-name-mapper", + "consentRequired": false, + "config": { + "id.token.claim": "true", + "introspection.token.claim": "true", + "access.token.claim": "true", + "userinfo.token.claim": "true" + } + }, + { + "id": "9aa657c2-8330-4125-b2f8-b35bf9b8fb03", + "name": "family name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "lastName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "family_name", + "jsonType.label": "String" + } + }, + { + "id": "1d86a070-a1ba-48a2-b589-ddc1b65a39ac", + "name": "profile", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "profile", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "profile", + "jsonType.label": "String" + } + }, + { + "id": "69e86f10-4d1d-4bf3-8ed1-6be1e29ea4c9", + "name": "given name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + 
"userinfo.token.claim": "true", + "user.attribute": "firstName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "given_name", + "jsonType.label": "String" + } + }, + { + "id": "01aca5ce-d1e7-49c0-9144-a33153c88472", + "name": "updated at", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "updatedAt", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "updated_at", + "jsonType.label": "long" + } + }, + { + "id": "9c7bc4e8-67ae-4de0-9321-61dfd48456b8", + "name": "nickname", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "nickname", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "nickname", + "jsonType.label": "String" + } + }, + { + "id": "e865a089-3c7b-4d6b-b675-9edad08cc853", + "name": "birthdate", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "birthdate", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "birthdate", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "f20f994b-73a2-4c83-85cd-88879fb26a08", + "name": "address", + "description": "OpenID Connect built-in scope: address", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "consent.screen.text": "${addressScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "54eddfc9-b361-47c0-9673-c72c4299fbe2", + "name": "address", + "protocol": "openid-connect", + "protocolMapper": "oidc-address-mapper", + "consentRequired": false, + "config": { + "user.attribute.formatted": "formatted", + "user.attribute.country": "country", + "introspection.token.claim": "true", + "user.attribute.postal_code": "postal_code", + "userinfo.token.claim": "true", + "user.attribute.street": "street", + "id.token.claim": "true", + "user.attribute.region": "region", + "access.token.claim": "true", + "user.attribute.locality": "locality" + } + } + ] + }, + { + "id": "8421204c-ab33-468b-a894-7885bea73570", + "name": "roles", + "description": "OpenID Connect scope for add user roles to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "consent.screen.text": "${rolesScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "5a25cb3b-a843-49d0-89ec-d6d61d841d69", + "name": "client roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-client-role-mapper", + "consentRequired": false, + "config": { + "user.attribute": "foo", + "introspection.token.claim": "true", + "access.token.claim": "true", + "claim.name": "resource_access.${client_id}.roles", + "jsonType.label": "String", + "multivalued": "true" + } + }, + { + "id": "afd69f15-f6dd-48e9-9b5f-047ec2217d8d", + "name": "audience resolve", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-resolve-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "access.token.claim": "true" + } + }, + { + "id": "07e36902-765b-4512-a455-6a7e50094d52", + 
"name": "realm roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": false, + "config": { + "user.attribute": "foo", + "introspection.token.claim": "true", + "access.token.claim": "true", + "claim.name": "realm_access.roles", + "jsonType.label": "String", + "multivalued": "true" + } + } + ] + }, + { + "id": "3af9c958-9f43-4fa9-8292-5b7571f29eb0", + "name": "basic", + "description": "OpenID Connect scope for add all basic claims to the token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "6263ea86-f83b-4c89-b3e5-e7d3afffb94f", + "name": "auth_time", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "AUTH_TIME", + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "auth_time", + "jsonType.label": "long" + } + }, + { + "id": "668efc42-edf7-459c-b14c-36cd35e9f371", + "name": "sub", + "protocol": "openid-connect", + "protocolMapper": "oidc-sub-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "access.token.claim": "true" + } + } + ] + }, + { + "id": "58a01ecb-0533-49dc-b7e8-70b8d3a82d7f", + "name": "phone", + "description": "OpenID Connect built-in scope: phone", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "consent.screen.text": "${phoneScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "c49b906e-4296-4aa5-a0d1-d111c5046150", + "name": "phone number", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "phoneNumber", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number", + "jsonType.label": "String" + } + }, + { + "id": "2aa2f933-2bfb-4072-bce4-bddce773982d", + "name": "phone number verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "phoneNumberVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number_verified", + "jsonType.label": "boolean" + } + } + ] + }, + { + "id": "690a8f21-1241-4958-a96d-5c8a3bbb3876", + "name": "email", + "description": "OpenID Connect built-in scope: email", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "consent.screen.text": "${emailScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "d0b709cc-df13-4e73-8b9f-3640c71ed434", + "name": "email verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "emailVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email_verified", + "jsonType.label": "boolean" + } + }, + { + "id": "de9447b3-de4a-47a7-afc5-64b35b02d65f", + "name": "email", + "protocol": "openid-connect", + "protocolMapper": 
"oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "email", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "875790d0-4f27-4b66-9926-1f13b164311c", + "name": "organization", + "description": "Additional claims about the organization a subject belongs to", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "consent.screen.text": "${organizationScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "241eee75-513b-4c18-ba98-105f447d5bf9", + "name": "organization", + "protocol": "openid-connect", + "protocolMapper": "oidc-organization-membership-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "multivalued": "true", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "organization", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "99f8f738-1e8f-495a-91f2-83b638f243fb", + "name": "service_account", + "description": "Specific scope for a client enabled for service accounts", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "952071b4-70e9-4579-9534-eb9708879331", + "name": "Client ID", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "client_id", + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "client_id", + "jsonType.label": "String" + } + }, + { + "id": "326e307b-7cfe-47c5-853e-b5f1af357544", + "name": "Client Host", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientHost", + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientHost", + "jsonType.label": "String" + } + }, + { + "id": "0dfdb854-e646-47ba-8425-ebb5f75ba8f6", + "name": "Client IP Address", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientAddress", + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientAddress", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "4ef5c02c-a2cc-46eb-9f65-e0383171b399", + "name": "offline_access", + "description": "OpenID Connect built-in scope: offline_access", + "protocol": "openid-connect", + "attributes": { + "consent.screen.text": "${offlineAccessScopeConsentText}", + "display.on.consent.screen": "true" + } + }, + { + "id": "212962ae-a67b-4b2e-bb6a-c552753f6a29", + "name": "microprofile-jwt", + "description": "Microprofile - JWT built-in scope", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "2b8eb40e-d7c9-41de-af68-51abf6fe45f1", + "name": "upn", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + 
"consentRequired": false, + "config": { + "introspection.token.claim": "true", + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "upn", + "jsonType.label": "String" + } + }, + { + "id": "b06f91da-489f-494c-8a56-c624d434a81e", + "name": "groups", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "multivalued": "true", + "userinfo.token.claim": "true", + "user.attribute": "foo", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "groups", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "1e44d0f7-eabe-4970-a0fc-a7077ac18b4a", + "name": "role_list", + "description": "SAML role list", + "protocol": "saml", + "attributes": { + "consent.screen.text": "${samlRoleListScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "2fe0ae5a-1a6f-469b-a38e-e9b82b0acc16", + "name": "role list", + "protocol": "saml", + "protocolMapper": "saml-role-list-mapper", + "consentRequired": false, + "config": { + "single": "false", + "attribute.nameformat": "Basic", + "attribute.name": "Role" + } + } + ] + }, + { + "id": "53dfd50f-a15f-486a-8d88-b1f2d1d0755a", + "name": "web-origins", + "description": "OpenID Connect scope for add allowed web origins to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "consent.screen.text": "", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "31547082-6766-4efb-a74d-946328b3c383", + "name": "allowed web origins", + "protocol": "openid-connect", + "protocolMapper": "oidc-allowed-origins-mapper", + "consentRequired": false, + "config": { + "introspection.token.claim": "true", + "access.token.claim": "true" + } + } + ] + }, + { + "id": "cd3873a8-ccff-42a7-84be-dac216f3fccd", + "name": "acr", + "description": "OpenID Connect scope for add acr (authentication context class reference) to the token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "73a4ffbe-8d61-403c-b69d-1bfd5da648fa", + "name": "acr loa level", + "protocol": "openid-connect", + "protocolMapper": "oidc-acr-mapper", + "consentRequired": false, + "config": { + "id.token.claim": "true", + "introspection.token.claim": "true", + "access.token.claim": "true", + "userinfo.token.claim": "true" + } + } + ] + } + ], + "defaultDefaultClientScopes": [ + "role_list", + "saml_organization", + "profile", + "email", + "roles", + "web-origins", + "acr", + "basic" + ], + "defaultOptionalClientScopes": [ + "offline_access", + "address", + "phone", + "microprofile-jwt", + "organization" + ], + "browserSecurityHeaders": { + "contentSecurityPolicyReportOnly": "", + "xContentTypeOptions": "nosniff", + "referrerPolicy": "no-referrer", + "xRobotsTag": "none", + "xFrameOptions": "SAMEORIGIN", + "contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", + "strictTransportSecurity": "max-age=31536000; includeSubDomains" + }, + "smtpServer": {}, + "eventsEnabled": false, + "eventsListeners": [ + "jboss-logging" + ], + "enabledEventTypes": [], + "adminEventsEnabled": false, + "adminEventsDetailsEnabled": false, + "identityProviders": [], + "identityProviderMappers": [], + "components": { + 
"org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy": [ + { + "id": "45411418-dc45-4611-aa41-eab32bad41ec", + "name": "Allowed Protocol Mapper Types", + "providerId": "allowed-protocol-mappers", + "subType": "authenticated", + "subComponents": {}, + "config": { + "allowed-protocol-mapper-types": [ + "oidc-address-mapper", + "saml-user-property-mapper", + "oidc-full-name-mapper", + "saml-user-attribute-mapper", + "saml-role-list-mapper", + "oidc-usermodel-property-mapper", + "oidc-usermodel-attribute-mapper", + "oidc-sha256-pairwise-sub-mapper" + ] + } + }, + { + "id": "70ddf288-a73a-4076-9fbb-7032ae1795d7", + "name": "Trusted Hosts", + "providerId": "trusted-hosts", + "subType": "anonymous", + "subComponents": {}, + "config": { + "host-sending-registration-request-must-match": [ + "true" + ], + "client-uris-must-match": [ + "true" + ] + } + }, + { + "id": "9c788ac8-2d2f-49af-b77b-05e491cb40f7", + "name": "Max Clients Limit", + "providerId": "max-clients", + "subType": "anonymous", + "subComponents": {}, + "config": { + "max-clients": [ + "200" + ] + } + }, + { + "id": "559e486a-aa36-4457-9240-8dbdd4cd2ab0", + "name": "Allowed Protocol Mapper Types", + "providerId": "allowed-protocol-mappers", + "subType": "anonymous", + "subComponents": {}, + "config": { + "allowed-protocol-mapper-types": [ + "oidc-address-mapper", + "oidc-usermodel-property-mapper", + "oidc-full-name-mapper", + "oidc-sha256-pairwise-sub-mapper", + "saml-user-attribute-mapper", + "oidc-usermodel-attribute-mapper", + "saml-role-list-mapper", + "saml-user-property-mapper" + ] + } + }, + { + "id": "5d09d604-171a-4684-8eeb-6576c59bfc8b", + "name": "Allowed Client Scopes", + "providerId": "allowed-client-templates", + "subType": "authenticated", + "subComponents": {}, + "config": { + "allow-default-scopes": [ + "true" + ] + } + }, + { + "id": "4525ea7e-8940-447e-b5f4-548bffab1a34", + "name": "Full Scope Disabled", + "providerId": "scope", + "subType": "anonymous", + "subComponents": {}, + "config": {} + }, + { + "id": "e3d2a356-b2c6-4db5-9b85-a81dde15cd91", + "name": "Allowed Client Scopes", + "providerId": "allowed-client-templates", + "subType": "anonymous", + "subComponents": {}, + "config": { + "allow-default-scopes": [ + "true" + ] + } + }, + { + "id": "0c392d38-0d5e-40ed-a83f-afec5e88d412", + "name": "Consent Required", + "providerId": "consent-required", + "subType": "anonymous", + "subComponents": {}, + "config": {} + } + ], + "org.keycloak.keys.KeyProvider": [ + { + "id": "d58d5cd3-6175-4e24-a409-25190a3aec15", + "name": "rsa-enc-generated", + "providerId": "rsa-enc-generated", + "subComponents": {}, + "config": { + "priority": [ + "100" + ], + "algorithm": [ + "RSA-OAEP" + ] + } + }, + { + "id": "9b5c14db-5b4e-415e-b0ac-51c9137eedf8", + "name": "hmac-generated-hs512", + "providerId": "hmac-generated", + "subComponents": {}, + "config": { + "priority": [ + "100" + ], + "algorithm": [ + "HS512" + ] + } + }, + { + "id": "053cfdae-ab68-40fd-9b19-6a92d80206b6", + "name": "aes-generated", + "providerId": "aes-generated", + "subComponents": {}, + "config": { + "priority": [ + "100" + ] + } + }, + { + "id": "831151e9-7fe7-4d28-8f86-ff6db930fdf1", + "name": "rsa-generated", + "providerId": "rsa-generated", + "subComponents": {}, + "config": { + "priority": [ + "100" + ] + } + } + ] + }, + "internationalizationEnabled": false, + "authenticationFlows": [ + { + "id": "c27e97be-9c5d-4730-8b9e-5c4e7b695737", + "alias": "Account verification options", + "description": "Method with which to verity the existing 
account", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-email-verification", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Verify Existing Account by Re-authentication", + "userSetupAllowed": false + } + ] + }, + { + "id": "11ca1cca-3943-4c21-8657-505b7ef5b385", + "alias": "Browser - Conditional 2FA", + "description": "Flow to determine if any 2FA is required for the authentication", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorConfig": "browser-conditional-credential", + "authenticator": "conditional-credential", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-otp-form", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "webauthn-authenticator", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 40, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-recovery-authn-code-form", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 50, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "b6654bcd-9917-4917-a4dd-d353526cff69", + "alias": "Browser - Conditional Organization", + "description": "Flow to determine if the organization identity-first login is to be used", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "organization", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "0015c10a-b9a9-4170-aa66-92cfb65fddb9", + "alias": "Direct Grant - Conditional OTP", + "description": "Flow to determine if the OTP is required for the authentication", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "direct-grant-validate-otp", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "44917fa5-1f6a-40c1-b21c-f14b4b5b8ace", + "alias": "First Broker Login - Conditional Organization", + "description": "Flow to determine if the authenticator that adds organization members is to be used", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + 
"authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "idp-add-organization-member", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "d7f84079-589d-4255-83de-8ef8cb0d73dc", + "alias": "First broker login - Conditional 2FA", + "description": "Flow to determine if any 2FA is required for the authentication", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorConfig": "first-broker-login-conditional-credential", + "authenticator": "conditional-credential", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-otp-form", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "webauthn-authenticator", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 40, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-recovery-authn-code-form", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 50, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "547fea31-d49d-480d-8553-59e2b414cfce", + "alias": "Handle Existing Account", + "description": "Handle what to do if there is existing account with same email/username like authenticated identity provider", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-confirm-link", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Account verification options", + "userSetupAllowed": false + } + ] + }, + { + "id": "397872e3-0437-4247-9617-04683d0c96e4", + "alias": "Organization", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 10, + "autheticatorFlow": true, + "flowAlias": "Browser - Conditional Organization", + "userSetupAllowed": false + } + ] + }, + { + "id": "36e35c71-44e1-4351-8868-73fe4c530b0d", + "alias": "Reset - Conditional OTP", + "description": "Flow to determine if the OTP should be reset or not. 
Set to REQUIRED to force.", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-otp", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "2cd8616b-ec68-4e09-af2d-88a8d692f97d", + "alias": "User creation or linking", + "description": "Flow for the existing/non-existing user alternatives", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticatorConfig": "create unique user config", + "authenticator": "idp-create-user-if-unique", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Handle Existing Account", + "userSetupAllowed": false + } + ] + }, + { + "id": "02564112-e716-4f4d-8c03-9d49a2393c88", + "alias": "Verify Existing Account by Re-authentication", + "description": "Reauthentication of existing account", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-username-password-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "First broker login - Conditional 2FA", + "userSetupAllowed": false + } + ] + }, + { + "id": "f995c827-4bc8-4dea-8afd-669cd27c40ac", + "alias": "browser", + "description": "Browser based authentication", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "auth-cookie", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-spnego", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "identity-provider-redirector", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 25, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 26, + "autheticatorFlow": true, + "flowAlias": "Organization", + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 30, + "autheticatorFlow": true, + "flowAlias": "forms", + "userSetupAllowed": false + } + ] + }, + { + "id": "88b8c9e3-392a-41fe-bba5-749ad279cb39", + "alias": "clients", + "description": "Base authentication for clients", + "providerId": "client-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "client-secret", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-jwt", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 20, + 
"autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-secret-jwt", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-x509", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 40, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "a814751a-2808-49e8-8984-78fb26768642", + "alias": "direct grant", + "description": "OpenID Connect Resource Owner Grant", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "direct-grant-validate-username", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "direct-grant-validate-password", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 30, + "autheticatorFlow": true, + "flowAlias": "Direct Grant - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "93a95f5d-614d-45eb-bf2c-b7dd8b16327d", + "alias": "docker auth", + "description": "Used by Docker clients to authenticate against the IDP", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "docker-http-basic-authenticator", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "17a66b4e-8a6d-48dc-be97-ffae43e93981", + "alias": "first broker login", + "description": "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticatorConfig": "review profile config", + "authenticator": "idp-review-profile", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "User creation or linking", + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 60, + "autheticatorFlow": true, + "flowAlias": "First Broker Login - Conditional Organization", + "userSetupAllowed": false + } + ] + }, + { + "id": "862acef9-e444-4574-9428-7e98fb8bca6c", + "alias": "forms", + "description": "Username, password, otp and other auth forms.", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "auth-username-password-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Browser - Conditional 2FA", + "userSetupAllowed": false + } + ] + }, + { + "id": "515dcc67-b60d-4427-ae6c-e6b3f035b500", + "alias": "registration", + "description": "Registration flow", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": 
"registration-page-form", + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": true, + "flowAlias": "registration form", + "userSetupAllowed": false + } + ] + }, + { + "id": "92cb5b0d-4e98-4e6c-84d2-61fb8dfdb3e5", + "alias": "registration form", + "description": "Registration form", + "providerId": "form-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "registration-user-creation", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "registration-password-action", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 50, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "registration-recaptcha-action", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 60, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "registration-terms-and-conditions", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 70, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "9a17af04-5563-4630-a411-508855cacf6d", + "alias": "reset credentials", + "description": "Reset credentials for a user if they forgot their password or something", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "reset-credentials-choose-user", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-credential-email", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-password", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 40, + "autheticatorFlow": true, + "flowAlias": "Reset - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "315ba2b0-b688-406a-b25f-43b092f8b633", + "alias": "saml ecp", + "description": "SAML ECP Profile Authentication Flow", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "http-basic-authenticator", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + } + ], + "authenticatorConfig": [ + { + "id": "0452f9b0-34fe-4f09-b1e9-f002b9f1d774", + "alias": "browser-conditional-credential", + "config": { + "credentials": "webauthn-passwordless" + } + }, + { + "id": "7b1fb54c-8142-4c57-9d20-f679c38ed57c", + "alias": "create unique user config", + "config": { + "require.password.update.after.registration": "false" + } + }, + { + "id": "b30c4776-49f9-456f-8309-aeaa32826edf", + "alias": "first-broker-login-conditional-credential", + "config": { + "credentials": "webauthn-passwordless" + } + }, + { + "id": "17903758-be43-4a2f-8dd6-dd57a16d2f0c", + "alias": "review profile config", + "config": { + "update.profile.on.first.login": "missing" + } + } + ], + "requiredActions": [ + { + "alias": "CONFIGURE_TOTP", + "name": "Configure OTP", + "providerId": "CONFIGURE_TOTP", + "enabled": true, + "defaultAction": false, + "priority": 
10, + "config": {} + }, + { + "alias": "TERMS_AND_CONDITIONS", + "name": "Terms and Conditions", + "providerId": "TERMS_AND_CONDITIONS", + "enabled": false, + "defaultAction": false, + "priority": 20, + "config": {} + }, + { + "alias": "UPDATE_PASSWORD", + "name": "Update Password", + "providerId": "UPDATE_PASSWORD", + "enabled": true, + "defaultAction": false, + "priority": 30, + "config": {} + }, + { + "alias": "UPDATE_PROFILE", + "name": "Update Profile", + "providerId": "UPDATE_PROFILE", + "enabled": true, + "defaultAction": false, + "priority": 40, + "config": {} + }, + { + "alias": "VERIFY_EMAIL", + "name": "Verify Email", + "providerId": "VERIFY_EMAIL", + "enabled": true, + "defaultAction": false, + "priority": 50, + "config": {} + }, + { + "alias": "delete_account", + "name": "Delete Account", + "providerId": "delete_account", + "enabled": false, + "defaultAction": false, + "priority": 60, + "config": {} + }, + { + "alias": "UPDATE_EMAIL", + "name": "Update Email", + "providerId": "UPDATE_EMAIL", + "enabled": false, + "defaultAction": false, + "priority": 70, + "config": {} + }, + { + "alias": "webauthn-register", + "name": "Webauthn Register", + "providerId": "webauthn-register", + "enabled": true, + "defaultAction": false, + "priority": 80, + "config": {} + }, + { + "alias": "webauthn-register-passwordless", + "name": "Webauthn Register Passwordless", + "providerId": "webauthn-register-passwordless", + "enabled": true, + "defaultAction": false, + "priority": 90, + "config": {} + }, + { + "alias": "VERIFY_PROFILE", + "name": "Verify Profile", + "providerId": "VERIFY_PROFILE", + "enabled": true, + "defaultAction": false, + "priority": 100, + "config": {} + }, + { + "alias": "delete_credential", + "name": "Delete Credential", + "providerId": "delete_credential", + "enabled": true, + "defaultAction": false, + "priority": 110, + "config": {} + }, + { + "alias": "idp_link", + "name": "Linking Identity Provider", + "providerId": "idp_link", + "enabled": true, + "defaultAction": false, + "priority": 120, + "config": {} + }, + { + "alias": "CONFIGURE_RECOVERY_AUTHN_CODES", + "name": "Recovery Authentication Codes", + "providerId": "CONFIGURE_RECOVERY_AUTHN_CODES", + "enabled": true, + "defaultAction": false, + "priority": 130, + "config": {} + }, + { + "alias": "update_user_locale", + "name": "Update User Locale", + "providerId": "update_user_locale", + "enabled": true, + "defaultAction": false, + "priority": 1000, + "config": {} + } + ], + "browserFlow": "browser", + "registrationFlow": "registration", + "directGrantFlow": "direct grant", + "resetCredentialsFlow": "reset credentials", + "clientAuthenticationFlow": "clients", + "dockerAuthenticationFlow": "docker auth", + "firstBrokerLoginFlow": "first broker login", + "attributes": { + "cibaBackchannelTokenDeliveryMode": "poll", + "cibaExpiresIn": "120", + "cibaAuthRequestedUserHint": "login_hint", + "oauth2DeviceCodeLifespan": "600", + "clientOfflineSessionMaxLifespan": "0", + "oauth2DevicePollingInterval": "5", + "clientSessionIdleTimeout": "0", + "parRequestUriLifespan": "60", + "clientSessionMaxLifespan": "0", + "clientOfflineSessionIdleTimeout": "0", + "cibaInterval": "5", + "realmReusableOtpCode": "false" + }, + "keycloakVersion": "26.4.0", + "userManagedAccessAllowed": false, + "organizationsEnabled": false, + "verifiableCredentialsEnabled": false, + "adminPermissionsEnabled": false, + "clientProfiles": { + "profiles": [] + }, + "clientPolicies": { + "policies": [] + } +} \ No newline at end of file diff --git a/mkdocs.yml 
b/mkdocs.yml index 67380127b..f352ce7a2 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -2,6 +2,9 @@ dev_addr: 127.0.0.1:9000 site_name: SecObserve +exclude_docs: | + /mkdocs_plugins/ + theme: name: material logo: assets/images/secobserve_white_icon.svg @@ -12,6 +15,7 @@ theme: - navigation.tabs - navigation.tabs.top - navigation.footer + - content.code.copy custom_dir: docs/overrides palette: accent: light blue @@ -28,6 +32,7 @@ markdown_extensions: - pymdownx.mark - pymdownx.tilde - attr_list + - md_in_html - admonition - pymdownx.details - pymdownx.superfences: @@ -35,20 +40,23 @@ markdown_extensions: - name: mermaid class: mermaid format: !!python/name:pymdownx.superfences.fence_code_format + - name: include + class: highlight + format: !!python/name:include.include_file_format + validator: !!python/name:include.include_file_validate - pymdownx.highlight: anchor_linenums: true line_spans: __span pygments_lang_class: true - pymdownx.inlinehilite - pymdownx.snippets - - pymdownx.superfences - pymdownx.emoji: emoji_index: !!python/name:material.extensions.emoji.twemoji emoji_generator: !!python/name:material.extensions.emoji.to_svg -repo_url: https://github.com/MaibornWolff/SecObserve -repo_name: maibornwolff/secobserve +repo_url: https://github.com/SecObserve/SecObserve +repo_name: SecObserve/secobserve -copyright: Copyright © 2023 MaibornWolff GmbH +copyright: Copyright © since December 2025 Stefan Fleckenstein # Page tree nav: @@ -62,8 +70,10 @@ nav: - Installation: getting_started/installation.md - Configuration: getting_started/configuration.md - Upgrading: getting_started/upgrading.md + - Acknowledgements: getting_started/acknowledgements.md - Usage: - Import observations: usage/import_observations.md + - Upload SBOMs: usage/upload_sbom.md - Assess observations: usage/assess_observations.md - Product groups: usage/product_groups.md - Branches and Versions: usage/branches.md @@ -81,11 +91,13 @@ nav: - GitHub actions and GitLab CI templates: integrations/github_actions_and_templates.md - OpenID Connect: integrations/oidc_authentication.md - Exploit Prediction Scoring System (EPSS): integrations/epss.md + - Exploits (from cvss-bt): integrations/exploit_information.md - License data: integrations/license_data.md - Source code repositories: integrations/source_code_repositories.md - Issue trackers: integrations/issue_trackers.md - Notifications: integrations/notifications.md - VEX documents: integrations/vex.md + - Vulnerability scanning from OSV database: integrations/osv_scan.md - Export of observations: integrations/observations_export.md - CodeCharta: integrations/codecharta.md - Links to additional information: integrations/links.md diff --git a/mkdocs_requirements.txt b/mkdocs_requirements.txt index f6d6d1f14..913dc1970 100644 --- a/mkdocs_requirements.txt +++ b/mkdocs_requirements.txt @@ -1 +1,3 @@ -mkdocs-material==9.5.49 # https://github.com/squidfunk/mkdocs-material +mkdocs-material==9.7.1 # https://github.com/squidfunk/mkdocs-material +Markdown==3.10.2 # https://github.com/Python-Markdown/markdown +pymdown-extensions==10.21 # https://github.com/facelessuser/pymdown-extensions diff --git a/renovate.json b/renovate.json index d420ec39e..49181a42f 100644 --- a/renovate.json +++ b/renovate.json @@ -3,11 +3,9 @@ "extends": [ "config:recommended" ], - "baseBranches": [ + "baseBranchPatterns": [ "dev" ], - "binarySource": "install", - "branchConcurrentLimit": 10, "dependencyDashboard": true, "dependencyDashboardApproval": false, "labels": [ @@ -18,9 +16,9 @@ }, "rebaseWhen": 
"conflicted", "pip_requirements": { - "fileMatch": [ - "mkdocs_requirements.txt", - "backend/poetry_requirements.txt" + "managerFilePatterns": [ + "/mkdocs_requirements.txt/", + "/backend/poetry_requirements.txt/" ] }, "packageRules": [ @@ -28,13 +26,13 @@ "matchPackageNames": [ "python" ], - "allowedVersions": "<3.13" + "allowedVersions": "<3.15" }, { "matchPackageNames": [ "django" ], - "allowedVersions": "<5.2" + "allowedVersions": "<5.3" }, { "matchPackageNames": [ @@ -54,6 +52,5 @@ ], "allowedVersions": "<9" } - ], - "prHourlyLimit": 10 + ] } diff --git a/sbom/README.md b/sbom/README.md index 277fd735d..c6ca0dc6d 100644 --- a/sbom/README.md +++ b/sbom/README.md @@ -4,8 +4,8 @@ This directory contains the CycloneDX SBOMs for the project of the last 3 releas * `sbom_backend_application_RELEASE.json` - Python libraries of the backend application, derived from [../backend/poetry.lock](../backend/poetry.lock) * `sbom_frontend_application_RELEASE.json` - NPM libraries of the frontend application, derived from [../frontend/package-lock.json](../frontend/package-lock.json) -* `sbom_backend_container_RELEASE.json` - Operating system components of the backend container, derived from [backend container image of the release](https://hub.docker.com/repository/docker/maibornwolff/secobserve-backend/general) -* `sbom_frontend_container_RELEASE.json` - Operating system components of the frontend container, derived from [frontend container image of the release](https://hub.docker.com/repository/docker/maibornwolff/secobserve-frontend/general) +* `sbom_backend_container_RELEASE.json` - Operating system components of the backend container, derived from [backend container image of the release](https://github.com/SecObserve/SecObserve/pkgs/container/secobserve-backend) +* `sbom_frontend_container_RELEASE.json` - Operating system components of the frontend container, derived from [frontend container image of the release](https://github.com/SecObserve/SecObserve/pkgs/container/secobserve-frontend) * `sbom_RELEASE.json` - Combined SBOM of the backend and frontend applications and containers diff --git a/sbom/configuration/license_policy.json b/sbom/configuration/license_policy.json new file mode 100644 index 000000000..30baa85e8 --- /dev/null +++ b/sbom/configuration/license_policy.json @@ -0,0 +1,2743 @@ +{ + "policies": [ + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "AFL-1-1", + "id": "AFL-1.1", + "name": "Academic Free License v1.1", + "osi": true, + "reference": "https://spdx.org/licenses/AFL-1.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "AFL-1-2", + "id": "AFL-1.2", + "name": "Academic Free License v1.2", + "osi": true, + "reference": "https://spdx.org/licenses/AFL-1.2.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "AFL-2-0", + "id": "AFL-2.0", + "name": "Academic Free License v2.0", + "osi": true, + "reference": "https://spdx.org/licenses/AFL-2.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "AFL-2-1", + "id": "AFL-2.1", + "name": "Academic Free License v2.1", + "osi": true, + "reference": "https://spdx.org/licenses/AFL-2.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "AFL-3-0", + "id": "AFL-3.0", + "name": "Academic Free License v3.0", + "osi": true, + "reference": 
"https://spdx.org/licenses/AFL-3.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Apache-1-0", + "id": "Apache-1.0", + "name": "Apache License 1.0", + "osi": false, + "reference": "https://spdx.org/licenses/Apache-1.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Apache-1-1", + "id": "Apache-1.1", + "name": "Apache License 1.1", + "osi": true, + "reference": "https://spdx.org/licenses/Apache-1.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Apache-2-0", + "id": "Apache-2.0", + "name": "Apache License 2.0", + "osi": true, + "reference": "https://spdx.org/licenses/Apache-2.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Beerware", + "id": "Beerware", + "name": "Beerware License", + "osi": false, + "reference": "https://spdx.org/licenses/Beerware.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "BSD-2-Clause", + "id": "BSD-2-Clause", + "name": "BSD 2-Clause \"Simplified\" License", + "osi": true, + "reference": "https://spdx.org/licenses/BSD-2-Clause.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "BSD-3-Clause", + "id": "BSD-3-Clause", + "name": "BSD 3-Clause \"New\" or \"Revised\" License", + "osi": true, + "reference": "https://spdx.org/licenses/BSD-3-Clause.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "BSD-4-Clause", + "id": "BSD-4-Clause", + "name": "BSD 4-Clause \"Original\" or \"Old\" License", + "osi": false, + "reference": "https://spdx.org/licenses/BSD-4-Clause.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "BSL-1-0", + "id": "BSL-1.0", + "name": "Boost Software License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/BSL-1.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "CECILL-B", + "id": "CECILL-B", + "name": "CeCILL-B Free Software License Agreement", + "osi": false, + "reference": "https://spdx.org/licenses/CECILL-B.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "CNRI-Jython", + "id": "CNRI-Jython", + "name": "CNRI Jython License", + "osi": false, + "reference": "https://spdx.org/licenses/CNRI-Jython.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "CNRI-Python", + "id": "CNRI-Python", + "name": "CNRI Python License", + "osi": true, + "reference": "https://spdx.org/licenses/CNRI-Python.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Condor-1-1", + "id": "Condor-1.1", + "name": "Condor Public License v1.1", + "osi": false, + "reference": "https://spdx.org/licenses/Condor-1.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "curl", + "id": "curl", + "name": "curl License", + "osi": false, + "reference": "https://spdx.org/licenses/curl.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "ECL-1-0", + "id": "ECL-1.0", + "name": 
"Educational Community License v1.0", + "osi": true, + "reference": "https://spdx.org/licenses/ECL-1.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "ECL-2-0", + "id": "ECL-2.0", + "name": "Educational Community License v2.0", + "osi": true, + "reference": "https://spdx.org/licenses/ECL-2.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "EFL-1-0", + "id": "EFL-1.0", + "name": "Eiffel Forum License v1.0", + "osi": true, + "reference": "https://spdx.org/licenses/EFL-1.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "EFL-2-0", + "id": "EFL-2.0", + "name": "Eiffel Forum License v2.0", + "osi": true, + "reference": "https://spdx.org/licenses/EFL-2.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Entessa", + "id": "Entessa", + "name": "Entessa Public License v1.0", + "osi": true, + "reference": "https://spdx.org/licenses/Entessa.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "EPL-1-0", + "id": "EPL-1.0", + "name": "Eclipse Public License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/EPL-1.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "EUDatagrid", + "id": "EUDatagrid", + "name": "EU DataGrid Software License", + "osi": true, + "reference": "https://spdx.org/licenses/EUDatagrid.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Fair", + "id": "Fair", + "name": "Fair License", + "osi": true, + "reference": "https://spdx.org/licenses/Fair.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "FSFUL", + "id": "FSFUL", + "name": "FSF Unlimited License", + "osi": false, + "reference": "https://spdx.org/licenses/FSFUL.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "FTL", + "id": "FTL", + "name": "Freetype Project License", + "osi": false, + "reference": "https://spdx.org/licenses/FTL.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "GD", + "id": "GD", + "name": "GD License", + "osi": false, + "reference": "https://spdx.org/licenses/GD.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "HPND", + "id": "HPND", + "name": "Historical Permission Notice and Disclaimer", + "osi": true, + "reference": "https://spdx.org/licenses/HPND.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "ICU", + "id": "ICU", + "name": "ICU License", + "osi": true, + "reference": "https://spdx.org/licenses/ICU.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "IJG", + "id": "IJG", + "name": "Independent JPEG Group License", + "osi": false, + "reference": "https://spdx.org/licenses/IJG.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "IJG-short", + "id": "IJG-short", + "name": "Independent JPEG Group License - short", + "osi": false, + "reference": "https://spdx.org/licenses/IJG-short.html", + 
"usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "ImageMagick", + "id": "ImageMagick", + "name": "ImageMagick License", + "osi": false, + "reference": "https://spdx.org/licenses/ImageMagick.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Intel", + "id": "Intel", + "name": "Intel Open Source License", + "osi": true, + "reference": "https://spdx.org/licenses/Intel.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "ISC", + "id": "ISC", + "name": "ISC License", + "osi": true, + "reference": "https://spdx.org/licenses/ISC.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "JasPer-2-0", + "id": "JasPer-2.0", + "name": "JasPer License", + "osi": false, + "reference": "https://spdx.org/licenses/JasPer-2.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "JSON", + "id": "JSON", + "name": "JSON License", + "osi": false, + "reference": "https://spdx.org/licenses/JSON.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Libpng", + "id": "Libpng", + "name": "libpng License", + "osi": false, + "reference": "https://spdx.org/licenses/Libpng.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "libpng-2-0", + "id": "libpng-2.0", + "name": "PNG Reference Library version 2", + "osi": false, + "reference": "https://spdx.org/licenses/libpng-2.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "LPL-1-0", + "id": "LPL-1.0", + "name": "Lucent Public License Version 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/LPL-1.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "LPL-1-02", + "id": "LPL-1.02", + "name": "Lucent Public License v1.02", + "osi": true, + "reference": "https://spdx.org/licenses/LPL-1.02.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "MirOS", + "id": "MirOS", + "name": "The MirOS Licence", + "osi": true, + "reference": "https://spdx.org/licenses/MirOS.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "MIT", + "id": "MIT", + "name": "MIT License", + "osi": true, + "reference": "https://spdx.org/licenses/MIT.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "MS-PL", + "id": "MS-PL", + "name": "Microsoft Public License", + "osi": true, + "reference": "https://spdx.org/licenses/MS-PL.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "MulanPSL-1-0", + "id": "MulanPSL-1.0", + "name": "Mulan Permissive Software License, Version 1", + "osi": false, + "reference": "https://spdx.org/licenses/MulanPSL-1.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "MulanPSL-2-0", + "id": "MulanPSL-2.0", + "name": "Mulan Permissive Software License, Version 2", + "osi": true, + "reference": "https://spdx.org/licenses/MulanPSL-2.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + 
"deprecated": false, + "family": "Naumen", + "id": "Naumen", + "name": "Naumen Public License", + "osi": true, + "reference": "https://spdx.org/licenses/Naumen.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "NCSA", + "id": "NCSA", + "name": "University of Illinois/NCSA Open Source License", + "osi": true, + "reference": "https://spdx.org/licenses/NCSA.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "NTP", + "id": "NTP", + "name": "NTP License", + "osi": true, + "reference": "https://spdx.org/licenses/NTP.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": true, + "family": "Nunit", + "id": "Nunit", + "name": "Nunit License", + "osi": false, + "reference": "https://spdx.org/licenses/Nunit.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OGC-1-0", + "id": "OGC-1.0", + "name": "OGC Software License, Version 1.0", + "osi": false, + "reference": "https://spdx.org/licenses/OGC-1.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OGTSL", + "id": "OGTSL", + "name": "Open Group Test Suite License", + "osi": true, + "reference": "https://spdx.org/licenses/OGTSL.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-1-1", + "id": "OLDAP-1.1", + "name": "Open LDAP Public License v1.1", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-1.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-1-2", + "id": "OLDAP-1.2", + "name": "Open LDAP Public License v1.2", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-1.2.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-1-3", + "id": "OLDAP-1.3", + "name": "Open LDAP Public License v1.3", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-1.3.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-1-4", + "id": "OLDAP-1.4", + "name": "Open LDAP Public License v1.4", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-1.4.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-0", + "id": "OLDAP-2.0", + "name": "Open LDAP Public License v2.0 (or possibly 2.0A and 2.0B)", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-2.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-0-1", + "id": "OLDAP-2.0.1", + "name": "Open LDAP Public License v2.0.1", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-2.0.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-1", + "id": "OLDAP-2.1", + "name": "Open LDAP Public License v2.1", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-2.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-2", + "id": "OLDAP-2.2", + "name": "Open LDAP Public License v2.2", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-2.2.html", + "usagePolicy": "allow" + }, + { + 
"annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-2-1", + "id": "OLDAP-2.2.1", + "name": "Open LDAP Public License v2.2.1", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-2.2.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-2-2", + "id": "OLDAP-2.2.2", + "name": "Open LDAP Public License 2.2.2", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-2.2.2.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-3", + "id": "OLDAP-2.3", + "name": "Open LDAP Public License v2.3", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-2.3.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-4", + "id": "OLDAP-2.4", + "name": "Open LDAP Public License v2.4", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-2.4.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-5", + "id": "OLDAP-2.5", + "name": "Open LDAP Public License v2.5", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-2.5.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-6", + "id": "OLDAP-2.6", + "name": "Open LDAP Public License v2.6", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-2.6.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-7", + "id": "OLDAP-2.7", + "name": "Open LDAP Public License v2.7", + "osi": false, + "reference": "https://spdx.org/licenses/OLDAP-2.7.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OLDAP-2-8", + "id": "OLDAP-2.8", + "name": "Open LDAP Public License v2.8", + "osi": true, + "reference": "https://spdx.org/licenses/OLDAP-2.8.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OpenSSL", + "id": "OpenSSL", + "name": "OpenSSL License", + "osi": false, + "reference": "https://spdx.org/licenses/OpenSSL.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "PHP-3-0", + "id": "PHP-3.0", + "name": "PHP License v3.0", + "osi": true, + "reference": "https://spdx.org/licenses/PHP-3.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "PHP-3-01", + "id": "PHP-3.01", + "name": "PHP License v3.01", + "osi": true, + "reference": "https://spdx.org/licenses/PHP-3.01.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "PostgreSQL", + "id": "PostgreSQL", + "name": "PostgreSQL License", + "osi": true, + "reference": "https://spdx.org/licenses/PostgreSQL.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "PSF-2-0", + "id": "PSF-2.0", + "name": "Python Software Foundation License 2.0", + "osi": false, + "reference": "https://spdx.org/licenses/PSF-2.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Python-2-0", + "id": "Python-2.0", + "name": "Python License 2.0", + "osi": true, + "reference": "https://spdx.org/licenses/Python-2.0.html", + 
"usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Python-2-0-1", + "id": "Python-2.0.1", + "name": "Python License 2.0.1", + "osi": false, + "reference": "https://spdx.org/licenses/Python-2.0.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Ruby", + "id": "Ruby", + "name": "Ruby License", + "osi": false, + "reference": "https://spdx.org/licenses/Ruby.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Sendmail", + "id": "Sendmail", + "name": "Sendmail License", + "osi": false, + "reference": "https://spdx.org/licenses/Sendmail.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "SMLNJ", + "id": "SMLNJ", + "name": "Standard ML of New Jersey License", + "osi": false, + "reference": "https://spdx.org/licenses/SMLNJ.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": true, + "family": "StandardML-NJ", + "id": "StandardML-NJ", + "name": "Standard ML of New Jersey License", + "osi": false, + "reference": "https://spdx.org/licenses/StandardML-NJ.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "TCL", + "id": "TCL", + "name": "TCL/TK License", + "osi": false, + "reference": "https://spdx.org/licenses/TCL.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "TOSL", + "id": "TOSL", + "name": "Trusster Open Source License", + "osi": false, + "reference": "https://spdx.org/licenses/TOSL.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Unlicense", + "id": "Unlicense", + "name": "The Unlicense", + "osi": true, + "reference": "https://spdx.org/licenses/Unlicense.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "UPL-1-0", + "id": "UPL-1.0", + "name": "Universal Permissive License v1.0", + "osi": true, + "reference": "https://spdx.org/licenses/UPL-1.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "VSL-1-0", + "id": "VSL-1.0", + "name": "Vovida Software License v1.0", + "osi": true, + "reference": "https://spdx.org/licenses/VSL-1.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "W3C", + "id": "W3C", + "name": "W3C Software Notice and License (2002-12-31)", + "osi": true, + "reference": "https://spdx.org/licenses/W3C.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "W3C-19980720", + "id": "W3C-19980720", + "name": "W3C Software Notice and License (1998-07-20)", + "osi": false, + "reference": "https://spdx.org/licenses/W3C-19980720.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "W3C-20150513", + "id": "W3C-20150513", + "name": "W3C Software Notice and Document License (2015-05-13)", + "osi": true, + "reference": "https://spdx.org/licenses/W3C-20150513.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "WTFPL", + "id": "WTFPL", + "name": "Do What The F*ck You Want To Public License", + "osi": false, + "reference": 
"https://spdx.org/licenses/WTFPL.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "X11", + "id": "X11", + "name": "X11 License", + "osi": false, + "reference": "https://spdx.org/licenses/X11.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "XFree86-1-1", + "id": "XFree86-1.1", + "name": "XFree86 License 1.1", + "osi": false, + "reference": "https://spdx.org/licenses/XFree86-1.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "xinetd", + "id": "xinetd", + "name": "xinetd License", + "osi": false, + "reference": "https://spdx.org/licenses/xinetd.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Xnet", + "id": "Xnet", + "name": "X.Net License", + "osi": true, + "reference": "https://spdx.org/licenses/Xnet.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "Zlib", + "id": "Zlib", + "name": "zlib License", + "osi": true, + "reference": "https://spdx.org/licenses/Zlib.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "ZPL-1-1", + "id": "ZPL-1.1", + "name": "Zope Public License 1.1", + "osi": false, + "reference": "https://spdx.org/licenses/ZPL-1.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "ZPL-2-0", + "id": "ZPL-2.0", + "name": "Zope Public License 2.0", + "osi": true, + "reference": "https://spdx.org/licenses/ZPL-2.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "ZPL-2-1", + "id": "ZPL-2.1", + "name": "Zope Public License 2.1", + "osi": true, + "reference": "https://spdx.org/licenses/ZPL-2.1.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "AGPL-1-0", + "id": "AGPL-1.0", + "name": "Affero General Public License v1.0", + "osi": false, + "reference": "https://spdx.org/licenses/AGPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "AGPL-1-0-only", + "id": "AGPL-1.0-only", + "name": "Affero General Public License v1.0 only", + "osi": false, + "reference": "https://spdx.org/licenses/AGPL-1.0-only.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "AGPL-1-0-or-later", + "id": "AGPL-1.0-or-later", + "name": "Affero General Public License v1.0 or later", + "osi": false, + "reference": "https://spdx.org/licenses/AGPL-1.0-or-later.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "AGPL-3-0", + "id": "AGPL-3.0", + "name": "GNU Affero General Public License v3.0", + "osi": true, + "reference": "https://spdx.org/licenses/AGPL-3.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "AGPL-3-0-only", + "id": "AGPL-3.0-only", + "name": "GNU Affero General Public License v3.0 only", + "osi": true, + "reference": "https://spdx.org/licenses/AGPL-3.0-only.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "AGPL-3-0-or-later", + "id": "AGPL-3.0-or-later", + "name": "GNU Affero General Public License 
v3.0 or later", + "osi": true, + "reference": "https://spdx.org/licenses/AGPL-3.0-or-later.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "eCos-2-0", + "id": "eCos-2.0", + "name": "eCos license version 2.0", + "osi": false, + "reference": "https://spdx.org/licenses/eCos-2.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-1-0", + "id": "GPL-1.0", + "name": "GNU General Public License v1.0 only", + "osi": false, + "reference": "https://spdx.org/licenses/GPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-1-0-", + "id": "GPL-1.0+", + "name": "GNU General Public License v1.0 or later", + "osi": false, + "reference": "https://spdx.org/licenses/GPL-1.0+.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "GPL-1-0-only", + "id": "GPL-1.0-only", + "name": "GNU General Public License v1.0 only", + "osi": false, + "reference": "https://spdx.org/licenses/GPL-1.0-only.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "GPL-1-0-or-later", + "id": "GPL-1.0-or-later", + "name": "GNU General Public License v1.0 or later", + "osi": false, + "reference": "https://spdx.org/licenses/GPL-1.0-or-later.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-2-0", + "id": "GPL-2.0", + "name": "GNU General Public License v2.0 only", + "osi": true, + "reference": "https://spdx.org/licenses/GPL-2.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-2-0-", + "id": "GPL-2.0+", + "name": "GNU General Public License v2.0 or later", + "osi": true, + "reference": "https://spdx.org/licenses/GPL-2.0+.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "GPL-2-0-only", + "id": "GPL-2.0-only", + "name": "GNU General Public License v2.0 only", + "osi": true, + "reference": "https://spdx.org/licenses/GPL-2.0-only.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "GPL-2-0-or-later", + "id": "GPL-2.0-or-later", + "name": "GNU General Public License v2.0 or later", + "osi": true, + "reference": "https://spdx.org/licenses/GPL-2.0-or-later.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-3-0", + "id": "GPL-3.0", + "name": "GNU General Public License v3.0 only", + "osi": true, + "reference": "https://spdx.org/licenses/GPL-3.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-3-0-", + "id": "GPL-3.0+", + "name": "GNU General Public License v3.0 or later", + "osi": true, + "reference": "https://spdx.org/licenses/GPL-3.0+.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "GPL-3-0-only", + "id": "GPL-3.0-only", + "name": "GNU General Public License v3.0 only", + "osi": true, + "reference": "https://spdx.org/licenses/GPL-3.0-only.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "GPL-3-0-or-later", + "id": "GPL-3.0-or-later", + "name": "GNU General Public License v3.0 or 
later", + "osi": true, + "reference": "https://spdx.org/licenses/GPL-3.0-or-later.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "NGPL", + "id": "NGPL", + "name": "Nethack General Public License", + "osi": true, + "reference": "https://spdx.org/licenses/NGPL.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "SimPL-2-0", + "id": "SimPL-2.0", + "name": "Simple Public License 2.0", + "osi": true, + "reference": "https://spdx.org/licenses/SimPL-2.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Arphic-1999", + "id": "Arphic-1999", + "name": "Arphic Public License", + "osi": false, + "reference": "https://spdx.org/licenses/Arphic-1999.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CAL-1-0", + "id": "CAL-1.0", + "name": "Cryptographic Autonomy License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/CAL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CECILL-1-0", + "id": "CECILL-1.0", + "name": "CeCILL Free Software License Agreement v1.0", + "osi": false, + "reference": "https://spdx.org/licenses/CECILL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CECILL-1-1", + "id": "CECILL-1.1", + "name": "CeCILL Free Software License Agreement v1.1", + "osi": false, + "reference": "https://spdx.org/licenses/CECILL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CECILL-2-0", + "id": "CECILL-2.0", + "name": "CeCILL Free Software License Agreement v2.0", + "osi": false, + "reference": "https://spdx.org/licenses/CECILL-2.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CECILL-2-1", + "id": "CECILL-2.1", + "name": "CeCILL Free Software License Agreement v2.1", + "osi": true, + "reference": "https://spdx.org/licenses/CECILL-2.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CPL-1-0", + "id": "CPL-1.0", + "name": "Common Public License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/CPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "D-FSL-1-0", + "id": "D-FSL-1.0", + "name": "Deutsche Freie Software Lizenz", + "osi": false, + "reference": "https://spdx.org/licenses/D-FSL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "EUPL-1-0", + "id": "EUPL-1.0", + "name": "European Union Public License 1.0", + "osi": false, + "reference": "https://spdx.org/licenses/EUPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "EUPL-1-1", + "id": "EUPL-1.1", + "name": "European Union Public License 1.1", + "osi": true, + "reference": "https://spdx.org/licenses/EUPL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "EUPL-1-2", + "id": "EUPL-1.2", + "name": "European Union Public License 1.2", + "osi": true, + "reference": "https://spdx.org/licenses/EUPL-1.2.html", + "usagePolicy": "deny" + }, + { + 
"annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "IPA", + "id": "IPA", + "name": "IPA Font License", + "osi": true, + "reference": "https://spdx.org/licenses/IPA.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "IPL-1-0", + "id": "IPL-1.0", + "name": "IBM Public License v1.0", + "osi": true, + "reference": "https://spdx.org/licenses/IPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "NLPL", + "id": "NLPL", + "name": "No Limit Public License", + "osi": false, + "reference": "https://spdx.org/licenses/NLPL.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "NPOSL-3-0", + "id": "NPOSL-3.0", + "name": "Non-Profit Open Software License 3.0", + "osi": true, + "reference": "https://spdx.org/licenses/NPOSL-3.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "OFL-1-0", + "id": "OFL-1.0", + "name": "SIL Open Font License 1.0", + "osi": false, + "reference": "https://spdx.org/licenses/OFL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "OFL-1-1", + "id": "OFL-1.1", + "name": "SIL Open Font License 1.1", + "osi": true, + "reference": "https://spdx.org/licenses/OFL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "OSL-1-0", + "id": "OSL-1.0", + "name": "Open Software License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/OSL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "OSL-1-1", + "id": "OSL-1.1", + "name": "Open Software License 1.1", + "osi": false, + "reference": "https://spdx.org/licenses/OSL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "OSL-2-0", + "id": "OSL-2.0", + "name": "Open Software License 2.0", + "osi": true, + "reference": "https://spdx.org/licenses/OSL-2.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "OSL-2-1", + "id": "OSL-2.1", + "name": "Open Software License 2.1", + "osi": true, + "reference": "https://spdx.org/licenses/OSL-2.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "OSL-3-0", + "id": "OSL-3.0", + "name": "Open Software License 3.0", + "osi": true, + "reference": "https://spdx.org/licenses/OSL-3.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "RPL-1-1", + "id": "RPL-1.1", + "name": "Reciprocal Public License 1.1", + "osi": true, + "reference": "https://spdx.org/licenses/RPL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "RPL-1-5", + "id": "RPL-1.5", + "name": "Reciprocal Public License 1.5", + "osi": true, + "reference": "https://spdx.org/licenses/RPL-1.5.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "TGPPL-1-0", + "id": "TGPPL-1.0", + "name": "Transitive Grace Period Public Licence 1.0", + "osi": false, + "reference": "https://spdx.org/licenses/TGPPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + 
], + "deprecated": false, + "family": "VOSTROM", + "id": "VOSTROM", + "name": "VOSTROM Public License for Open Source", + "osi": false, + "reference": "https://spdx.org/licenses/VOSTROM.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CATOSL-1-1", + "id": "CATOSL-1.1", + "name": "Computer Associates Trusted Open Source License 1.1", + "osi": true, + "reference": "https://spdx.org/licenses/CATOSL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CDDL-1-0", + "id": "CDDL-1.0", + "name": "Common Development and Distribution License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/CDDL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CDDL-1-1", + "id": "CDDL-1.1", + "name": "Common Development and Distribution License 1.1", + "osi": false, + "reference": "https://spdx.org/licenses/CDDL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CPAL-1-0", + "id": "CPAL-1.0", + "name": "Common Public Attribution License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/CPAL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CUA-OPL-1-0", + "id": "CUA-OPL-1.0", + "name": "CUA Office Public License v1.0", + "osi": true, + "reference": "https://spdx.org/licenses/CUA-OPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "ErlPL-1-1", + "id": "ErlPL-1.1", + "name": "Erlang Public License v1.1", + "osi": false, + "reference": "https://spdx.org/licenses/ErlPL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "gSOAP-1-3b", + "id": "gSOAP-1.3b", + "name": "gSOAP Public License v1.3b", + "osi": false, + "reference": "https://spdx.org/licenses/gSOAP-1.3b.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Interbase-1-0", + "id": "Interbase-1.0", + "name": "Interbase Public License v1.0", + "osi": false, + "reference": "https://spdx.org/licenses/Interbase-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "MPL-1-0", + "id": "MPL-1.0", + "name": "Mozilla Public License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/MPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "MPL-1-1", + "id": "MPL-1.1", + "name": "Mozilla Public License 1.1", + "osi": true, + "reference": "https://spdx.org/licenses/MPL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "MPL-2-0", + "id": "MPL-2.0", + "name": "Mozilla Public License 2.0", + "notes": [ + "No changes in MPL code and files are kept separate" + ], + "osi": true, + "reference": "https://spdx.org/licenses/MPL-2.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "NASA-1-3", + "id": "NASA-1.3", + "name": "NASA Open Source Agreement 1.3", + "osi": true, + "reference": "https://spdx.org/licenses/NASA-1.3.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Nokia", 
+ "id": "Nokia", + "name": "Nokia Open Source License", + "osi": true, + "reference": "https://spdx.org/licenses/Nokia.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "NOSL", + "id": "NOSL", + "name": "Netizen Open Source License", + "osi": false, + "reference": "https://spdx.org/licenses/NOSL.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "OPL-1-0", + "id": "OPL-1.0", + "name": "Open Public License v1.0", + "osi": false, + "reference": "https://spdx.org/licenses/OPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "RHeCos-1-1", + "id": "RHeCos-1.1", + "name": "Red Hat eCos Public License v1.1", + "osi": false, + "reference": "https://spdx.org/licenses/RHeCos-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "RSCPL", + "id": "RSCPL", + "name": "Ricoh Source Code Public License", + "osi": true, + "reference": "https://spdx.org/licenses/RSCPL.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "SISSL", + "id": "SISSL", + "name": "Sun Industry Standards Source License v1.1", + "osi": true, + "reference": "https://spdx.org/licenses/SISSL.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "SISSL-1-2", + "id": "SISSL-1.2", + "name": "Sun Industry Standards Source License v1.2", + "osi": false, + "reference": "https://spdx.org/licenses/SISSL-1.2.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "SNIA", + "id": "SNIA", + "name": "SNIA Public License 1.1", + "osi": false, + "reference": "https://spdx.org/licenses/SNIA.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "SPL-1-0", + "id": "SPL-1.0", + "name": "Sun Public License v1.0", + "osi": true, + "reference": "https://spdx.org/licenses/SPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "SugarCRM-1-1-3", + "id": "SugarCRM-1.1.3", + "name": "SugarCRM Public License v1.1.3", + "osi": false, + "reference": "https://spdx.org/licenses/SugarCRM-1.1.3.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Watcom-1-0", + "id": "Watcom-1.0", + "name": "Sybase Open Watcom Public License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/Watcom-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Zend-2-0", + "id": "Zend-2.0", + "name": "Zend License v2.0", + "osi": false, + "reference": "https://spdx.org/licenses/Zend-2.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Zimbra-1-3", + "id": "Zimbra-1.3", + "name": "Zimbra Public License v1.3", + "osi": false, + "reference": "https://spdx.org/licenses/Zimbra-1.3.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Zimbra-1-4", + "id": "Zimbra-1.4", + "name": "Zimbra Public License v1.4", + "osi": false, + "reference": "https://spdx.org/licenses/Zimbra-1.4.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + 
"deprecated": false, + "family": "Artistic-1-0", + "id": "Artistic-1.0", + "name": "Artistic License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/Artistic-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Artistic-2-0", + "id": "Artistic-2.0", + "name": "Artistic License 2.0", + "osi": true, + "reference": "https://spdx.org/licenses/Artistic-2.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "ClArtistic", + "id": "ClArtistic", + "name": "Clarified Artistic License", + "osi": false, + "reference": "https://spdx.org/licenses/ClArtistic.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Frameworx-1-0", + "id": "Frameworx-1.0", + "name": "Frameworx Open License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/Frameworx-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "LPPL-1-0", + "id": "LPPL-1.0", + "name": "LaTeX Project Public License v1.0", + "osi": false, + "reference": "https://spdx.org/licenses/LPPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "LPPL-1-1", + "id": "LPPL-1.1", + "name": "LaTeX Project Public License v1.1", + "osi": false, + "reference": "https://spdx.org/licenses/LPPL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "LPPL-1-2", + "id": "LPPL-1.2", + "name": "LaTeX Project Public License v1.2", + "osi": false, + "reference": "https://spdx.org/licenses/LPPL-1.2.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "LPPL-1-3a", + "id": "LPPL-1.3a", + "name": "LaTeX Project Public License v1.3a", + "osi": false, + "reference": "https://spdx.org/licenses/LPPL-1.3a.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "LPPL-1-3c", + "id": "LPPL-1.3c", + "name": "LaTeX Project Public License v1.3c", + "osi": true, + "reference": "https://spdx.org/licenses/LPPL-1.3c.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "SGI-B-1-0", + "id": "SGI-B-1.0", + "name": "SGI Free Software License B v1.0", + "osi": false, + "reference": "https://spdx.org/licenses/SGI-B-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "SGI-B-1-1", + "id": "SGI-B-1.1", + "name": "SGI Free Software License B v1.1", + "osi": false, + "reference": "https://spdx.org/licenses/SGI-B-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Sleepycat", + "id": "Sleepycat", + "name": "Sleepycat License", + "osi": true, + "reference": "https://spdx.org/licenses/Sleepycat.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Vim", + "id": "Vim", + "name": "Vim License", + "osi": false, + "reference": "https://spdx.org/licenses/Vim.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "APSL-1-0", + "id": "APSL-1.0", + "name": "Apple Public Source License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/APSL-1.0.html", + 
"usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "APSL-1-1", + "id": "APSL-1.1", + "name": "Apple Public Source License 1.1", + "osi": true, + "reference": "https://spdx.org/licenses/APSL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "APSL-1-2", + "id": "APSL-1.2", + "name": "Apple Public Source License 1.2", + "osi": true, + "reference": "https://spdx.org/licenses/APSL-1.2.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "NPL-1-0", + "id": "NPL-1.0", + "name": "Netscape Public License v1.0", + "osi": false, + "reference": "https://spdx.org/licenses/NPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "NPL-1-1", + "id": "NPL-1.1", + "name": "Netscape Public License v1.1", + "osi": false, + "reference": "https://spdx.org/licenses/NPL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "OCLC-2-0", + "id": "OCLC-2.0", + "name": "OCLC Research Public License 2.0", + "osi": true, + "reference": "https://spdx.org/licenses/OCLC-2.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "QPL-1-0", + "id": "QPL-1.0", + "name": "Q Public License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/QPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "RPSL-1-0", + "id": "RPSL-1.0", + "name": "RealNetworks Public Source License v1.0", + "osi": true, + "reference": "https://spdx.org/licenses/RPSL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "APL-1-0", + "id": "APL-1.0", + "name": "Adaptive Public License 1.0", + "osi": true, + "reference": "https://spdx.org/licenses/APL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "APSL-2-0", + "id": "APSL-2.0", + "name": "Apple Public Source License 2.0", + "osi": true, + "reference": "https://spdx.org/licenses/APSL-2.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "BitTorrent-1-0", + "id": "BitTorrent-1.0", + "name": "BitTorrent Open Source License v1.0", + "osi": false, + "reference": "https://spdx.org/licenses/BitTorrent-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "BitTorrent-1-1", + "id": "BitTorrent-1.1", + "name": "BitTorrent Open Source License v1.1", + "osi": false, + "reference": "https://spdx.org/licenses/BitTorrent-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "CECILL-C", + "id": "CECILL-C", + "name": "CeCILL-C Free Software License Agreement", + "osi": false, + "reference": "https://spdx.org/licenses/CECILL-C.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "EPL-2-0", + "id": "EPL-2.0", + "name": "Eclipse Public License 2.0", + "osi": true, + "reference": "https://spdx.org/licenses/EPL-2.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-2-0-with-bison-exception", + "id": 
"GPL-2.0-with-bison-exception", + "name": "GNU General Public License v2.0 w/Bison exception", + "osi": false, + "reference": "https://spdx.org/licenses/GPL-2.0-with-bison-exception.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-2-0-with-classpath-exception", + "id": "GPL-2.0-with-classpath-exception", + "name": "GNU General Public License v2.0 w/Classpath exception", + "osi": false, + "reference": "https://spdx.org/licenses/GPL-2.0-with-classpath-exception.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-2-0-with-font-exception", + "id": "GPL-2.0-with-font-exception", + "name": "GNU General Public License v2.0 w/Font exception", + "osi": false, + "reference": "https://spdx.org/licenses/GPL-2.0-with-font-exception.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-2-0-with-GCC-exception", + "id": "GPL-2.0-with-GCC-exception", + "name": "GNU General Public License v2.0 w/GCC Runtime Library exception", + "osi": false, + "reference": "https://spdx.org/licenses/GPL-2.0-with-GCC-exception.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-3-0-with-autoconf-exception", + "id": "GPL-3.0-with-autoconf-exception", + "name": "GNU General Public License v3.0 w/Autoconf exception", + "osi": false, + "reference": "https://spdx.org/licenses/GPL-3.0-with-autoconf-exception.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "GPL-3-0-with-GCC-exception", + "id": "GPL-3.0-with-GCC-exception", + "name": "GNU General Public License v3.0 w/GCC Runtime Library exception", + "osi": true, + "reference": "https://spdx.org/licenses/GPL-3.0-with-GCC-exception.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "LGPL-2-0", + "id": "LGPL-2.0", + "name": "GNU Library General Public License v2 only", + "osi": true, + "reference": "https://spdx.org/licenses/LGPL-2.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "LGPL-2-0-", + "id": "LGPL-2.0+", + "name": "GNU Library General Public License v2 or later", + "osi": true, + "reference": "https://spdx.org/licenses/LGPL-2.0+.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "LGPL-2-0-only", + "id": "LGPL-2.0-only", + "name": "GNU Library General Public License v2 only", + "osi": true, + "reference": "https://spdx.org/licenses/LGPL-2.0-only.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "LGPL-2-0-or-later", + "id": "LGPL-2.0-or-later", + "name": "GNU Library General Public License v2 or later", + "osi": true, + "reference": "https://spdx.org/licenses/LGPL-2.0-or-later.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "LGPL-2-1", + "id": "LGPL-2.1", + "name": "GNU Lesser General Public License v2.1 only", + "osi": true, + "reference": "https://spdx.org/licenses/LGPL-2.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "LGPL-2-1-", + "id": "LGPL-2.1+", + "name": "GNU Lesser General Public License v2.1 or later", + "osi": true, + "reference": 
"https://spdx.org/licenses/LGPL-2.1+.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "LGPL-2-1-only", + "id": "LGPL-2.1-only", + "name": "GNU Lesser General Public License v2.1 only", + "osi": true, + "reference": "https://spdx.org/licenses/LGPL-2.1-only.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "LGPL-2-1-or-later", + "id": "LGPL-2.1-or-later", + "name": "GNU Lesser General Public License v2.1 or later", + "osi": true, + "reference": "https://spdx.org/licenses/LGPL-2.1-or-later.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "LGPL-3-0", + "id": "LGPL-3.0", + "name": "GNU Lesser General Public License v3.0 only", + "osi": true, + "reference": "https://spdx.org/licenses/LGPL-3.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "LGPL-3-0-", + "id": "LGPL-3.0+", + "name": "GNU Lesser General Public License v3.0 or later", + "osi": true, + "reference": "https://spdx.org/licenses/LGPL-3.0+.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "LGPL-3-0-only", + "id": "LGPL-3.0-only", + "name": "GNU Lesser General Public License v3.0 only", + "notes": [ + "We only do dynamic linking" + ], + "osi": true, + "reference": "https://spdx.org/licenses/LGPL-3.0-only.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "LGPL-3-0-or-later", + "id": "LGPL-3.0-or-later", + "name": "GNU Lesser General Public License v3.0 or later", + "notes": [ + "We only do dynamic linking" + ], + "osi": true, + "reference": "https://spdx.org/licenses/LGPL-3.0-or-later.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "Motosoto", + "id": "Motosoto", + "name": "Motosoto License", + "osi": true, + "reference": "https://spdx.org/licenses/Motosoto.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "MS-RL", + "id": "MS-RL", + "name": "Microsoft Reciprocal License", + "osi": true, + "reference": "https://spdx.org/licenses/MS-RL.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "OCCT-PL", + "id": "OCCT-PL", + "name": "Open CASCADE Technology Public License", + "osi": false, + "reference": "https://spdx.org/licenses/OCCT-PL.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": true, + "family": "wxWindows", + "id": "wxWindows", + "name": "wxWindows Library License", + "osi": true, + "reference": "https://spdx.org/licenses/wxWindows.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "YPL-1-0", + "id": "YPL-1.0", + "name": "Yahoo! Public License v1.0", + "osi": false, + "reference": "https://spdx.org/licenses/YPL-1.0.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "FORBIDDEN" + ], + "deprecated": false, + "family": "YPL-1-1", + "id": "YPL-1.1", + "name": "Yahoo! 
Public License v1.1", + "osi": false, + "reference": "https://spdx.org/licenses/YPL-1.1.html", + "usagePolicy": "deny" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "0BSD", + "id": "0BSD", + "name": "BSD Zero Clause License", + "notes": [ + "Permissive, but not yet in MW Standard policy" + ], + "osi": true, + "reference": "https://spdx.org/licenses/0BSD.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "CC-BY-4-0", + "id": "CC-BY-4.0", + "name": "Creative Commons Attribution 4.0 International", + "notes": [ + "Permissive, but not yet in MW Standard policy" + ], + "osi": false, + "reference": "https://spdx.org/licenses/CC-BY-4.0.html", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "The-Unlicense--Unlicense-", + "id": "", + "name": "The Unlicense (Unlicense)", + "osi": false, + "reference": "", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "BSD", + "id": "", + "name": "BSD", + "osi": false, + "reference": "", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "CMU-License--MIT-CMU-", + "id": "", + "name": "CMU License (MIT-CMU)", + "osi": false, + "reference": "", + "usagePolicy": "allow" + }, + { + "annotationRefs": [ + "ALLOWED" + ], + "deprecated": false, + "family": "OSI-Approved-OR-BSD-3-Clause-License-or-Apache-License--Version-2-0", + "id": "", + "name": "OSI Approved OR BSD 3-Clause License or Apache License, Version 2.0", + "osi": false, + "reference": "", + "usagePolicy": "allow" + } + ] +} \ No newline at end of file diff --git a/sbom/configuration/patch_backend_application.json b/sbom/configuration/patch_backend_application.json index ec91bd038..8c8412561 100644 --- a/sbom/configuration/patch_backend_application.json +++ b/sbom/configuration/patch_backend_application.json @@ -17,7 +17,7 @@ { "op": "replace", "path": "/metadata/component/purl", - "value": "pkg:github/MaibornWolff/SecObserve@vREPLACE_VERSION#backend" + "value": "pkg:github/SecObserve/SecObserve@vREPLACE_VERSION#backend" }, { "op": "add", diff --git a/sbom/configuration/patch_complete.json b/sbom/configuration/patch_complete.json index c2c8eecce..2b23c76a3 100644 --- a/sbom/configuration/patch_complete.json +++ b/sbom/configuration/patch_complete.json @@ -7,7 +7,7 @@ { "op": "add", "path": "/metadata/component/purl", - "value": "pkg:github/MaibornWolff/SecObserve@vREPLACE_VERSION" + "value": "pkg:github/SecObserve/SecObserve@vREPLACE_VERSION" }, { "op": "add", diff --git a/sbom/configuration/patch_frontend_application.json b/sbom/configuration/patch_frontend_application.json index 7a04accca..07aab5f67 100644 --- a/sbom/configuration/patch_frontend_application.json +++ b/sbom/configuration/patch_frontend_application.json @@ -17,7 +17,7 @@ { "op": "replace", "path": "/metadata/component/purl", - "value": "pkg:github/MaibornWolff/SecObserve@vREPLACE_VERSION#frontend" + "value": "pkg:github/SecObserve/SecObserve@vREPLACE_VERSION#frontend" }, { "op": "add", diff --git a/sbom/configuration/patch_supplier.json b/sbom/configuration/patch_supplier.json index 0af5722a1..56c01128b 100644 --- a/sbom/configuration/patch_supplier.json +++ b/sbom/configuration/patch_supplier.json @@ -3,9 +3,9 @@ "op": "add", "path": "/metadata/component/supplier", "value": { - "name": "MaibornwWolff Open Source Community and contributors", + "name": "SecObserve Open 
Source Community and contributors",
             "url": [
-                "https://github.com/MaibornWolff/SecObserve"
+                "https://github.com/SecObserve/SecObserve"
             ]
         }
     }
diff --git a/so_configuration_code.yml b/so_configuration_code.yml
index a6ec94d4b..cd5c586d9 100644
--- a/so_configuration_code.yml
+++ b/so_configuration_code.yml
@@ -72,5 +72,5 @@ trivy_filesystem_frontend:
 
 importer:
   SO_UPLOAD: "true"
-  SO_API_BASE_URL: "https://secobserve-backend.maibornwolff.de"
+  SO_API_BASE_URL: "https://secobserve.example.org/api"
   SO_PRODUCT_NAME: "SecObserve"
diff --git a/so_configuration_endpoints.yml b/so_configuration_endpoints.yml
index b3b146d5e..d7dee7da6 100644
--- a/so_configuration_endpoints.yml
+++ b/so_configuration_endpoints.yml
@@ -1,34 +1,14 @@
-cryptolyzer_backend_main:
+cryptolyzer:
   SCANNER: cryptolyzer
-  TARGET: "secobserve-backend.maibornwolff.de"
-  REPORT_NAME: "cryptolyzer_backend_main.json"
-  SO_ORIGIN_SERVICE: "backend"
-  SO_BRANCH_NAME: "maks_public"
+  TARGET: "www.example.org"
+  REPORT_NAME: "cryptolyzer.json"
 
-drheader_backend_main:
+drheader:
   SCANNER: drheader
-  TARGET: "https://secobserve-backend.maibornwolff.de"
-  REPORT_NAME: "drheader_backend_main.json"
-  SO_ORIGIN_SERVICE: "backend"
-  SO_ORIGIN_ENDPOINT_URL: "https://secobserve-backend.maibornwolff.de"
-  SO_BRANCH_NAME: "maks_public"
-
-cryptolyzer_frontend_main:
-  SCANNER: cryptolyzer
-  TARGET: "secobserve.maibornwolff.de"
-  REPORT_NAME: "cryptolyzer_frontend_main.json"
-  SO_ORIGIN_SERVICE: "frontend"
-  SO_BRANCH_NAME: "maks_public"
-
-drheader_frontend_main:
-  SCANNER: drheader
-  TARGET: "https://secobserve.maibornwolff.de"
-  REPORT_NAME: "drheader_frontend_main.json"
-  SO_ORIGIN_SERVICE: "frontend"
-  SO_ORIGIN_ENDPOINT_URL: "https://secobserve.maibornwolff.de"
-  SO_BRANCH_NAME: "maks_public"
+  TARGET: "https://www.example.org"
+  REPORT_NAME: "drheader.json"
+  SO_ORIGIN_ENDPOINT_URL: "https://www.example.org"
 
 importer:
   SO_UPLOAD: "true"
-  SO_API_BASE_URL: "https://secobserve-backend.maibornwolff.de"
-  SO_PRODUCT_NAME: "SecObserve"
+  SO_PRODUCT_NAME: "Example website"
diff --git a/so_configuration_sca_current.yml b/so_configuration_sca_current.yml
deleted file mode 100644
index 3b812ac5c..000000000
--- a/so_configuration_sca_current.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-trivy_image_backend_current:
-  SCANNER: trivy_image
-  TARGET: "maibornwolff/secobserve-backend:1.26.0"
-  FURTHER_PARAMETERS: "--pkg-types os --db-repository public.ecr.aws/aquasecurity/trivy-db:2"
-  REPORT_NAME: "trivy_backend_image.json"
-  SO_ORIGIN_SERVICE: "backend"
-  SO_BRANCH_NAME: "1.26.0"
-
-trivy_image_frontend_current:
-  SCANNER: trivy_image
-  TARGET: "maibornwolff/secobserve-frontend:1.26.0"
-  FURTHER_PARAMETERS: "--pkg-types os --db-repository public.ecr.aws/aquasecurity/trivy-db:2"
-  REPORT_NAME: "trivy_frontend_image.json"
-  SO_ORIGIN_SERVICE: "frontend"
-  SO_BRANCH_NAME: "1.26.0"
-
-trivy_filesystem_backend_current:
-  SCANNER: trivy_filesystem
-  RUN_DIRECTORY: "."
-  TARGET: "backend/poetry.lock"
-  FURTHER_PARAMETERS: "--db-repository public.ecr.aws/aquasecurity/trivy-db:2"
-  REPORT_NAME: "trivy_backend_poetry.json"
-  SO_ORIGIN_SERVICE: "backend"
-  SO_BRANCH_NAME: "1.26.0"
-
-trivy_filesystem_frontend_current:
-  SCANNER: trivy_filesystem
-  RUN_DIRECTORY: "."
-  TARGET: "frontend/package-lock.json"
-  FURTHER_PARAMETERS: "--db-repository public.ecr.aws/aquasecurity/trivy-db:2"
-  REPORT_NAME: "trivy_frontend_npm.json"
-  SO_ORIGIN_SERVICE: "frontend"
-  SO_BRANCH_NAME: "1.26.0"
-
-importer:
-  SO_UPLOAD: "true"
-  SO_API_BASE_URL: "https://secobserve-backend.maibornwolff.de"
-  SO_PRODUCT_NAME: "SecObserve"
diff --git a/so_configuration_sca_dev.yml b/so_configuration_sca_dev.yml
index f0233d44f..8691d6443 100644
--- a/so_configuration_sca_dev.yml
+++ b/so_configuration_sca_dev.yml
@@ -1,15 +1,15 @@
 trivy_image_backend_current:
   SCANNER: trivy_image
-  TARGET: "maibornwolff/secobserve-backend:dev"
-  FURTHER_PARAMETERS: "--pkg-types os --db-repository public.ecr.aws/aquasecurity/trivy-db:2"
+  TARGET: "ghcr.io/secobserve/secobserve-backend:dev"
+  FURTHER_PARAMETERS: "--pkg-types os"
   REPORT_NAME: "trivy_backend_image.json"
   SO_ORIGIN_SERVICE: "backend"
   SO_BRANCH_NAME: "dev"
 
 trivy_image_frontend_current:
   SCANNER: trivy_image
-  TARGET: "maibornwolff/secobserve-frontend:dev"
-  FURTHER_PARAMETERS: "--pkg-types os --db-repository public.ecr.aws/aquasecurity/trivy-db:2"
+  TARGET: "ghcr.io/secobserve/secobserve-frontend:dev"
+  FURTHER_PARAMETERS: "--pkg-types os"
   REPORT_NAME: "trivy_frontend_image.json"
   SO_ORIGIN_SERVICE: "frontend"
   SO_BRANCH_NAME: "dev"
@@ -18,7 +18,6 @@ trivy_filesystem_backend_current:
   SCANNER: trivy_filesystem
   RUN_DIRECTORY: "."
   TARGET: "backend/poetry.lock"
-  FURTHER_PARAMETERS: "--db-repository public.ecr.aws/aquasecurity/trivy-db:2"
   REPORT_NAME: "trivy_backend_poetry.json"
   SO_ORIGIN_SERVICE: "backend"
   SO_BRANCH_NAME: "dev"
@@ -27,12 +26,11 @@ trivy_filesystem_frontend_current:
   SCANNER: trivy_filesystem
   RUN_DIRECTORY: "."
   TARGET: "frontend/package-lock.json"
-  FURTHER_PARAMETERS: "--db-repository public.ecr.aws/aquasecurity/trivy-db:2"
   REPORT_NAME: "trivy_frontend_npm.json"
   SO_ORIGIN_SERVICE: "frontend"
   SO_BRANCH_NAME: "dev"
 
 importer:
   SO_UPLOAD: "true"
-  SO_API_BASE_URL: "https://secobserve-backend.maibornwolff.de"
+  SO_API_BASE_URL: "https://secobserve.example.org/api"
   SO_PRODUCT_NAME: "SecObserve"