diff --git a/.binny.yaml b/.binny.yaml index ec3fbb050a8..038a145878f 100644 --- a/.binny.yaml +++ b/.binny.yaml @@ -2,7 +2,7 @@ tools: # we want to use a pinned version of binny to manage the toolchain (so binny manages itself!) - name: binny version: - want: v0.8.0 + want: v0.9.0 method: github-release with: repo: anchore/binny @@ -26,7 +26,7 @@ tools: # used for linting - name: golangci-lint version: - want: v1.64.6 + want: v2.2.1 method: github-release with: repo: golangci/golangci-lint @@ -34,7 +34,7 @@ tools: # used for showing the changelog at release - name: glow version: - want: v2.1.0 + want: v2.1.1 method: github-release with: repo: charmbracelet/glow @@ -42,23 +42,15 @@ tools: # used for signing the checksums file at release - name: cosign version: - want: v2.4.3 + want: v2.5.2 method: github-release with: repo: sigstore/cosign - # used in integration tests to verify JSON schemas - - name: yajsv - version: - want: v1.4.1 - method: github-release - with: - repo: neilpa/yajsv - # used to release all artifacts - name: goreleaser version: - want: v2.7.0 + want: v2.10.2 method: github-release with: repo: goreleaser/goreleaser @@ -90,7 +82,7 @@ tools: # used for running all local and CI tasks - name: task version: - want: v3.41.0 + want: v3.44.0 method: github-release with: repo: go-task/task @@ -98,7 +90,7 @@ tools: # used for triggering a release - name: gh version: - want: v2.68.0 + want: v2.74.2 method: github-release with: repo: cli/cli @@ -106,7 +98,7 @@ tools: # used for integration tests - name: skopeo version: - want: v1.18.0 + want: v1.19.0 method: go-install with: module: github.com/containers/skopeo diff --git a/.github/actions/bootstrap/action.yaml b/.github/actions/bootstrap/action.yaml index 150284da53d..3a6a906469e 100644 --- a/.github/actions/bootstrap/action.yaml +++ b/.github/actions/bootstrap/action.yaml @@ -4,7 +4,7 @@ inputs: go-version: description: "Go version to install" required: true - default: "1.24.x" + default: ">= 1.24" python-version: description: "Python version to install" required: true @@ -32,18 +32,18 @@ runs: using: "composite" steps: # note: go mod and build is automatically cached on default with v4+ - - uses: actions/setup-go@f111f3307d8850f501ac008e886eec1fd1932a34 # v5.3.0 + - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 if: inputs.go-version != '' with: go-version: ${{ inputs.go-version }} - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 with: python-version: ${{ inputs.python-version }} - name: Restore tool cache id: tool-cache - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 + uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 if: inputs.tools == 'true' with: path: ${{ github.workspace }}/.tool diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 67d1fc17e08..068e304dc26 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -1,11 +1,7 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. 
name: CodeQL Security Scan on: + workflow_dispatch: push: paths: - '**' @@ -17,72 +13,11 @@ on: schedule: - cron: '0 14 * * 4' -env: - CODEQL_EXTRACTOR_GO_BUILD_TRACING: on - -permissions: - contents: read - jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - + CodeQL: + uses: anchore/workflows/.github/workflows/codeql-go.yaml@main + with: + entrypoint: "./cmd/${{ github.event.repository.name }}" permissions: - # required for all workflows security-events: write - - strategy: - fail-fast: false - matrix: - # Override automatic language detection by changing the below list - # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] - language: ['go', 'python'] - # Learn more... - # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection - - steps: - - name: Checkout repository - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - - name: Utilize Go Module Cache - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 - with: - path: | - ~/go/pkg/mod - ~/.cache/go-build - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - restore-keys: | - ${{ runner.os }}-go- - - - name: Set correct version of Golang to use during CodeQL run - uses: actions/setup-go@f111f3307d8850f501ac008e886eec1fd1932a34 # v5.3.0 - with: - go-version: '1.21' - check-latest: true - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - # queries: ./path/to/local/query, your-org/your-repo/queries@main - - # If this step fails, then you should remove it and run the build manually (see below) - # - name: Autobuild - # uses: github/codeql-action/autobuild@v2 - - # ℹ️ Command-line programs to run using the OS shell. 
- # 📚 https://git.io/JvXDl - - # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines - # and modify them (or add more) to build your code if your project - # uses a compiled language - - name: Build grype for CodeQL - run: make grype - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10 + contents: read diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index c7364a00319..49976b16659 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -5,12 +5,17 @@ on: version: description: tag the latest commit on main with the given version (prefixed with v) required: true + skip_quality_gate: + description: skip quality gate and proceed directly to releasing (for emergency releases) + type: boolean + default: false permissions: contents: read jobs: quality-gate: + if: ${{ !inputs.skip_quality_gate }} environment: release runs-on: ubuntu-24.04 steps: @@ -60,6 +65,17 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} # This check name is defined as the github action job name (in .github/workflows/testing.yaml) checkName: "Integration tests" + timeoutSeconds: 1200 # 20 minutes, it sometimes takes that long + ref: ${{ github.event.pull_request.head.sha || github.sha }} + + - name: Check integration test results + uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0 + id: quality_tests + with: + token: ${{ secrets.GITHUB_TOKEN }} + # This check name is defined as the github action job name (in .github/workflows/testing.yaml) + checkName: "Quality tests" + timeoutSeconds: 1200 # 20 minutes, it sometimes takes that long ref: ${{ github.event.pull_request.head.sha || github.sha }} - name: Check acceptance test results (linux) @@ -90,11 +106,12 @@ jobs: ref: ${{ github.event.pull_request.head.sha || github.sha }} - name: Quality gate - if: steps.static-analysis.outputs.conclusion != 'success' || steps.unit.outputs.conclusion != 'success' || steps.integration.outputs.conclusion != 'success' || steps.cli-linux.outputs.conclusion != 'success' || steps.acceptance-linux.outputs.conclusion != 'success' || steps.acceptance-mac.outputs.conclusion != 'success' + if: steps.static-analysis.outputs.conclusion != 'success' || steps.unit.outputs.conclusion != 'success' || steps.integration.outputs.conclusion != 'success' || steps.quality_tests.outputs.conclusion != 'success' || steps.cli-linux.outputs.conclusion != 'success' || steps.acceptance-linux.outputs.conclusion != 'success' || steps.acceptance-mac.outputs.conclusion != 'success' run: | echo "Static Analysis Status: ${{ steps.static-analysis.conclusion }}" echo "Unit Test Status: ${{ steps.unit.outputs.conclusion }}" echo "Integration Test Status: ${{ steps.integration.outputs.conclusion }}" + echo "Quality Test Status: ${{ steps.quality_tests.outputs.conclusion }}" echo "Acceptance Test (Linux) Status: ${{ steps.acceptance-linux.outputs.conclusion }}" echo "Acceptance Test (Mac) Status: ${{ steps.acceptance-mac.outputs.conclusion }}" echo "CLI Test (Linux) Status: ${{ steps.cli-linux.outputs.conclusion }}" @@ -106,6 +123,7 @@ jobs: # not all actions are guaranteed to be idempotent. 
release: needs: [quality-gate] + if: ${{ always() && (needs.quality-gate.result == 'success' || inputs.skip_quality_gate) }} runs-on: ubuntu-24.04 permissions: contents: write @@ -123,13 +141,13 @@ jobs: build-cache-key-prefix: "snapshot" - name: Login to Docker Hub - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 #v3.3.0 + uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 #v3.4.0 with: username: ${{ secrets.ANCHOREOSSWRITE_DH_USERNAME }} password: ${{ secrets.ANCHOREOSSWRITE_DH_PAT }} - name: Login to GitHub Container Registry - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 #v3.3.0 + uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 #v3.4.0 with: registry: ghcr.io username: ${{ github.actor }} @@ -158,12 +176,12 @@ jobs: # for updating brew formula in anchore/homebrew-syft GITHUB_BREW_TOKEN: ${{ secrets.ANCHOREOPS_GITHUB_OSS_WRITE_TOKEN }} - - uses: anchore/sbom-action@f325610c9f50a54015d37c8d16cb3b0e2c8f4de0 # v0.18.0 + - uses: anchore/sbom-action@9246b90769f852b3a8921f330c59e0b3f439d6e9 # v0.20.1 continue-on-error: true with: artifact-name: sbom.spdx.json - - uses: 8398a7/action-slack@28ba43ae48961b90635b50953d216767a6bea486 # v3.16.2 + - uses: 8398a7/action-slack@1750b5085f3ec60384090fb7c52965ef822e869e # v3.18.0 continue-on-error: true with: status: ${{ job.status }} @@ -175,6 +193,7 @@ jobs: release-version-file: needs: [release] + if: ${{ needs.release.result == 'success' }} uses: ./.github/workflows/release-version-file.yaml with: version: ${{ github.event.inputs.version }} diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml index 10c014b21a3..02c3ad1dcc5 100644 --- a/.github/workflows/scorecards.yml +++ b/.github/workflows/scorecards.yml @@ -25,7 +25,7 @@ jobs: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1 + uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2 with: results_file: results.sarif results_format: sarif @@ -38,6 +38,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v1.0.26 + uses: github/codeql-action/upload-sarif@181d5eefc20863364f96762470ba6f862bdef56b # v1.0.26 with: sarif_file: results.sarif diff --git a/.github/workflows/validations.yaml b/.github/workflows/validations.yaml index b3b540e69d8..260aa38bd4b 100644 --- a/.github/workflows/validations.yaml +++ b/.github/workflows/validations.yaml @@ -60,7 +60,7 @@ jobs: - name: Upload the provider state archive if: ${{ failure() }} - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: qg-capture-state path: qg-capture-state.tar.gz @@ -103,7 +103,7 @@ jobs: uses: ./.github/actions/bootstrap - name: Restore integration test cache - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf #v4.2.2 + uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 #v4.2.3 with: path: ${{ github.workspace }}/test/integration/test-fixtures/cache key: ${{ runner.os }}-integration-test-cache-${{ hashFiles('test/integration/test-fixtures/cache.fingerprint') }} @@ -134,11 +134,51 @@ jobs: # why not use actions/upload-artifact? It is very slow (3 minutes to upload ~600MB of data, vs 10 seconds with this approach). 
# see https://github.com/actions/upload-artifact/issues/199 for more info - name: Upload snapshot artifacts - uses: actions/cache/save@d4323d4df104b026a6aa633fdb11d772146be0bf #v4.2.2 + uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 #v4.2.3 with: path: snapshot key: snapshot-build-${{ github.run_id }} + Upload-Snapshot-Artifacts: + name: "Upload snapshot artifacts" + needs: [Build-Snapshot-Artifacts] + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 + + - name: Download snapshot build + uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 #v4.2.3 + with: + path: snapshot + key: snapshot-build-${{ github.run_id }} + + - run: npm install @actions/artifact@2.2.2 + + - uses: actions/github-script@v7 + with: + script: | + const { readdirSync } = require('fs') + const { DefaultArtifactClient } = require('@actions/artifact') + const artifact = new DefaultArtifactClient() + const ls = d => readdirSync(d, { withFileTypes: true }) + const baseDir = "./snapshot" + const dirs = ls(baseDir).filter(f => f.isDirectory()).map(f => f.name) + const uploads = [] + for (const dir of dirs) { + // uploadArtifact returns Promise<{id, size}> + uploads.push(artifact.uploadArtifact( + // name of the archive: + `${dir}`, + // array of all files to include: + ls(`${baseDir}/${dir}`).map(f => `${baseDir}/${dir}/${f.name}`), + // base directory to trim from entries: + `${baseDir}/${dir}`, + { retentionDays: 30 } + )) + } + // wait for all uploads to finish + Promise.all(uploads) + Acceptance-Linux: # Note: changing this job name requires making the same update in the .github/workflows/release.yaml pipeline name: "Acceptance tests (Linux)" @@ -148,14 +188,14 @@ jobs: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 - name: Download snapshot build - uses: actions/cache/restore@d4323d4df104b026a6aa633fdb11d772146be0bf #v4.2.2 + uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 #v4.2.3 with: path: snapshot key: snapshot-build-${{ github.run_id }} - name: Restore install.sh test image cache id: install-test-image-cache - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf #v4.2.2 + uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 #v4.2.3 with: path: ${{ github.workspace }}/test/install/cache key: ${{ runner.os }}-install-test-image-cache-${{ hashFiles('test/install/cache.fingerprint') }} @@ -178,19 +218,19 @@ jobs: runs-on: macos-latest steps: - name: Install Cosign - uses: sigstore/cosign-installer@d7d6bc7722e3daa8354c50bcb52f4837da5e9b6a #v3.8.1 + uses: sigstore/cosign-installer@398d4b0eeef1380460a10c8013a76f728fb906ac #v3.9.1 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 - name: Download snapshot build - uses: actions/cache/restore@d4323d4df104b026a6aa633fdb11d772146be0bf #v4.2.2 + uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 #v4.2.3 with: path: snapshot key: snapshot-build-${{ github.run_id }} - name: Restore docker image cache for compare testing id: mac-compare-testing-cache - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf #v4.2.2 + uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 #v4.2.3 with: path: image.tar key: ${{ runner.os }}-${{ hashFiles('test/compare/mac.sh') }} @@ -211,16 +251,35 @@ jobs: uses: ./.github/actions/bootstrap - name: Restore CLI test-fixture cache - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf #v4.2.2 + uses: 
actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 #v4.2.3 with: path: ${{ github.workspace }}/test/cli/test-fixtures/cache key: ${{ runner.os }}-cli-test-cache-${{ hashFiles('test/cli/test-fixtures/cache.fingerprint') }} - name: Download snapshot build - uses: actions/cache/restore@d4323d4df104b026a6aa633fdb11d772146be0bf #v4.2.2 + uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 #v4.2.3 with: path: snapshot key: snapshot-build-${{ github.run_id }} - name: Run CLI Tests (Linux) run: make cli + + Cleanup-Cache: + name: "Cleanup snapshot cache" + if: github.event.pull_request.head.repo.full_name == github.repository + runs-on: ubuntu-24.04 + permissions: + actions: write + needs: + - Acceptance-Linux + - Acceptance-Mac + - Cli-Linux + - Upload-Snapshot-Artifacts + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 + + - name: Delete snapshot cache + run: gh cache delete "snapshot-build-${{ github.run_id }}" || echo "Cache deletion failed or cache not found - continuing" + env: + GH_TOKEN: ${{ github.token }} diff --git a/.golangci.yaml b/.golangci.yaml index 6521a59d3ac..68be2075aa3 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -1,57 +1,46 @@ -issues: - max-same-issues: 25 - uniq-by-line: false - - # TODO: enable this when we have coverage on docstring comments -# # The list of ids of default excludes to include or disable. -# include: -# - EXC0002 # disable excluding of issues about comments from golint - +version: "2" linters: - # inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint - disable-all: true + # inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint + default: none enable: - asciicheck - bodyclose + - copyloopvar - dogsled - dupl - errcheck - - copyloopvar - funlen - gocognit - goconst - gocritic - gocyclo - - gofmt - - goimports - goprintffuncname - gosec - - gosimple - govet - ineffassign - misspell - nakedret - revive - staticcheck - - stylecheck - - typecheck - unconvert - unparam - unused - whitespace - -linters-settings: - funlen: - # Checks the number of lines in a function. - # If lower than 0, disable the check. - # Default: 60 - lines: 70 - # Checks the number of statements in a function. - # If lower than 0, disable the check. - # Default: 40 - statements: 50 -run: - timeout: 10m + settings: + funlen: + lines: 70 + statements: 50 + exclusions: + generated: lax + presets: + - comments + - common-false-positives + - legacy + - std-error-handling + paths: + - third_party$ + - builtin$ + - examples$ # do not enable... # - deadcode # The owner seems to have abandoned the linter. Replaced by "unused". @@ -79,3 +68,23 @@ run: # - testpackage # - varcheck # The owner seems to have abandoned the linter. Replaced by "unused". # - wsl # this doens't have an auto-fixer yet and is pretty noisy (https://github.com/bombsimon/wsl/issues/90) + +issues: + max-same-issues: 25 + uniq-by-line: false + +# TODO: enable this when we have coverage on docstring comments +# # The list of ids of default excludes to include or disable. 
+# include: +# - EXC0002 # disable excluding of issues about comments from golint + +formatters: + enable: + - gofmt + - goimports + exclusions: + generated: lax + paths: + - third_party$ + - builtin$ + - examples$ diff --git a/.goreleaser.yaml b/.goreleaser.yaml index dd500a6ad2d..9ac11a2c7ce 100644 --- a/.goreleaser.yaml +++ b/.goreleaser.yaml @@ -59,16 +59,16 @@ builds: archives: - id: linux-archives - builds: + ids: - linux-build - id: darwin-archives - builds: + ids: - darwin-build - id: windows-archives - format: zip - builds: + formats: [zip] + ids: - windows-build nfpms: @@ -80,7 +80,7 @@ nfpms: - rpm - deb -brews: +homebrew_casks: - repository: owner: anchore name: homebrew-grype @@ -93,13 +93,12 @@ brews: license: "Apache License 2.0" dockers: + # production images... - image_templates: - - anchore/grype:debug - - anchore/grype:{{.Tag}}-debug - - ghcr.io/anchore/grype:debug - - ghcr.io/anchore/grype:{{.Tag}}-debug + - anchore/grype:{{.Tag}}-amd64 + - ghcr.io/anchore/grype:{{.Tag}}-amd64 goarch: amd64 - dockerfile: Dockerfile.debug + dockerfile: Dockerfile use: buildx build_flag_templates: - "--platform=linux/amd64" @@ -109,12 +108,10 @@ dockers: - "--build-arg=VCS_URL={{.GitURL}}" - image_templates: - - anchore/grype:debug-arm64v8 - - anchore/grype:{{.Tag}}-debug-arm64v8 - - ghcr.io/anchore/grype:debug-arm64v8 - - ghcr.io/anchore/grype:{{.Tag}}-debug-arm64v8 + - anchore/grype:{{.Tag}}-arm64v8 + - ghcr.io/anchore/grype:{{.Tag}}-arm64v8 goarch: arm64 - dockerfile: Dockerfile.debug + dockerfile: Dockerfile use: buildx build_flag_templates: - "--platform=linux/arm64/v8" @@ -124,12 +121,10 @@ dockers: - "--build-arg=VCS_URL={{.GitURL}}" - image_templates: - - anchore/grype:debug-ppc64le - - anchore/grype:{{.Tag}}-debug-ppc64le - - ghcr.io/anchore/grype:debug-ppc64le - - ghcr.io/anchore/grype:{{.Tag}}-debug-ppc64le + - anchore/grype:{{.Tag}}-ppc64le + - ghcr.io/anchore/grype:{{.Tag}}-ppc64le goarch: ppc64le - dockerfile: Dockerfile.debug + dockerfile: Dockerfile use: buildx build_flag_templates: - "--platform=linux/ppc64le" @@ -139,12 +134,10 @@ dockers: - "--build-arg=VCS_URL={{.GitURL}}" - image_templates: - - anchore/grype:debug-s390x - - anchore/grype:{{.Tag}}-debug-s390x - - ghcr.io/anchore/grype:debug-s390x - - ghcr.io/anchore/grype:{{.Tag}}-debug-s390x + - anchore/grype:{{.Tag}}-s390x + - ghcr.io/anchore/grype:{{.Tag}}-s390x goarch: s390x - dockerfile: Dockerfile.debug + dockerfile: Dockerfile use: buildx build_flag_templates: - "--platform=linux/s390x" @@ -153,13 +146,12 @@ dockers: - "--build-arg=VCS_REF={{.FullCommit}}" - "--build-arg=VCS_URL={{.GitURL}}" + # nonroot images... 
- image_templates: - - anchore/grype:latest - - anchore/grype:{{.Tag}} - - ghcr.io/anchore/grype:latest - - ghcr.io/anchore/grype:{{.Tag}} + - anchore/grype:{{.Tag}}-nonroot-amd64 + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-amd64 goarch: amd64 - dockerfile: Dockerfile + dockerfile: Dockerfile.nonroot use: buildx build_flag_templates: - "--platform=linux/amd64" @@ -169,10 +161,10 @@ dockers: - "--build-arg=VCS_URL={{.GitURL}}" - image_templates: - - anchore/grype:{{.Tag}}-arm64v8 - - ghcr.io/anchore/grype:{{.Tag}}-arm64v8 + - anchore/grype:{{.Tag}}-nonroot-arm64v8 + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-arm64v8 goarch: arm64 - dockerfile: Dockerfile + dockerfile: Dockerfile.nonroot use: buildx build_flag_templates: - "--platform=linux/arm64/v8" @@ -182,10 +174,10 @@ dockers: - "--build-arg=VCS_URL={{.GitURL}}" - image_templates: - - anchore/grype:{{.Tag}}-ppc64le - - ghcr.io/anchore/grype:{{.Tag}}-ppc64le + - anchore/grype:{{.Tag}}-nonroot-ppc64le + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-ppc64le goarch: ppc64le - dockerfile: Dockerfile + dockerfile: Dockerfile.nonroot use: buildx build_flag_templates: - "--platform=linux/ppc64le" @@ -195,10 +187,63 @@ dockers: - "--build-arg=VCS_URL={{.GitURL}}" - image_templates: - - anchore/grype:{{.Tag}}-s390x - - ghcr.io/anchore/grype:{{.Tag}}-s390x + - anchore/grype:{{.Tag}}-nonroot-s390x + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-s390x goarch: s390x - dockerfile: Dockerfile + dockerfile: Dockerfile.nonroot + use: buildx + build_flag_templates: + - "--platform=linux/s390x" + - "--build-arg=BUILD_DATE={{.Date}}" + - "--build-arg=BUILD_VERSION={{.Version}}" + - "--build-arg=VCS_REF={{.FullCommit}}" + - "--build-arg=VCS_URL={{.GitURL}}" + + # debug images... + - image_templates: + - anchore/grype:{{.Tag}}-debug-amd64 + - ghcr.io/anchore/grype:{{.Tag}}-debug-amd64 + goarch: amd64 + dockerfile: Dockerfile.debug + use: buildx + build_flag_templates: + - "--platform=linux/amd64" + - "--build-arg=BUILD_DATE={{.Date}}" + - "--build-arg=BUILD_VERSION={{.Version}}" + - "--build-arg=VCS_REF={{.FullCommit}}" + - "--build-arg=VCS_URL={{.GitURL}}" + + - image_templates: + - anchore/grype:{{.Tag}}-debug-arm64v8 + - ghcr.io/anchore/grype:{{.Tag}}-debug-arm64v8 + goarch: arm64 + dockerfile: Dockerfile.debug + use: buildx + build_flag_templates: + - "--platform=linux/arm64/v8" + - "--build-arg=BUILD_DATE={{.Date}}" + - "--build-arg=BUILD_VERSION={{.Version}}" + - "--build-arg=VCS_REF={{.FullCommit}}" + - "--build-arg=VCS_URL={{.GitURL}}" + + - image_templates: + - anchore/grype:{{.Tag}}-debug-ppc64le + - ghcr.io/anchore/grype:{{.Tag}}-debug-ppc64le + goarch: ppc64le + dockerfile: Dockerfile.debug + use: buildx + build_flag_templates: + - "--platform=linux/ppc64le" + - "--build-arg=BUILD_DATE={{.Date}}" + - "--build-arg=BUILD_VERSION={{.Version}}" + - "--build-arg=VCS_REF={{.FullCommit}}" + - "--build-arg=VCS_URL={{.GitURL}}" + + - image_templates: + - anchore/grype:{{.Tag}}-debug-s390x + - ghcr.io/anchore/grype:{{.Tag}}-debug-s390x + goarch: s390x + dockerfile: Dockerfile.debug use: buildx build_flag_templates: - "--platform=linux/s390x" @@ -210,45 +255,89 @@ dockers: docker_manifests: - name_template: anchore/grype:latest image_templates: - - anchore/grype:{{.Tag}} + - anchore/grype:{{.Tag}}-amd64 - anchore/grype:{{.Tag}}-arm64v8 - anchore/grype:{{.Tag}}-ppc64le - anchore/grype:{{.Tag}}-s390x - - name_template: anchore/grype:debug - - anchore/grype:{{.Tag}}-debug - - anchore/grype:{{.Tag}}-debug-arm64v8 - - anchore/grype:{{.Tag}}-debug-ppc64le - - 
anchore/grype:{{.Tag}}-debug-s390x + - name_template: ghcr.io/anchore/grype:latest + image_templates: + - ghcr.io/anchore/grype:{{.Tag}}-amd64 + - ghcr.io/anchore/grype:{{.Tag}}-arm64v8 + - ghcr.io/anchore/grype:{{.Tag}}-ppc64le + - ghcr.io/anchore/grype:{{.Tag}}-s390x - name_template: anchore/grype:{{.Tag}} image_templates: - - anchore/grype:{{.Tag}} + - anchore/grype:{{.Tag}}-amd64 - anchore/grype:{{.Tag}}-arm64v8 - anchore/grype:{{.Tag}}-ppc64le - anchore/grype:{{.Tag}}-s390x - - name_template: ghcr.io/anchore/grype:latest + - name_template: ghcr.io/anchore/grype:{{.Tag}} image_templates: - - ghcr.io/anchore/grype:{{.Tag}} + - ghcr.io/anchore/grype:{{.Tag}}-amd64 - ghcr.io/anchore/grype:{{.Tag}}-arm64v8 - ghcr.io/anchore/grype:{{.Tag}}-ppc64le - ghcr.io/anchore/grype:{{.Tag}}-s390x + # nonroot images... + - name_template: anchore/grype:nonroot + image_templates: + - anchore/grype:{{.Tag}}-nonroot-amd64 + - anchore/grype:{{.Tag}}-nonroot-arm64v8 + - anchore/grype:{{.Tag}}-nonroot-ppc64le + - anchore/grype:{{.Tag}}-nonroot-s390x + + - name_template: ghcr.io/anchore/grype:nonroot + image_templates: + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-amd64 + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-arm64v8 + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-ppc64le + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-s390x + + - name_template: anchore/grype:{{.Tag}}-nonroot + image_templates: + - anchore/grype:{{.Tag}}-nonroot-amd64 + - anchore/grype:{{.Tag}}-nonroot-arm64v8 + - anchore/grype:{{.Tag}}-nonroot-ppc64le + - anchore/grype:{{.Tag}}-nonroot-s390x + + - name_template: ghcr.io/anchore/grype:{{.Tag}}-nonroot + image_templates: + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-amd64 + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-arm64v8 + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-ppc64le + - ghcr.io/anchore/grype:{{.Tag}}-nonroot-s390x + + # debug images... 
+ - name_template: anchore/grype:debug + image_templates: + - anchore/grype:{{.Tag}}-debug-amd64 + - anchore/grype:{{.Tag}}-debug-arm64v8 + - anchore/grype:{{.Tag}}-debug-ppc64le + - anchore/grype:{{.Tag}}-debug-s390x + - name_template: ghcr.io/anchore/grype:debug image_templates: - - ghcr.io/anchore/grype:{{.Tag}}-debug + - ghcr.io/anchore/grype:{{.Tag}}-debug-amd64 - ghcr.io/anchore/grype:{{.Tag}}-debug-arm64v8 - ghcr.io/anchore/grype:{{.Tag}}-debug-ppc64le - ghcr.io/anchore/grype:{{.Tag}}-debug-s390x - - name_template: ghcr.io/anchore/grype:{{.Tag}} + - name_template: anchore/grype:{{.Tag}}-debug image_templates: - - ghcr.io/anchore/grype:{{.Tag}} - - ghcr.io/anchore/grype:{{.Tag}}-arm64v8 - - ghcr.io/anchore/grype:{{.Tag}}-ppc64le - - ghcr.io/anchore/grype:{{.Tag}}-s390x + - anchore/grype:{{.Tag}}-debug-amd64 + - anchore/grype:{{.Tag}}-debug-arm64v8 + - anchore/grype:{{.Tag}}-debug-ppc64le + - anchore/grype:{{.Tag}}-debug-s390x + - name_template: ghcr.io/anchore/grype:{{.Tag}}-debug + image_templates: + - ghcr.io/anchore/grype:{{.Tag}}-debug-amd64 + - ghcr.io/anchore/grype:{{.Tag}}-debug-arm64v8 + - ghcr.io/anchore/grype:{{.Tag}}-debug-ppc64le + - ghcr.io/anchore/grype:{{.Tag}}-debug-s390x signs: - cmd: .tool/cosign diff --git a/Dockerfile b/Dockerfile index 5eaab88c663..a02f87b98e5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,4 @@ -FROM gcr.io/distroless/static-debian11@sha256:5759d194607e472ff80fff5833442d3991dd89b219c96552837a2c8f74058617 AS build - +FROM gcr.io/distroless/static-debian12:latest AS build FROM scratch # needed for version check HTTPS request diff --git a/Dockerfile.debug b/Dockerfile.debug index db7a781d275..5239859e036 100644 --- a/Dockerfile.debug +++ b/Dockerfile.debug @@ -1,5 +1,4 @@ -FROM gcr.io/distroless/static-debian11:debug@sha256:c66a6ecb5aa7704a68c89d3ead1398adc7f16e214dda5f5f8e5d44351bcbf67d - +FROM gcr.io/distroless/static-debian12:debug-nonroot # create the /tmp dir, which is needed for image content cache WORKDIR /tmp diff --git a/Dockerfile.nonroot b/Dockerfile.nonroot new file mode 100644 index 00000000000..006a8ba188f --- /dev/null +++ b/Dockerfile.nonroot @@ -0,0 +1,25 @@ +FROM gcr.io/distroless/static-debian12:nonroot + +# create the /tmp dir, which is needed for image content cache +WORKDIR /tmp + +COPY grype / + +ARG BUILD_DATE +ARG BUILD_VERSION +ARG VCS_REF +ARG VCS_URL + +LABEL org.opencontainers.image.created=$BUILD_DATE +LABEL org.opencontainers.image.title="grype" +LABEL org.opencontainers.image.description="A vulnerability scanner for container images and filesystems" +LABEL org.opencontainers.image.source=$VCS_URL +LABEL org.opencontainers.image.revision=$VCS_REF +LABEL org.opencontainers.image.vendor="Anchore, Inc." +LABEL org.opencontainers.image.version=$BUILD_VERSION +LABEL org.opencontainers.image.licenses="Apache-2.0" +LABEL io.artifacthub.package.readme-url="https://raw.githubusercontent.com/anchore/grype/main/README.md" +LABEL io.artifacthub.package.logo-url="https://user-images.githubusercontent.com/5199289/136855393-d0a9eef9-ccf1-4e2b-9d7c-7aad16a567e5.png" +LABEL io.artifacthub.package.license="Apache-2.0" + +ENTRYPOINT ["/grype"] diff --git a/README.md b/README.md index e1073c764d1..8d59cb6b8af 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,7 @@ A vulnerability scanner for container images and filesystems. 
Easily [install th
 - Agenda: https://docs.google.com/document/d/1ZtSAa6fj2a6KRWviTn3WoJm09edvrNUp4Iz_dOjjyY8/edit?usp=sharing (join [this group](https://groups.google.com/g/anchore-oss-community) for write access)
 - All are welcome!
 
-For commercial support options with Syft or Grype, please [contact Anchore](https://get.anchore.com/contact/)
+For commercial support options with Syft or Grype, please [contact Anchore](https://get.anchore.com/contact/).
 
 ![grype-demo](https://user-images.githubusercontent.com/590471/90276236-9868f300-de31-11ea-8068-4268b6b68529.gif)
 
@@ -34,11 +34,13 @@ For commercial support options with Syft or Grype, please [contact Anchore](http
 - Find vulnerabilities for major operating system packages:
   - Alpine
   - Amazon Linux
+  - Azure Linux (previously CBL-Mariner)
   - BusyBox
   - CentOS
-  - CBL-Mariner
   - Debian
+  - Echo
   - Distroless
+  - MinimOS
   - Oracle Linux
   - Red Hat (RHEL)
   - Ubuntu
@@ -153,7 +155,7 @@ docker run --rm \
   $(ImageName):$(ImageTag)
 ```
 
-### Supported sources
+## Supported sources
 
 Grype can scan a variety of sources beyond those found in Docker.
 
@@ -210,11 +212,54 @@ use the `--distro :` flag. A full example is:
 grype --add-cpes-if-none --distro alpine:3.10 sbom:some-alpine-3.10.spdx.json
 ```
 
+## Threat & Risk Prioritization
+
+This section explains the columns and UI cues that help prioritize remediation efforts:
+
+- **Severity**: A string severity level, derived from CVSS scores, that indicates the significance of a vulnerability.
+  This balances concerns such as ease of exploitability and the potential to affect
+  confidentiality, integrity, and availability of software and services.
+
+- **EPSS**:
+  [Exploit Prediction Scoring System](https://www.first.org/epss/model) is a metric expressing the likelihood
+  that a vulnerability will be
+  exploited in the wild over the next 30 days (on a 0–1 scale); higher values signal a greater likelihood of
+  exploitation.
+  The table output shows the EPSS percentile, a one-way transform of the EPSS score showing the
+  proportion of all scored vulnerabilities with an equal or lower probability.
+  Percentiles linearize a heavily skewed distribution, making threshold choice (e.g. “only CVEs above the
+  90th percentile”) straightforward.
+
+- **KEV Indicator**: Flags entries from CISA’s [Known Exploited Vulnerabilities Catalog](https://www.cisa.gov/known-exploited-vulnerabilities-catalog),
+  an authoritative list of flaws observed being exploited in the wild.
+
+- **Risk Score**: A composite 0–100 metric calculated as:
+  ```text
+  risk = min(1, threat * average(severity)) * 100
+  ```
+  Where:
+  - `severity` is the average of all CVSS scores and string severities for a vulnerability (scaled between 0–1).
+  - `threat` is the EPSS score (between 0–1). If the vulnerability is on the KEV list then `threat` is
+    `1.05`, or `1.1` if the vulnerability is associated with a ransomware campaign.
+  This metric is one way to combine EPSS and CVSS, as suggested in the [EPSS user guide](https://www.first.org/epss/user-guide).
+  A worked example is shown below.
+
+- **Suggested Fixes**: All possible fixes for a package are listed; however, when multiple fixes are available, we de-emphasize all
+  upgrade paths except for the minimal upgrade path (which highlights the smallest, safest version bump).
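Editor's note: to make the risk formula above concrete, here is a minimal, self-contained Go sketch (not part of this patch; the `riskScore` function name and the numeric inputs are hypothetical) applying `risk = min(1, threat * average(severity)) * 100` to a vulnerability with an average normalized severity of 0.75 and an EPSS score of 0.60, with and without a KEV listing:

```go
package main

import "fmt"

// riskScore applies the formula described in the README section above:
//
//	risk = min(1, threat * averageSeverity) * 100
//
// where averageSeverity is the severity scaled to 0-1 and threat is the EPSS
// score (or 1.05 / 1.1 when the vulnerability is on the KEV list).
func riskScore(threat, averageSeverity float64) float64 {
	v := threat * averageSeverity
	if v > 1 {
		v = 1
	}
	return v * 100
}

func main() {
	// hypothetical vulnerability: average severity 0.75 (e.g. CVSS 7.5 scaled to 0-1)
	fmt.Println(riskScore(0.60, 0.75)) // EPSS 0.60, not on the KEV list -> 45
	fmt.Println(riskScore(1.05, 0.75)) // on the KEV list (threat = 1.05)  -> 78.75
}
```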
+ +Results default to sorting by Risk Score and can be overridden with `--sort-by `: + +- `severity`: sort by severity +- `epss`: sort by EPSS percentile (aka, "threat") +- `risk`: sort by risk score +- `kev`: just like risk, except that KEV entries are always above non-KEV entries +- `package`: sort by package name, version, type +- `vulnerability`: sort by vulnerability ID + ### Supported versions Software updates are always applied to the latest version of Grype; fixes are not backported to any previous versions of Grype. -In terms of database updates, any version of Grype before v0.51.0 (Oct 2022, before schema v5) will not receive +In terms of database updates, any version of Grype before v0.51.0 (Oct 2022, before schema v5) will not receive vulnerability database updates. You can still build vulnerability databases for unsupported Grype releases by using previous releases of [vunnel](https://github.com/anchore/vunnel) to gather the upstream data and [grype-db](https://github.com/anchore/grype-db) to build databases for unsupported schemas. @@ -353,6 +398,8 @@ For example, here's how you could trigger a CI pipeline failure if any vulnerabi grype ubuntu:latest --fail-on medium ``` +**Note:** Grype returns exit code `2` on vulnerability errors. + ### Specifying matches to ignore If you're seeing Grype report **false positives** or any other vulnerability matches that you just don't want to see, you can tell Grype to **ignore** matches by specifying one or more _"ignore rules"_ in your Grype configuration file (e.g. `~/.grype.yaml`). This causes Grype not to report any vulnerability matches that meet the criteria specified by any of your ignore rules. @@ -506,7 +553,9 @@ When Grype performs a scan for vulnerabilities, it does so using a vulnerability - Amazon Linux ALAS: https://alas.aws.amazon.com/AL2/alas.rss - Chainguard SecDB: https://packages.cgr.dev/chainguard/security.json - Debian Linux CVE Tracker: https://security-tracker.debian.org/tracker/data/json +- Echo Security Advisories: https://advisory.echohq.com/data.json - GitHub Security Advisories (GHSAs): https://github.com/advisories +- MinimOS SecDB: https://packages.mini.dev/advisories/secdb/security.json - National Vulnerability Database (NVD): https://nvd.nist.gov/vuln/data-feeds - Oracle Linux OVAL: https://linux.oracle.com/security/oval/ - RedHat Linux Security Data: https://access.redhat.com/hydra/rest/securitydata/ @@ -697,192 +746,240 @@ GRYPE_CONFIG=/path/to/config.yaml grype Configuration options (example values are the default): ```yaml -# enable/disable checking for application updates on startup -# same as GRYPE_CHECK_FOR_APP_UPDATE env var +# the output format of the vulnerability report (options: table, template, json, cyclonedx) +# when using template as the output type, you must also provide a value for 'output-template-file' (env: GRYPE_OUTPUT) +output: 'table' + +# if using template output, you must provide a path to a Go template file +# see https://github.com/anchore/grype#using-templates for more information on template output +# the default path to the template file is the current working directory +# output-template-file: .grype/html.tmpl +# +# write output report to a file (default is to write to stdout) (env: GRYPE_FILE) +file: '' + +# pretty-print JSON output (env: GRYPE_PRETTY) +pretty: false + +# distro to match against in the format: : (env: GRYPE_DISTRO) +distro: '' + +# generate CPEs for packages with no CPE data (env: GRYPE_ADD_CPES_IF_NONE) +add-cpes-if-none: false + +# specify the path to a Go 
template file (requires 'template' output to be selected) (env: GRYPE_OUTPUT_TEMPLATE_FILE) +output-template-file: '' + +# enable/disable checking for application updates on startup (env: GRYPE_CHECK_FOR_APP_UPDATE) check-for-app-update: true -# allows users to specify which image source should be used to generate the sbom -# valid values are: registry, docker, podman -# same as GRYPE_DEFAULT_IMAGE_PULL_SOURCE env var -default-image-pull-source: "" +# ignore matches for vulnerabilities that are not fixed (env: GRYPE_ONLY_FIXED) +only-fixed: false -# same as --name; set the name of the target being analyzed -name: "" +# ignore matches for vulnerabilities that are fixed (env: GRYPE_ONLY_NOTFIXED) +only-notfixed: false + +# ignore matches for vulnerabilities with specified comma separated fix states, options=[fixed not-fixed unknown wont-fix] (env: GRYPE_IGNORE_WONTFIX) +ignore-wontfix: '' + +# an optional platform specifier for container image sources (e.g. 'linux/arm64', 'linux/arm64/v8', 'arm64', 'linux') (env: GRYPE_PLATFORM) +platform: '' # upon scanning, if a severity is found at or above the given severity then the return code will be 1 -# default is unset which will skip this validation (options: negligible, low, medium, high, critical) -# same as --fail-on ; GRYPE_FAIL_ON_SEVERITY env var -fail-on-severity: "" +# default is unset which will skip this validation (options: negligible, low, medium, high, critical) (env: GRYPE_FAIL_ON_SEVERITY) +fail-on-severity: '' -# the output format of the vulnerability report (options: table, template, json, cyclonedx) -# when using template as the output type, you must also provide a value for 'output-template-file' -# same as -o ; GRYPE_OUTPUT env var -output: "table" +# show suppressed/ignored vulnerabilities in the output (only supported with table output format) (env: GRYPE_SHOW_SUPPRESSED) +show-suppressed: false -# if using template output, you must provide a path to a Go template file -# see https://github.com/anchore/grype#using-templates for more information on template output -# the default path to the template file is the current working directory -# output-template-file: .grype/html.tmpl +# orient results by CVE instead of the original vulnerability ID when possible (env: GRYPE_BY_CVE) +by-cve: false -# write output report to a file (default is to write to stdout) -# same as --file; GRYPE_FILE env var -file: "" +# sort the match results with the given strategy, options=[package severity epss risk kev vulnerability] (env: GRYPE_SORT_BY) +sort-by: 'risk' -# a list of globs to exclude from scanning, for example: -# exclude: -# - '/etc/**' -# - './out/**/*.json' -# same as --exclude ; GRYPE_EXCLUDE env var -exclude: [] +# same as --name; set the name of the target being analyzed (env: GRYPE_NAME) +name: '' -# include matches on kernel-headers packages that are matched against upstream kernel package -# if 'false' any such matches are marked as ignored -match-upstream-kernel-headers: false +# allows users to specify which image source should be used to generate the sbom +# valid values are: registry, docker, podman (env: GRYPE_DEFAULT_IMAGE_PULL_SOURCE) +default-image-pull-source: '' -# os and/or architecture to use when referencing container images (e.g. 
"windows/armv6" or "arm64") -# same as --platform; GRYPE_PLATFORM env var -platform: "" +search: + # selection of layers to analyze, options=[squashed all-layers] (env: GRYPE_SEARCH_SCOPE) + scope: 'squashed' -# If using SBOM input, automatically generate CPEs when packages have none -add-cpes-if-none: false + # search within archives that do not contain a file index to search against (tar, tar.gz, tar.bz2, etc) + # note: enabling this may result in a performance impact since all discovered compressed tars will be decompressed + # note: for now this only applies to the java package cataloger (env: GRYPE_SEARCH_UNINDEXED_ARCHIVES) + unindexed-archives: false + + # search within archives that do contain a file index to search against (zip) + # note: for now this only applies to the java package cataloger (env: GRYPE_SEARCH_INDEXED_ARCHIVES) + indexed-archives: true + +# A list of vulnerability ignore rules, one or more property may be specified and all matching vulnerabilities will be ignored. +# This is the full set of supported rule fields: +# - vulnerability: CVE-2008-4318 +# fix-state: unknown +# package: +# name: libcurl +# version: 1.5.1 +# type: npm +# location: "/usr/local/lib/node_modules/**" +# +# VEX fields apply when Grype reads vex data: +# - vex-status: not_affected +# vex-justification: vulnerable_code_not_present +ignore: [] -# Explicitly specify a linux distribution to use as : like alpine:3.10 -distro: +# a list of globs to exclude from scanning, for example: +# - '/etc/**' +# - './out/**/*.json' +# same as --exclude (env: GRYPE_EXCLUDE) +exclude: [] external-sources: + # enable Grype searching network source for additional information (env: GRYPE_EXTERNAL_SOURCES_ENABLE) enable: false + maven: - search-upstream-by-sha1: true - base-url: https://search.maven.org/solrsearch/select + # search for Maven artifacts by SHA1 (env: GRYPE_EXTERNAL_SOURCES_MAVEN_SEARCH_MAVEN_UPSTREAM) + search-maven-upstream: true + + # base URL of the Maven repository to search (env: GRYPE_EXTERNAL_SOURCES_MAVEN_BASE_URL) + base-url: 'https://search.maven.org/solrsearch/select' + + # (env: GRYPE_EXTERNAL_SOURCES_MAVEN_RATE_LIMIT) rate-limit: 300ms -db: - # check for database updates on execution - # same as GRYPE_DB_AUTO_UPDATE env var - auto-update: true +match: + java: + # use CPE matching to find vulnerabilities (env: GRYPE_MATCH_JAVA_USING_CPES) + using-cpes: false + + jvm: + # (env: GRYPE_MATCH_JVM_USING_CPES) + using-cpes: true - # location to write the vulnerability database cache; defaults to $XDG_CACHE_HOME/grype/db - # same as GRYPE_DB_CACHE_DIR env var - cache-dir: "" + dotnet: + # use CPE matching to find vulnerabilities (env: GRYPE_MATCH_DOTNET_USING_CPES) + using-cpes: false - # URL of the vulnerability database - # same as GRYPE_DB_UPDATE_URL env var - update-url: "https://grype.anchore.io/databases" + golang: + # use CPE matching to find vulnerabilities (env: GRYPE_MATCH_GOLANG_USING_CPES) + using-cpes: false - # it ensures db build is no older than the max-allowed-built-age - # set to false to disable check - validate-age: true + # use CPE matching to find vulnerabilities for the Go standard library (env: GRYPE_MATCH_GOLANG_ALWAYS_USE_CPE_FOR_STDLIB) + always-use-cpe-for-stdlib: true - # Max allowed age for vulnerability database, - # age being the time since it was built - # Default max age is 120h (or five days) - max-allowed-built-age: "120h" + # allow comparison between main module pseudo-versions (e.g. v0.0.0-20240413-2b432cf643...) 
(env: GRYPE_MATCH_GOLANG_ALLOW_MAIN_MODULE_PSEUDO_VERSION_COMPARISON) + allow-main-module-pseudo-version-comparison: false - # Timeout for downloading GRYPE_DB_UPDATE_URL to see if the database needs to be downloaded - # This file is ~156KiB as of 2024-04-17 so the download should be quick; adjust as needed - update-available-timeout: "30s" + javascript: + # use CPE matching to find vulnerabilities (env: GRYPE_MATCH_JAVASCRIPT_USING_CPES) + using-cpes: false - # Timeout for downloading actual vulnerability DB - # The DB is ~156MB as of 2024-04-17 so slower connections may exceed the default timeout; adjust as needed - update-download-timeout: "120s" + python: + # use CPE matching to find vulnerabilities (env: GRYPE_MATCH_PYTHON_USING_CPES) + using-cpes: false -search: - # the search space to look for packages (options: all-layers, squashed) - # same as -s ; GRYPE_SEARCH_SCOPE env var - scope: "squashed" + ruby: + # use CPE matching to find vulnerabilities (env: GRYPE_MATCH_RUBY_USING_CPES) + using-cpes: false - # search within archives that do contain a file index to search against (zip) - # note: for now this only applies to the java package cataloger - # same as GRYPE_PACKAGE_SEARCH_INDEXED_ARCHIVES env var - indexed-archives: true + rust: + # use CPE matching to find vulnerabilities (env: GRYPE_MATCH_RUST_USING_CPES) + using-cpes: false + + stock: + # use CPE matching to find vulnerabilities (env: GRYPE_MATCH_STOCK_USING_CPES) + using-cpes: true - # search within archives that do not contain a file index to search against (tar, tar.gz, tar.bz2, etc) - # note: enabling this may result in a performance impact since all discovered compressed tars will be decompressed - # note: for now this only applies to the java package cataloger - # same as GRYPE_PACKAGE_SEARCH_UNINDEXED_ARCHIVES env var - unindexed-archives: false -# options when pulling directly from a registry via the "registry:" scheme registry: - # skip TLS verification when communicating with the registry - # same as GRYPE_REGISTRY_INSECURE_SKIP_TLS_VERIFY env var + # skip TLS verification when communicating with the registry (env: GRYPE_REGISTRY_INSECURE_SKIP_TLS_VERIFY) insecure-skip-tls-verify: false - # use http instead of https when connecting to the registry - # same as GRYPE_REGISTRY_INSECURE_USE_HTTP env var + # use http instead of https when connecting to the registry (env: GRYPE_REGISTRY_INSECURE_USE_HTTP) insecure-use-http: false - # filepath to a CA certificate (or directory containing *.crt, *.cert, *.pem) used to generate the client certificate - # GRYPE_REGISTRY_CA_CERT env var - ca-cert: "" + # Authentication credentials for specific registries. Each entry describes authentication for a specific authority: + # - authority: the registry authority URL the URL to the registry (e.g. "docker.io", "localhost:5000", etc.) 
(env: SYFT_REGISTRY_AUTH_AUTHORITY) + # username: a username if using basic credentials (env: SYFT_REGISTRY_AUTH_USERNAME) + # password: a corresponding password (env: SYFT_REGISTRY_AUTH_PASSWORD) + # token: a token if using token-based authentication, mutually exclusive with username/password (env: SYFT_REGISTRY_AUTH_TOKEN) + # tls-cert: filepath to the client certificate used for TLS authentication to the registry (env: SYFT_REGISTRY_AUTH_TLS_CERT) + # tls-key: filepath to the client key used for TLS authentication to the registry (env: SYFT_REGISTRY_AUTH_TLS_KEY) + auth: [] + + # filepath to a CA certificate (or directory containing *.crt, *.cert, *.pem) used to generate the client certificate (env: GRYPE_REGISTRY_CA_CERT) + ca-cert: '' + +# a list of VEX documents to consider when producing scanning results (env: GRYPE_VEX_DOCUMENTS) +vex-documents: [] - # credentials for specific registries - auth: - # the URL to the registry (e.g. "docker.io", "localhost:5000", etc.) - # GRYPE_REGISTRY_AUTH_AUTHORITY env var - - authority: "" +# VEX statuses to consider as ignored rules (env: GRYPE_VEX_ADD) +vex-add: [] - # GRYPE_REGISTRY_AUTH_USERNAME env var - username: "" +# match kernel-header packages with upstream kernel as kernel vulnerabilities (env: GRYPE_MATCH_UPSTREAM_KERNEL_HEADERS) +match-upstream-kernel-headers: false + +db: + # location to write the vulnerability database cache (env: GRYPE_DB_CACHE_DIR) + cache-dir: '~/Library/Caches/grype/db' + + # URL of the vulnerability database (env: GRYPE_DB_UPDATE_URL) + update-url: 'https://grype.anchore.io/databases' - # GRYPE_REGISTRY_AUTH_PASSWORD env var - password: "" + # certificate to trust download the database and listing file (env: GRYPE_DB_CA_CERT) + ca-cert: '' - # note: token and username/password are mutually exclusive - # GRYPE_REGISTRY_AUTH_TOKEN env var - token: "" + # check for database updates on execution (env: GRYPE_DB_AUTO_UPDATE) + auto-update: true - # filepath to the client certificate used for TLS authentication to the registry - # GRYPE_REGISTRY_AUTH_TLS_CERT env var - tls-cert: "" + # validate the database matches the known hash each execution (env: GRYPE_DB_VALIDATE_BY_HASH_ON_START) + validate-by-hash-on-start: true - # filepath to the client key used for TLS authentication to the registry - # GRYPE_REGISTRY_AUTH_TLS_KEY env var - tls-key: "" + # ensure db build is no older than the max-allowed-built-age (env: GRYPE_DB_VALIDATE_AGE) + validate-age: true - # - ... 
# note, more credentials can be provided via config file only (not env vars) + # Max allowed age for vulnerability database, + # age being the time since it was built + # Default max age is 120h (or five days) (env: GRYPE_DB_MAX_ALLOWED_BUILT_AGE) + max-allowed-built-age: 120h0m0s + # fail the scan if unable to check for database updates (env: GRYPE_DB_REQUIRE_UPDATE_CHECK) + require-update-check: false + + # Timeout for downloading GRYPE_DB_UPDATE_URL to see if the database needs to be downloaded + # This file is ~156KiB as of 2024-04-17 so the download should be quick; adjust as needed (env: GRYPE_DB_UPDATE_AVAILABLE_TIMEOUT) + update-available-timeout: 30s + + # Timeout for downloading actual vulnerability DB + # The DB is ~156MB as of 2024-04-17 so slower connections may exceed the default timeout; adjust as needed (env: GRYPE_DB_UPDATE_DOWNLOAD_TIMEOUT) + update-download-timeout: 5m0s + + # Maximum frequency to check for vulnerability database updates (env: GRYPE_DB_MAX_UPDATE_CHECK_FREQUENCY) + max-update-check-frequency: 2h0m0s log: - # suppress all output (except for the vulnerability list) - # same as -q ; GRYPE_LOG_QUIET env var + # suppress all logging output (env: GRYPE_LOG_QUIET) quiet: false - # increase verbosity - # same as GRYPE_LOG_VERBOSITY env var - verbosity: 0 + # explicitly set the logging level (available: [error warn info debug trace]) (env: GRYPE_LOG_LEVEL) + level: 'warn' - # the log level; note: detailed logging suppress the ETUI - # same as GRYPE_LOG_LEVEL env var - # Uses logrus logging levels: https://github.com/sirupsen/logrus#level-logging - level: "error" + # file path to write logs to (env: GRYPE_LOG_FILE) + file: '' - # location to write the log file (default is not to have a log file) - # same as GRYPE_LOG_FILE env var - file: "" +dev: + # capture resource profiling data (available: [cpu, mem]) (env: GRYPE_DEV_PROFILE) + profile: '' -match: - # sets the matchers below to use cpes when trying to find - # vulnerability matches. The stock matcher is the default - # when no primary matcher can be identified. - java: - using-cpes: false - python: - using-cpes: false - javascript: - using-cpes: false - ruby: - using-cpes: false - dotnet: - using-cpes: false - golang: - using-cpes: false - # even if CPE matching is disabled, make an exception when scanning for "stdlib". - always-use-cpe-for-stdlib: true - # allow main module pseudo versions, which may have only been "guessed at" by Syft, to be used in vulnerability matching - allow-main-module-pseudo-version-comparison: false - stock: - using-cpes: true + db: + # show sql queries in trace logging (requires -vv) (env: GRYPE_DEV_DB_DEBUG) + debug: false ``` ## Future plans diff --git a/Taskfile.yaml b/Taskfile.yaml index 5e08e9b61a0..336c7c6691b 100644 --- a/Taskfile.yaml +++ b/Taskfile.yaml @@ -40,7 +40,13 @@ tasks: ## High-level tasks ################################# + # note: the default task should not run levels of test, only build the project. 
default: + desc: Build the project + cmds: + - task: build + + validate: desc: Run all validation tasks aliases: - pr-validations @@ -57,7 +63,6 @@ tasks: - task: check-licenses - task: lint - task: check-json-schema-drift - - task: validate-cyclonedx-schema # TODO: while developing v6, we need to disable this check (since v5 and v6 are imported in the same codebase) # - task: validate-grype-db-schema @@ -177,11 +182,6 @@ tasks: cmds: - .github/scripts/json-schema-drift-check.sh - validate-cyclonedx-schema: - desc: Run integration tests - cmds: - - "cd schema/cyclonedx && make" - validate-grype-db-schema: desc: Ensure the codebase is only referencing a single grype-db schema version (multiple is not allowed) cmds: diff --git a/cmd/grype/cli/cli.go b/cmd/grype/cli/cli.go index f19bbb2ef99..49d887db338 100644 --- a/cmd/grype/cli/cli.go +++ b/cmd/grype/cli/cli.go @@ -1,9 +1,13 @@ package cli import ( + "errors" "os" "runtime/debug" + "strings" + "github.com/charmbracelet/lipgloss" + "github.com/muesli/termenv" "github.com/spf13/cobra" "github.com/anchore/clio" @@ -11,6 +15,7 @@ import ( grypeHandler "github.com/anchore/grype/cmd/grype/cli/ui" "github.com/anchore/grype/cmd/grype/internal/ui" v6 "github.com/anchore/grype/grype/db/v6" + "github.com/anchore/grype/grype/grypeerr" "github.com/anchore/grype/internal/bus" "github.com/anchore/grype/internal/log" "github.com/anchore/grype/internal/redact" @@ -29,14 +34,18 @@ func Command(id clio.Identification) *cobra.Command { return cmd } -func create(id clio.Identification) (clio.Application, *cobra.Command) { - clioCfg := clio.NewSetupConfig(id). +func SetupConfig(id clio.Identification) *clio.SetupConfig { + return clio.NewSetupConfig(id). WithGlobalConfigFlag(). // add persistent -c for reading an application config from WithGlobalLoggingFlags(). // add persistent -v and -q flags tied to the logging config WithConfigInRootHelp(). // --help on the root command renders the full application config in the help text WithUIConstructor( // select a UI based on the logging configuration and state of stdin (if stdin is a tty) func(cfg clio.Config) (*clio.UICollection, error) { + // remove CI var from consideration when determining if we should use the UI + lipgloss.SetDefaultRenderer(lipgloss.NewRenderer(os.Stdout, termenv.WithEnvironment(environWithoutCI{}))) + + // setup the UIs noUI := ui.None(cfg.Log.Quiet) if !cfg.Log.AllowUI(os.Stdin) || cfg.Log.Quiet { return clio.NewUICollection(noUI), nil @@ -70,7 +79,23 @@ func create(id clio.Identification) (clio.Application, *cobra.Command) { ). WithPostRuns(func(_ *clio.State, _ error) { stereoscope.Cleanup() + }). + WithMapExitCode(func(err error) int { + // return exit code 2 to indicate when a vulnerability severity is discovered + // that is equal or above the given --fail-on severity value. + if errors.Is(err, grypeerr.ErrAboveSeverityThreshold) { + return 2 + } + // return exit code 100 to indicate a DB upgrade is available (cmd: db check). 
+ if errors.Is(err, grypeerr.ErrDBUpgradeAvailable) { + return 100 + } + return 1 }) +} + +func create(id clio.Identification) (clio.Application, *cobra.Command) { + clioCfg := SetupConfig(id) app := clio.New(*clioCfg) @@ -108,3 +133,24 @@ func syftVersion() (string, any) { func dbVersion() (string, any) { return "Supported DB Schema", v6.ModelVersion } + +type environWithoutCI struct { +} + +func (e environWithoutCI) Environ() []string { + var out []string + for _, s := range os.Environ() { + if strings.HasPrefix(s, "CI=") { + continue + } + out = append(out, s) + } + return out +} + +func (e environWithoutCI) Getenv(s string) string { + if s == "CI" { + return "" + } + return os.Getenv(s) +} diff --git a/cmd/grype/cli/commands/db_check.go b/cmd/grype/cli/commands/db_check.go index 18bc49b89e5..20d035636c5 100644 --- a/cmd/grype/cli/commands/db_check.go +++ b/cmd/grype/cli/commands/db_check.go @@ -12,13 +12,10 @@ import ( "github.com/anchore/grype/cmd/grype/cli/options" db "github.com/anchore/grype/grype/db/v6" "github.com/anchore/grype/grype/db/v6/distribution" + "github.com/anchore/grype/grype/grypeerr" "github.com/anchore/grype/internal/log" ) -const ( - exitCodeOnDBUpgradeAvailable = 100 -) - type dbCheckOptions struct { Output string `yaml:"output" json:"output" mapstructure:"output"` options.DatabaseCommand `yaml:",inline" mapstructure:",squash"` @@ -85,7 +82,7 @@ func runDBCheck(opts dbCheckOptions) error { } if updateAvailable { - os.Exit(exitCodeOnDBUpgradeAvailable) //nolint:gocritic + return grypeerr.ErrDBUpgradeAvailable } return nil } diff --git a/cmd/grype/cli/commands/db_import.go b/cmd/grype/cli/commands/db_import.go index 03beeb453a9..587ce08e281 100644 --- a/cmd/grype/cli/commands/db_import.go +++ b/cmd/grype/cli/commands/db_import.go @@ -16,9 +16,9 @@ func DBImport(app clio.Application) *cobra.Command { opts := options.DefaultDatabaseCommand(app.ID()) cmd := &cobra.Command{ - Use: "import FILE", - Short: "Import a vulnerability database or archive", - Long: fmt.Sprintf("import a vulnerability database or archive from a local FILE.\nDB archives can be obtained from %q.", opts.DB.UpdateURL), + Use: "import FILE | URL", + Short: "Import a vulnerability database or archive from a local file or URL", + Long: fmt.Sprintf("import a vulnerability database archive from a local FILE or URL.\nDB archives can be obtained from %q (or running `db list`). If the URL has a `checksum` query parameter with a fully qualified digest (e.g. 'sha256:abc728...') then the archive/DB will be verified against this value.", opts.DB.UpdateURL), Args: cobra.ExactArgs(1), RunE: func(_ *cobra.Command, args []string) error { return runDBImport(*opts, args[0]) @@ -33,7 +33,7 @@ func DBImport(app clio.Application) *cobra.Command { return app.SetupCommand(cmd, &configWrapper{opts}) } -func runDBImport(opts options.DatabaseCommand, dbArchivePath string) error { +func runDBImport(opts options.DatabaseCommand, reference string) error { // TODO: tui update? better logging? 
client, err := distribution.NewClient(opts.ToClientConfig()) if err != nil { @@ -44,12 +44,12 @@ func runDBImport(opts options.DatabaseCommand, dbArchivePath string) error { return fmt.Errorf("unable to create curator: %w", err) } - log.WithFields("path", dbArchivePath).Infof("importing vulnerability database archive") - if err := c.Import(dbArchivePath); err != nil { + log.WithFields("reference", reference).Infof("importing vulnerability database archive") + if err := c.Import(reference); err != nil { return fmt.Errorf("unable to import vulnerability database: %w", err) } s := c.Status() - log.WithFields("built", s.Built.String(), "status", s.Status()).Info("vulnerability database imported") + log.WithFields("built", s.Built.String(), "status", renderStoreValidation(s)).Info("vulnerability database imported") return nil } diff --git a/cmd/grype/cli/commands/db_list.go b/cmd/grype/cli/commands/db_list.go index f20ecd84f6b..3be6a4a9d7b 100644 --- a/cmd/grype/cli/commands/db_list.go +++ b/cmd/grype/cli/commands/db_list.go @@ -6,7 +6,6 @@ import ( "io" "net/url" "os" - "path" "github.com/spf13/cobra" @@ -62,28 +61,39 @@ func runDBList(opts dbListOptions) error { return fmt.Errorf("unable to get database listing: %w", err) } - return presentDBList(opts.Output, opts.DB.UpdateURL, os.Stdout, latest) + u, err := c.ResolveArchiveURL(latest.Archive) + if err != nil { + return fmt.Errorf("unable to resolve database URL: %w", err) + } + + return presentDBList(opts.Output, u, opts.DB.UpdateURL, os.Stdout, latest) } -func presentDBList(format string, u string, writer io.Writer, latest *distribution.LatestDocument) error { +func presentDBList(format string, archiveURL, listingURL string, writer io.Writer, latest *distribution.LatestDocument) error { if latest == nil { return fmt.Errorf("no database listing found") } - parsedURL, err := url.Parse(u) + // remove query params + archiveURLObj, err := url.Parse(archiveURL) if err != nil { - return fmt.Errorf("failed to parse base URL: %w", err) + return fmt.Errorf("unable to parse db URL %q: %w", archiveURL, err) } - parsedURL.Path = path.Join(path.Dir(parsedURL.Path), latest.Path) + archiveURLObj.RawQuery = "" + + if listingURL == distribution.DefaultConfig().LatestURL { + // append on the schema + listingURL = fmt.Sprintf("%s/v%v/%s", listingURL, latest.SchemaVersion.Model, distribution.LatestFileName) + } switch format { case textOutputFormat: fmt.Fprintf(writer, "Status: %s\n", latest.Status) fmt.Fprintf(writer, "Schema: %s\n", latest.SchemaVersion.String()) fmt.Fprintf(writer, "Built: %s\n", latest.Built.String()) - fmt.Fprintf(writer, "Listing: %s\n", u) - fmt.Fprintf(writer, "DB URL: %s\n", parsedURL.String()) + fmt.Fprintf(writer, "Listing: %s\n", listingURL) + fmt.Fprintf(writer, "DB URL: %s\n", archiveURLObj.String()) fmt.Fprintf(writer, "Checksum: %s\n", latest.Checksum) case jsonOutputFormat, "raw": enc := json.NewEncoder(writer) diff --git a/cmd/grype/cli/commands/db_list_test.go b/cmd/grype/cli/commands/db_list_test.go index 30d24194c49..3492f8e4114 100644 --- a/cmd/grype/cli/commands/db_list_test.go +++ b/cmd/grype/cli/commands/db_list_test.go @@ -66,7 +66,6 @@ func Test_ListingUserAgent(t *testing.T) { } func TestPresentDBList(t *testing.T) { - baseURL := "http://localhost:8000/latest.json" latestDoc := &distribution.LatestDocument{ Status: "active", Archive: distribution.Archive{ @@ -82,20 +81,39 @@ func TestPresentDBList(t *testing.T) { tests := []struct { name string format string + baseURL string + archiveURL string latest 
*distribution.LatestDocument expectedText string expectedErr require.ErrorAssertionFunc }{ { - name: "valid text format", - format: textOutputFormat, - latest: latestDoc, + name: "valid text format", + format: textOutputFormat, + latest: latestDoc, + baseURL: "http://localhost:8000/latest.json", + archiveURL: "http://localhost:8000/vulnerability-db_v6.0.0_2024-11-25T01:31:56Z_1732718597.tar.zst", expectedText: `Status: active Schema: v6.0.0 Built: 2024-11-27T14:43:17Z Listing: http://localhost:8000/latest.json DB URL: http://localhost:8000/vulnerability-db_v6.0.0_2024-11-25T01:31:56Z_1732718597.tar.zst Checksum: sha256:16bcb6551c748056f752f299fcdb4fa50fe61589d086be3889e670261ff21ca4 +`, + expectedErr: require.NoError, + }, + { + name: "complete default values", + format: textOutputFormat, + latest: latestDoc, + baseURL: "https://grype.anchore.io/databases", + archiveURL: "https://grype.anchore.io/databases/v6/vulnerability-db_v6.0.0_2024-11-25T01:31:56Z_1732718597.tar.zst", + expectedText: `Status: active +Schema: v6.0.0 +Built: 2024-11-27T14:43:17Z +Listing: https://grype.anchore.io/databases/v6/latest.json +DB URL: https://grype.anchore.io/databases/v6/vulnerability-db_v6.0.0_2024-11-25T01:31:56Z_1732718597.tar.zst +Checksum: sha256:16bcb6551c748056f752f299fcdb4fa50fe61589d086be3889e670261ff21ca4 `, expectedErr: require.NoError, }, @@ -133,7 +151,7 @@ Checksum: sha256:16bcb6551c748056f752f299fcdb4fa50fe61589d086be3889e670261ff21ca t.Run(tt.name, func(t *testing.T) { writer := &bytes.Buffer{} - err := presentDBList(tt.format, baseURL, writer, tt.latest) + err := presentDBList(tt.format, tt.archiveURL, tt.baseURL, writer, tt.latest) if tt.expectedErr == nil { tt.expectedErr = require.NoError } diff --git a/cmd/grype/cli/commands/db_providers.go b/cmd/grype/cli/commands/db_providers.go index 9394f9d23d7..57c303ef67e 100644 --- a/cmd/grype/cli/commands/db_providers.go +++ b/cmd/grype/cli/commands/db_providers.go @@ -7,7 +7,6 @@ import ( "strings" "time" - "github.com/olekukonko/tablewriter" "github.com/spf13/cobra" "github.com/anchore/clio" @@ -77,7 +76,10 @@ func runDBProviders(opts *dbProvidersOptions) error { switch opts.Output { case tableOutputFormat, textOutputFormat: - displayDBProvidersTable(toProviders(providerModels), sb) + err = displayDBProvidersTable(toProviders(providerModels), sb) + if err != nil { + return err + } case jsonOutputFormat: err = displayDBProvidersJSON(toProviders(providerModels), sb) if err != nil { @@ -113,29 +115,18 @@ func toProviders(providers []v6.Provider) []provider { return res } -func displayDBProvidersTable(providers []provider, output io.Writer) { +func displayDBProvidersTable(providers []provider, output io.Writer) error { rows := [][]string{} - for _, provider := range providers { - rows = append(rows, []string{provider.Name, provider.Version, provider.Processor, provider.DateCaptured.String(), provider.InputDigest}) + for _, p := range providers { + rows = append(rows, []string{p.Name, p.Version, p.Processor, p.DateCaptured.String(), p.InputDigest}) } - table := tablewriter.NewWriter(output) - table.SetHeader([]string{"Name", "Version", "Processor", "Date Captured", "Input Digest"}) - - table.SetHeaderLine(false) - table.SetBorder(false) - table.SetAutoWrapText(false) - table.SetAutoFormatHeaders(true) - table.SetHeaderAlignment(tablewriter.ALIGN_LEFT) - table.SetAlignment(tablewriter.ALIGN_LEFT) - table.SetCenterSeparator("") - table.SetColumnSeparator("") - table.SetRowSeparator("") - table.SetTablePadding(" ") - table.SetNoWhiteSpace(true) - - 
table.AppendBulk(rows) - table.Render() + table := newTable(output, []string{"Name", "Version", "Processor", "Date Captured", "Input Digest"}) + + if err := table.Bulk(rows); err != nil { + return fmt.Errorf("failed to add table rows: %w", err) + } + return table.Render() } func displayDBProvidersJSON(providers []provider, output io.Writer) error { diff --git a/cmd/grype/cli/commands/db_providers_test.go b/cmd/grype/cli/commands/db_providers_test.go index 6c4c2cf1ce5..3f3771e5bc6 100644 --- a/cmd/grype/cli/commands/db_providers_test.go +++ b/cmd/grype/cli/commands/db_providers_test.go @@ -26,13 +26,13 @@ func TestDisplayDBProvidersTable(t *testing.T) { }, } - expectedOutput := `NAME VERSION PROCESSOR DATE CAPTURED INPUT DIGEST + expectedOutput := `NAME VERSION PROCESSOR DATE CAPTURED INPUT DIGEST provider1 1.0.0 vunnel@3.2 2024-11-25 14:30:00 +0000 UTC xxh64:1234567834567 provider2 2.0.0 vunnel@3.2 2024-11-26 10:15:00 +0000 UTC xxh64:9876543212345 ` var output bytes.Buffer - displayDBProvidersTable(providers, &output) + require.NoError(t, displayDBProvidersTable(providers, &output)) require.Equal(t, expectedOutput, output.String()) } diff --git a/cmd/grype/cli/commands/db_search.go b/cmd/grype/cli/commands/db_search.go index 9ee5f4dc770..fdd82409bc7 100644 --- a/cmd/grype/cli/commands/db_search.go +++ b/cmd/grype/cli/commands/db_search.go @@ -104,7 +104,7 @@ func DBSearch(app clio.Application) *cobra.Command { Search for affected packages by CPE (note: version/update is not considered): - $ grype db search --pkg 'cpe:2.3:a:jetty:jetty_http_server:*:*:*:*:*:*' + $ grype db search --pkg 'cpe:2.3:a:jetty:jetty_http_server:*:*:*:*:*:*:*:*' $ grype db search --pkg 'cpe:/a:jetty:jetty_http_server'`, PreRunE: disableUI(app), RunE: func(cmd *cobra.Command, args []string) (err error) { @@ -172,15 +172,14 @@ func runDBSearchMatches(opts dbSearchMatchOptions) error { } } - if len(rows) != 0 { - sb := &strings.Builder{} - err = presentDBSearchMatches(opts.Format.Output, rows, sb) - bus.Report(sb.String()) - if err != nil { - return fmt.Errorf("unable to present search results: %w", err) - } - } else { - bus.Notify("No results found") + sb := &strings.Builder{} + err = presentDBSearchMatches(opts.Format.Output, rows, sb) + rep := sb.String() + if rep != "" { + bus.Report(rep) + } + if err != nil { + return fmt.Errorf("unable to present search results: %w", err) } return queryErr @@ -189,14 +188,23 @@ func runDBSearchMatches(opts dbSearchMatchOptions) error { func presentDBSearchMatches(outputFormat string, structuredRows dbsearch.Matches, output io.Writer) error { switch outputFormat { case tableOutputFormat: + if len(structuredRows) == 0 { + bus.Notify("No results found") + return nil + } rows := renderDBSearchPackagesTableRows(structuredRows.Flatten()) - table := newTable(output) + table := newTable(output, []string{"Vulnerability", "Package", "Ecosystem", "Namespace", "Version Constraint"}) - table.SetHeader([]string{"Vulnerability", "Package", "Ecosystem", "Namespace", "Version Constraint"}) - table.AppendBulk(rows) - table.Render() + if err := table.Bulk(rows); err != nil { + return fmt.Errorf("failed to add table rows: %+v", err) + } + return table.Render() case jsonOutputFormat: + if structuredRows == nil { + // always allocate the top level collection + structuredRows = dbsearch.Matches{} + } enc := json.NewEncoder(output) enc.SetEscapeHTML(false) enc.SetIndent("", " ") diff --git a/cmd/grype/cli/commands/db_search_vuln.go b/cmd/grype/cli/commands/db_search_vuln.go index 
f9ea918be39..22563b25497 100644 --- a/cmd/grype/cli/commands/db_search_vuln.go +++ b/cmd/grype/cli/commands/db_search_vuln.go @@ -6,6 +6,7 @@ import ( "io" "sort" "strings" + "time" "github.com/hashicorp/go-multierror" "github.com/scylladb/go-set/strset" @@ -91,12 +92,11 @@ func runDBSearchVulnerabilities(opts dbSearchVulnerabilityOptions) error { return err } - if len(rows) != 0 { - sb := &strings.Builder{} - err = presentDBSearchVulnerabilities(opts.Format.Output, rows, sb) - bus.Report(sb.String()) - } else { - bus.Notify("No results found") + sb := &strings.Builder{} + err = presentDBSearchVulnerabilities(opts.Format.Output, rows, sb) + rep := sb.String() + if rep != "" { + bus.Report(rep) } return err @@ -129,20 +129,26 @@ func validateProvidersFilter(reader v6.Reader, providers []string) error { } func presentDBSearchVulnerabilities(outputFormat string, structuredRows []dbsearch.Vulnerability, output io.Writer) error { - if len(structuredRows) == 0 { - return nil - } - switch outputFormat { case tableOutputFormat: + if len(structuredRows) == 0 { + bus.Notify("No results found") + return nil + } + rows := renderDBSearchVulnerabilitiesTableRows(structuredRows) - table := newTable(output) + table := newTable(output, []string{"ID", "Provider", "Published", "Severity", "Reference"}) - table.SetHeader([]string{"ID", "Provider", "Published", "Severity", "Reference"}) - table.AppendBulk(rows) - table.Render() + if err := table.Bulk(rows); err != nil { + return fmt.Errorf("failed to add table rows: %+v", err) + } + return table.Render() case jsonOutputFormat: + if structuredRows == nil { + // always allocate the top level collection + structuredRows = []dbsearch.Vulnerability{} + } enc := json.NewEncoder(output) enc.SetEscapeHTML(false) enc.SetIndent("", " ") @@ -166,36 +172,14 @@ func renderDBSearchVulnerabilitiesTableRows(structuredRows []dbsearch.Vulnerabil versionsByRow := make(map[row][]string) for _, rr := range structuredRows { - // get the first severity value (which is ranked highest) - var sev string - if len(rr.Severities) > 0 { - sev = fmt.Sprintf("%s", rr.Severities[0].Value) - } - - prov := rr.Provider - var versions []string - for _, os := range rr.OperatingSystems { - versions = append(versions, os.Version) - } - - var published string - if rr.PublishedDate != nil && !rr.PublishedDate.IsZero() { - published = rr.PublishedDate.Format("2006-01-02") - } - - var ref string - if len(rr.References) > 0 { - ref = rr.References[0].URL - } - r := row{ Vuln: rr.ID, - ProviderWithoutVersions: prov, - PublishedDate: published, - Severity: sev, - Reference: ref, + ProviderWithoutVersions: rr.Provider, + PublishedDate: getDate(rr.PublishedDate), + Severity: rr.Severity, + Reference: getPrimaryReference(rr.References), } - versionsByRow[r] = append(versionsByRow[r], versions...) + versionsByRow[r] = append(versionsByRow[r], getOSVersions(rr.OperatingSystems)...) 
} var rows [][]string @@ -220,3 +204,26 @@ func renderDBSearchVulnerabilitiesTableRows(structuredRows []dbsearch.Vulnerabil return rows } + +func getOSVersions(oss []dbsearch.OperatingSystem) []string { + var versions []string + for _, os := range oss { + versions = append(versions, os.Version) + } + return versions +} + +func getPrimaryReference(refs []v6.Reference) string { + if len(refs) > 0 { + return refs[0].URL + } + + return "" +} + +func getDate(t *time.Time) string { + if t != nil && !t.IsZero() { + return t.Format("2006-01-02") + } + return "" +} diff --git a/cmd/grype/cli/commands/db_search_vuln_test.go b/cmd/grype/cli/commands/db_search_vuln_test.go new file mode 100644 index 00000000000..a5a48f703c0 --- /dev/null +++ b/cmd/grype/cli/commands/db_search_vuln_test.go @@ -0,0 +1,140 @@ +package commands + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/anchore/grype/cmd/grype/cli/commands/internal/dbsearch" + v6 "github.com/anchore/grype/grype/db/v6" +) + +func TestGetOSVersions(t *testing.T) { + tests := []struct { + name string + input []dbsearch.OperatingSystem + expected []string + }{ + { + name: "empty list", + input: []dbsearch.OperatingSystem{}, + expected: nil, + }, + { + name: "single os", + input: []dbsearch.OperatingSystem{ + { + Name: "debian", + Version: "11", + }, + }, + expected: []string{"11"}, + }, + { + name: "multiple os", + input: []dbsearch.OperatingSystem{ + { + Name: "ubuntu", + Version: "16.04", + }, + { + Name: "ubuntu", + Version: "22.04", + }, + { + Name: "ubuntu", + Version: "24.04", + }, + }, + expected: []string{"16.04", "22.04", "24.04"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := getOSVersions(tt.input) + require.Equal(t, tt.expected, actual) + }) + } +} + +func TestGetPrimaryReference(t *testing.T) { + tests := []struct { + name string + input []v6.Reference + expected string + }{ + { + name: "empty list", + input: []v6.Reference{}, + expected: "", + }, + { + name: "single reference", + input: []v6.Reference{ + { + URL: "https://example.com/vuln/123", + Tags: []string{"primary"}, + }, + }, + expected: "https://example.com/vuln/123", + }, + { + name: "multiple references", + input: []v6.Reference{ + { + URL: "https://example.com/vuln/123", + Tags: []string{"primary"}, + }, + { + URL: "https://example.com/advisory/123", + Tags: []string{"secondary"}, + }, + }, + expected: "https://example.com/vuln/123", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := getPrimaryReference(tt.input) + require.Equal(t, tt.expected, actual) + }) + } +} + +func TestGetDate(t *testing.T) { + tests := []struct { + name string + input *time.Time + expected string + }{ + { + name: "nil time", + input: nil, + expected: "", + }, + { + name: "zero time", + input: &time.Time{}, + expected: "", + }, + { + name: "valid time", + input: timePtr(time.Date(2023, 5, 15, 0, 0, 0, 0, time.UTC)), + expected: "2023-05-15", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := getDate(tt.input) + require.Equal(t, tt.expected, actual) + }) + } +} + +func timePtr(t time.Time) *time.Time { + return &t +} diff --git a/cmd/grype/cli/commands/db_status.go b/cmd/grype/cli/commands/db_status.go index 46c31c63f10..af402e27aab 100644 --- a/cmd/grype/cli/commands/db_status.go +++ b/cmd/grype/cli/commands/db_status.go @@ -5,14 +5,15 @@ import ( "fmt" "io" "os" + "time" "github.com/spf13/cobra" "github.com/anchore/clio" 
"github.com/anchore/grype/cmd/grype/cli/options" - v6 "github.com/anchore/grype/grype/db/v6" "github.com/anchore/grype/grype/db/v6/distribution" "github.com/anchore/grype/grype/db/v6/installation" + "github.com/anchore/grype/grype/vulnerability" ) type dbStatusOptions struct { @@ -67,17 +68,19 @@ func runDBStatus(opts dbStatusOptions) error { return fmt.Errorf("failed to present db status information: %+v", err) } - return status.Err + return status.Error } -func presentDBStatus(format string, writer io.Writer, status v6.Status) error { +func presentDBStatus(format string, writer io.Writer, status vulnerability.ProviderStatus) error { switch format { case textOutputFormat: fmt.Fprintln(writer, "Path: ", status.Path) fmt.Fprintln(writer, "Schema: ", status.SchemaVersion) - fmt.Fprintln(writer, "Built: ", status.Built.String()) - fmt.Fprintln(writer, "Checksum: ", status.Checksum) - fmt.Fprintln(writer, "Status: ", status.Status()) + fmt.Fprintln(writer, "Built: ", status.Built.Format(time.RFC3339)) + if status.From != "" { + fmt.Fprintln(writer, "From: ", status.From) + } + fmt.Fprintln(writer, "Status: ", renderStoreValidation(status)) case jsonOutputFormat: enc := json.NewEncoder(writer) enc.SetEscapeHTML(false) @@ -91,3 +94,10 @@ func presentDBStatus(format string, writer io.Writer, status v6.Status) error { return nil } + +func renderStoreValidation(status vulnerability.ProviderStatus) string { + if status.Error != nil { + return "invalid" + } + return "valid" +} diff --git a/cmd/grype/cli/commands/db_status_test.go b/cmd/grype/cli/commands/db_status_test.go index dbfbcc73525..a2d774a8b4c 100644 --- a/cmd/grype/cli/commands/db_status_test.go +++ b/cmd/grype/cli/commands/db_status_test.go @@ -10,30 +10,30 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - v6 "github.com/anchore/grype/grype/db/v6" + "github.com/anchore/grype/grype/vulnerability" ) func TestPresentDBStatus(t *testing.T) { - validStatus := v6.Status{ + validStatus := vulnerability.ProviderStatus{ Path: "/Users/test/Library/Caches/grype/db/6/vulnerability.db", + From: "https://grype.anchore.io/databases/v6/vulnerability-db_v6.0.2_2025-03-14T01:31:06Z_1741925227.tar.zst?checksum=sha256%3Ad4654e3b212f1d8a1aaab979599691099af541568d687c4a7c4e7c1da079b9b8", SchemaVersion: "6.0.0", - Built: v6.Time{Time: time.Date(2024, 11, 27, 14, 43, 17, 0, time.UTC)}, - Checksum: "xxh64:89d3ae128f6e718e", - Err: nil, + Built: time.Date(2024, 11, 27, 14, 43, 17, 0, time.UTC), + Error: nil, } - invalidStatus := v6.Status{ + invalidStatus := vulnerability.ProviderStatus{ Path: "/Users/test/Library/Caches/grype/db/6/vulnerability.db", + From: "https://grype.anchore.io/databases/v6/vulnerability-db_v6.0.2_2025-03-14T01:31:06Z_1741925227.tar.zst?checksum=sha256%3Ad4654e3b212f1d8a1aaab979599691099af541568d687c4a7c4e7c1da079b9b8", SchemaVersion: "6.0.0", - Built: v6.Time{Time: time.Date(2024, 11, 27, 14, 43, 17, 0, time.UTC)}, - Checksum: "xxh64:89d3ae128f6e718e", - Err: errors.New("checksum mismatch"), + Built: time.Date(2024, 11, 27, 14, 43, 17, 0, time.UTC), + Error: errors.New("checksum mismatch"), } tests := []struct { name string format string - status v6.Status + status vulnerability.ProviderStatus expectedText string expectedErr require.ErrorAssertionFunc }{ @@ -44,7 +44,7 @@ func TestPresentDBStatus(t *testing.T) { expectedText: `Path: /Users/test/Library/Caches/grype/db/6/vulnerability.db Schema: 6.0.0 Built: 2024-11-27T14:43:17Z -Checksum: xxh64:89d3ae128f6e718e +From: 
https://grype.anchore.io/databases/v6/vulnerability-db_v6.0.2_2025-03-14T01:31:06Z_1741925227.tar.zst?checksum=sha256%3Ad4654e3b212f1d8a1aaab979599691099af541568d687c4a7c4e7c1da079b9b8 Status: valid `, expectedErr: require.NoError, @@ -56,7 +56,7 @@ Status: valid expectedText: `Path: /Users/test/Library/Caches/grype/db/6/vulnerability.db Schema: 6.0.0 Built: 2024-11-27T14:43:17Z -Checksum: xxh64:89d3ae128f6e718e +From: https://grype.anchore.io/databases/v6/vulnerability-db_v6.0.2_2025-03-14T01:31:06Z_1741925227.tar.zst?checksum=sha256%3Ad4654e3b212f1d8a1aaab979599691099af541568d687c4a7c4e7c1da079b9b8 Status: invalid `, expectedErr: require.NoError, @@ -67,10 +67,10 @@ Status: invalid status: validStatus, expectedText: `{ "schemaVersion": "6.0.0", + "from": "https://grype.anchore.io/databases/v6/vulnerability-db_v6.0.2_2025-03-14T01:31:06Z_1741925227.tar.zst?checksum=sha256%3Ad4654e3b212f1d8a1aaab979599691099af541568d687c4a7c4e7c1da079b9b8", "built": "2024-11-27T14:43:17Z", "path": "/Users/test/Library/Caches/grype/db/6/vulnerability.db", - "checksum": "xxh64:89d3ae128f6e718e", - "error": "" + "valid": true } `, expectedErr: require.NoError, @@ -81,9 +81,10 @@ Status: invalid status: invalidStatus, expectedText: `{ "schemaVersion": "6.0.0", + "from": "https://grype.anchore.io/databases/v6/vulnerability-db_v6.0.2_2025-03-14T01:31:06Z_1741925227.tar.zst?checksum=sha256%3Ad4654e3b212f1d8a1aaab979599691099af541568d687c4a7c4e7c1da079b9b8", "built": "2024-11-27T14:43:17Z", "path": "/Users/test/Library/Caches/grype/db/6/vulnerability.db", - "checksum": "xxh64:89d3ae128f6e718e", + "valid": false, "error": "checksum mismatch" } `, diff --git a/cmd/grype/cli/commands/internal/dbsearch/affected_packages.go b/cmd/grype/cli/commands/internal/dbsearch/affected_packages.go index cc5190dd0e7..17af2ba64d7 100644 --- a/cmd/grype/cli/commands/internal/dbsearch/affected_packages.go +++ b/cmd/grype/cli/commands/internal/dbsearch/affected_packages.go @@ -33,6 +33,10 @@ type AffectedPackageInfo struct { // CPE is a Common Platform Enumeration that is affected by the vulnerability CPE *CPE `json:"cpe,omitempty"` + // Namespace is a holdover value from the v5 DB schema that combines provider and search methods into a single value + // Deprecated: this field will be removed in a later version of the search schema + Namespace string `json:"namespace"` + // Detail is the detailed information about the affected package Detail v6.AffectedPackageBlob `json:"detail"` } @@ -110,10 +114,11 @@ func newAffectedPackageRows(affectedPkgs []affectedPackageWithDecorations, affec rows = append(rows, AffectedPackage{ Vulnerability: newVulnerabilityInfo(*pkg.Vulnerability, pkg.vulnerabilityDecorations), AffectedPackageInfo: AffectedPackageInfo{ - Model: &pkg.AffectedPackageHandle, - OS: toOS(pkg.OperatingSystem), - Package: toPackage(pkg.Package), - Detail: detail, + Model: &pkg.AffectedPackageHandle, + OS: toOS(pkg.OperatingSystem), + Package: toPackage(pkg.Package), + Namespace: v6.MimicV5Namespace(pkg.Vulnerability, &pkg.AffectedPackageHandle), + Detail: detail, }, }) } @@ -138,8 +143,9 @@ func newAffectedPackageRows(affectedPkgs []affectedPackageWithDecorations, affec // tracking model information is not possible with CPE handles Vulnerability: newVulnerabilityInfo(*ac.Vulnerability, ac.vulnerabilityDecorations), AffectedPackageInfo: AffectedPackageInfo{ - CPE: c, - Detail: detail, + CPE: c, + Namespace: v6.MimicV5Namespace(ac.Vulnerability, nil), // no affected package will default to NVD + Detail: detail, }, }) } diff --git 
a/cmd/grype/cli/commands/internal/dbsearch/affected_packages_test.go b/cmd/grype/cli/commands/internal/dbsearch/affected_packages_test.go index b67b32d2e3f..22b399bae0b 100644 --- a/cmd/grype/cli/commands/internal/dbsearch/affected_packages_test.go +++ b/cmd/grype/cli/commands/internal/dbsearch/affected_packages_test.go @@ -49,8 +49,9 @@ func TestAffectedPackageTableRowMarshalJSON(t *testing.T) { }, }, AffectedPackageInfo: AffectedPackageInfo{ - Package: &Package{Name: "pkg1", Ecosystem: "ecosystem1"}, - CPE: &CPE{Part: "a", Vendor: "vendor1", Product: "product1"}, + Package: &Package{Name: "pkg1", Ecosystem: "ecosystem1"}, + CPE: &CPE{Part: "a", Vendor: "vendor1", Product: "product1"}, + Namespace: "namespace1", Detail: v6.AffectedPackageBlob{ CVEs: []string{"CVE-1234-5678"}, Qualifiers: &v6.AffectedPackageQualifiers{ @@ -119,7 +120,8 @@ func TestAffectedPackageTableRowMarshalJSON(t *testing.T) { "name": "pkg1", "ecosystem": "ecosystem1" }, - "cpe": "cpe:2.3:a:vendor1:product1:*:*:*:*:*:*", + "cpe": "cpe:2.3:a:vendor1:product1:*:*:*:*:*:*:*:*", + "namespace": "namespace1", "detail": { "cves": [ "CVE-1234-5678" @@ -167,7 +169,23 @@ func TestNewAffectedPackageRows(t *testing.T) { Status: "active", PublishedDate: ptr(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC)), ModifiedDate: ptr(time.Date(2023, 2, 1, 0, 0, 0, 0, time.UTC)), - BlobValue: &v6.VulnerabilityBlob{Description: "Test vulnerability"}, + BlobValue: &v6.VulnerabilityBlob{ + Description: "Test vulnerability", + Severities: []v6.Severity{ + { + Scheme: "CVSS_V3", + Value: CVSSSeverity{ + Vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + Version: "3.1", + Metrics: CvssMetrics{ + BaseScore: 9.8, + }, + }, + Source: "nvd@nist.gov", + Rank: 1, + }, + }, + }, }, BlobValue: &v6.AffectedPackageBlob{ CVEs: []string{"CVE-1234-5678"}, @@ -272,11 +290,28 @@ func TestNewAffectedPackageRows(t *testing.T) { expected := []AffectedPackage{ { Vulnerability: VulnerabilityInfo{ - VulnerabilityBlob: v6.VulnerabilityBlob{Description: "Test vulnerability"}, - Provider: "provider1", - Status: "active", - PublishedDate: ptr(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC)), - ModifiedDate: ptr(time.Date(2023, 2, 1, 0, 0, 0, 0, time.UTC)), + VulnerabilityBlob: v6.VulnerabilityBlob{ + Description: "Test vulnerability", + Severities: []v6.Severity{ + { + Scheme: "CVSS_V3", + Value: CVSSSeverity{ + Vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + Version: "3.1", + Metrics: CvssMetrics{ + BaseScore: 9.8, + }, + }, + Source: "nvd@nist.gov", + Rank: 1, + }, + }, + }, + Severity: "critical", + Provider: "provider1", + Status: "active", + PublishedDate: ptr(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC)), + ModifiedDate: ptr(time.Date(2023, 2, 1, 0, 0, 0, 0, time.UTC)), KnownExploited: []KnownExploited{ { CVE: "CVE-1234-5678", @@ -301,8 +336,9 @@ func TestNewAffectedPackageRows(t *testing.T) { }, }, AffectedPackageInfo: AffectedPackageInfo{ - OS: &OperatingSystem{Name: "Linux", Version: "5.10"}, - Package: &Package{Name: "pkg1", Ecosystem: "ecosystem1"}, + OS: &OperatingSystem{Name: "Linux", Version: "5.10"}, + Package: &Package{Name: "pkg1", Ecosystem: "ecosystem1"}, + Namespace: "provider1:distro:Linux:5.10", Detail: v6.AffectedPackageBlob{ CVEs: []string{"CVE-1234-5678"}, Qualifiers: &v6.AffectedPackageQualifiers{ @@ -327,6 +363,7 @@ func TestNewAffectedPackageRows(t *testing.T) { { Vulnerability: VulnerabilityInfo{ VulnerabilityBlob: v6.VulnerabilityBlob{Description: "CPE vulnerability description"}, + Severity: "unknown", Provider: "provider2", 
KnownExploited: []KnownExploited{ { @@ -352,7 +389,8 @@ func TestNewAffectedPackageRows(t *testing.T) { }, }, AffectedPackageInfo: AffectedPackageInfo{ - CPE: &CPE{Part: "a", Vendor: "vendor1", Product: "product1"}, + CPE: &CPE{Part: "a", Vendor: "vendor1", Product: "product1"}, + Namespace: "provider2:cpe", Detail: v6.AffectedPackageBlob{ CVEs: []string{"CVE-9876-5432"}, Ranges: []v6.AffectedRange{ @@ -507,6 +545,7 @@ func TestAffectedPackages(t *testing.T) { { Vulnerability: VulnerabilityInfo{ VulnerabilityBlob: v6.VulnerabilityBlob{Description: "Test vulnerability"}, + Severity: "unknown", Provider: "provider1", Status: "active", PublishedDate: ptr(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC)), @@ -535,8 +574,9 @@ func TestAffectedPackages(t *testing.T) { }, }, AffectedPackageInfo: AffectedPackageInfo{ - OS: &OperatingSystem{Name: "Linux", Version: "5.10"}, - Package: &Package{Name: "pkg1", Ecosystem: "ecosystem1"}, + OS: &OperatingSystem{Name: "Linux", Version: "5.10"}, + Package: &Package{Name: "pkg1", Ecosystem: "ecosystem1"}, + Namespace: "provider1:distro:Linux:5.10", Detail: v6.AffectedPackageBlob{ CVEs: []string{"CVE-1234-5678"}, Ranges: []v6.AffectedRange{ @@ -557,6 +597,7 @@ func TestAffectedPackages(t *testing.T) { { Vulnerability: VulnerabilityInfo{ VulnerabilityBlob: v6.VulnerabilityBlob{Description: "CPE vulnerability description"}, + Severity: "unknown", Provider: "provider2", KnownExploited: []KnownExploited{ { @@ -582,7 +623,8 @@ func TestAffectedPackages(t *testing.T) { }, }, AffectedPackageInfo: AffectedPackageInfo{ - CPE: &CPE{Part: "a", Vendor: "vendor1", Product: "product1"}, + CPE: &CPE{Part: "a", Vendor: "vendor1", Product: "product1"}, + Namespace: "provider2:cpe", Detail: v6.AffectedPackageBlob{ CVEs: []string{"CVE-9876-5432"}, Ranges: []v6.AffectedRange{ diff --git a/cmd/grype/cli/commands/internal/dbsearch/matches.go b/cmd/grype/cli/commands/internal/dbsearch/matches.go index 8343adb1107..0e3aba8399f 100644 --- a/cmd/grype/cli/commands/internal/dbsearch/matches.go +++ b/cmd/grype/cli/commands/internal/dbsearch/matches.go @@ -41,7 +41,7 @@ func (m Matches) Flatten() []AffectedPackage { return rows } -func newMatchesRows(affectedPkgs []affectedPackageWithDecorations, affectedCPEs []affectedCPEWithDecorations) (rows []Match, retErr error) { +func newMatchesRows(affectedPkgs []affectedPackageWithDecorations, affectedCPEs []affectedCPEWithDecorations) (rows []Match, retErr error) { // nolint:funlen var affectedPkgsByVuln = make(map[v6.ID][]AffectedPackageInfo) var vulnsByID = make(map[v6.ID]v6.VulnerabilityHandle) var decorationsByID = make(map[v6.ID]vulnerabilityDecorations) @@ -62,10 +62,11 @@ func newMatchesRows(affectedPkgs []affectedPackageWithDecorations, affectedCPEs } aff := AffectedPackageInfo{ - Model: &pkg.AffectedPackageHandle, - OS: toOS(pkg.OperatingSystem), - Package: toPackage(pkg.Package), - Detail: detail, + Model: &pkg.AffectedPackageHandle, + OS: toOS(pkg.OperatingSystem), + Package: toPackage(pkg.Package), + Namespace: v6.MimicV5Namespace(pkg.Vulnerability, &pkg.AffectedPackageHandle), + Detail: detail, } affectedPkgsByVuln[pkg.Vulnerability.ID] = append(affectedPkgsByVuln[pkg.Vulnerability.ID], aff) @@ -94,8 +95,9 @@ func newMatchesRows(affectedPkgs []affectedPackageWithDecorations, affectedCPEs aff := AffectedPackageInfo{ // tracking model information is not possible with CPE handles - CPE: c, - Detail: detail, + CPE: c, + Namespace: v6.MimicV5Namespace(ac.Vulnerability, nil), // no affected package will default to NVD + Detail: detail, } 
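As a minimal sketch (not part of this change): the namespace value set above is emitted in the JSON search output for v5 compatibility and is already marked deprecated; a downstream consumer could read it from a flattened affected-package object roughly as below, with the JSON document and struct abridged and illustrative.

package main

import (
    "encoding/json"
    "fmt"
)

type affectedPackageRow struct {
    Namespace string `json:"namespace"`
    Package   struct {
        Name      string `json:"name"`
        Ecosystem string `json:"ecosystem"`
    } `json:"package"`
}

func main() {
    doc := `{"package": {"name": "pkg1", "ecosystem": "ecosystem1"}, "namespace": "provider1:distro:Linux:5.10"}`
    var row affectedPackageRow
    if err := json.Unmarshal([]byte(doc), &row); err != nil {
        panic(err)
    }
    fmt.Println(row.Package.Name, "->", row.Namespace)
}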
affectedPkgsByVuln[ac.Vulnerability.ID] = append(affectedPkgsByVuln[ac.Vulnerability.ID], aff) diff --git a/cmd/grype/cli/commands/internal/dbsearch/versions.go b/cmd/grype/cli/commands/internal/dbsearch/versions.go index 292114f4b6e..9d5e27620fb 100644 --- a/cmd/grype/cli/commands/internal/dbsearch/versions.go +++ b/cmd/grype/cli/commands/internal/dbsearch/versions.go @@ -1,17 +1,20 @@ package dbsearch const ( - // MatchesSchemaVersion is the schema version for the `db search ` command - MatchesSchemaVersion = "1.0.1" + // MatchesSchemaVersion is the schema version for the `db search` command + MatchesSchemaVersion = "1.0.3" // MatchesSchemaVersion Changelog: // 1.0.0 - Initial schema 🎉 // 1.0.1 - Add KEV and EPSS data to vulnerability matches + // 1.0.2 - Add v5 namespace emulation for affected packages + // 1.0.3 - Add severity string field to vulnerability object // VulnerabilitiesSchemaVersion is the schema version for the `db search vuln` command - VulnerabilitiesSchemaVersion = "1.0.1" + VulnerabilitiesSchemaVersion = "1.0.3" // VulnerabilitiesSchemaVersion // 1.0.0 - Initial schema 🎉 // 1.0.1 - Add KEV and EPSS data to vulnerability + // 1.0.3 - Add severity string field to vulnerability object ) diff --git a/cmd/grype/cli/commands/internal/dbsearch/vulnerabilities.go b/cmd/grype/cli/commands/internal/dbsearch/vulnerabilities.go index e53f30a7c39..5e434ba144f 100644 --- a/cmd/grype/cli/commands/internal/dbsearch/vulnerabilities.go +++ b/cmd/grype/cli/commands/internal/dbsearch/vulnerabilities.go @@ -7,6 +7,8 @@ import ( "time" v6 "github.com/anchore/grype/grype/db/v6" + "github.com/anchore/grype/grype/vulnerability" + "github.com/anchore/grype/internal/cvss" "github.com/anchore/grype/internal/log" ) @@ -30,6 +32,9 @@ type VulnerabilityInfo struct { v6.VulnerabilityBlob `json:",inline"` + // Severity is the single string representation of the vulnerability's severity based on the set of available severity values + Severity string `json:"severity,omitempty"` + // Provider is the upstream data processor (usually Vunnel) that is responsible for vulnerability records. Each provider // should be scoped to a specific vulnerability dataset, for instance, the "ubuntu" provider for all records from // Canonicals' Ubuntu Security Notices (for all Ubuntu distro versions). @@ -83,6 +88,23 @@ type EPSS struct { Date string `json:"date"` } +type CVSSSeverity struct { + // Vector is the CVSS assessment as a parameterized string + Vector string `json:"vector"` + + // Version is the CVSS version (e.g. 
"3.0") + Version string `json:"version,omitempty"` + + // Metrics is the CVSS quantitative assessment based on the vector + Metrics CvssMetrics `json:"metrics"` +} + +type CvssMetrics struct { + BaseScore float64 `json:"baseScore"` + ExploitabilityScore *float64 `json:"exploitabilityScore,omitempty"` + ImpactScore *float64 `json:"impactScore,omitempty"` +} + type vulnerabilityAffectedPackageJoin struct { Vulnerability v6.VulnerabilityHandle OperatingSystems []v6.OperatingSystem @@ -111,9 +133,11 @@ func newVulnerabilityInfo(vuln v6.VulnerabilityHandle, vc vulnerabilityDecoratio if vuln.BlobValue != nil { blob = *vuln.BlobValue } + patchCVSSMetrics(&blob) return VulnerabilityInfo{ Model: vuln, VulnerabilityBlob: blob, + Severity: getSeverity(blob.Severities), Provider: vuln.Provider.ID, Status: string(vuln.Status), PublishedDate: vuln.PublishedDate, @@ -124,6 +148,29 @@ func newVulnerabilityInfo(vuln v6.VulnerabilityHandle, vc vulnerabilityDecoratio } } +func patchCVSSMetrics(blob *v6.VulnerabilityBlob) { + for i := range blob.Severities { + sev := &blob.Severities[i] + if val, ok := sev.Value.(v6.CVSSSeverity); ok { + met, err := cvss.ParseMetricsFromVector(val.Vector) + if err != nil { + log.WithFields("vector", val.Vector, "error", err).Debug("unable to parse CVSS vector") + continue + } + newSev := CVSSSeverity{ + Vector: val.Vector, + Version: val.Version, + Metrics: CvssMetrics{ + BaseScore: met.BaseScore, + ExploitabilityScore: met.ExploitabilityScore, + ImpactScore: met.ImpactScore, + }, + } + sev.Value = newSev + } + } +} + func newOperatingSystems(oss []v6.OperatingSystem) (os []OperatingSystem) { for _, o := range oss { os = append(os, OperatingSystem{ @@ -226,3 +273,18 @@ func FindVulnerabilities(reader interface { //nolint:funlen return newVulnerabilityRows(pairs...), err } + +func getSeverity(sevs []v6.Severity) string { + if len(sevs) == 0 { + return vulnerability.UnknownSeverity.String() + } + // get the first severity value (which is ranked highest) + switch v := sevs[0].Value.(type) { + case string: + return v + case CVSSSeverity: + return cvss.SeverityFromBaseScore(v.Metrics.BaseScore).String() + } + + return fmt.Sprintf("%v", sevs[0].Value) +} diff --git a/cmd/grype/cli/commands/internal/dbsearch/vulnerabilities_test.go b/cmd/grype/cli/commands/internal/dbsearch/vulnerabilities_test.go index 285addb5cf5..301b7b9e153 100644 --- a/cmd/grype/cli/commands/internal/dbsearch/vulnerabilities_test.go +++ b/cmd/grype/cli/commands/internal/dbsearch/vulnerabilities_test.go @@ -10,8 +10,96 @@ import ( "github.com/stretchr/testify/require" v6 "github.com/anchore/grype/grype/db/v6" + "github.com/anchore/grype/grype/vulnerability" ) +func TestGetSeverity(t *testing.T) { + tests := []struct { + name string + input []v6.Severity + expected string + }{ + { + name: "empty list", + input: []v6.Severity{}, + expected: vulnerability.UnknownSeverity.String(), + }, + { + name: "string severity", + input: []v6.Severity{ + { + Scheme: "HML", + Value: "high", + Source: "nvd@nist.gov", + Rank: 1, + }, + }, + expected: "high", + }, + { + name: "CVSS severity", + input: []v6.Severity{ + { + Scheme: "CVSS_V3", + Value: CVSSSeverity{ + Vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + Version: "3.1", + Metrics: CvssMetrics{ + BaseScore: 9.8, + }, + }, + Source: "nvd@nist.gov", + Rank: 1, + }, + }, + expected: "critical", + }, + { + name: "other value type", + input: []v6.Severity{ + { + Scheme: "OTHER", + Value: 42.0, + Source: "custom", + Rank: 1, + }, + }, + expected: "42", + }, + { + name: 
"multiple severities", + input: []v6.Severity{ + { + Scheme: "HML", + Value: "high", + Source: "nvd@nist.gov", + Rank: 1, + }, + { + Scheme: "CVSS_V3", + Value: CVSSSeverity{ + Vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + Version: "3.1", + Metrics: CvssMetrics{ + BaseScore: 9.8, + }, + }, + Source: "nvd@nist.gov", + Rank: 2, + }, + }, + expected: "high", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := getSeverity(tt.input) + require.Equal(t, tt.expected, actual) + }) + } +} + func TestNewVulnerabilityRows(t *testing.T) { vap := vulnerabilityAffectedPackageJoin{ Vulnerability: v6.VulnerabilityHandle{ @@ -22,7 +110,23 @@ func TestNewVulnerabilityRows(t *testing.T) { ModifiedDate: ptr(time.Date(2023, 2, 1, 0, 0, 0, 0, time.UTC)), WithdrawnDate: nil, Provider: &v6.Provider{ID: "provider1"}, - BlobValue: &v6.VulnerabilityBlob{Description: "Test description"}, + BlobValue: &v6.VulnerabilityBlob{ + Description: "Test description", + Severities: []v6.Severity{ + { + Scheme: "CVSS_V3", + Value: CVSSSeverity{ + Vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + Version: "3.1", + Metrics: CvssMetrics{ + BaseScore: 9.8, + }, + }, + Source: "nvd@nist.gov", + Rank: 1, + }, + }, + }, }, OperatingSystems: []v6.OperatingSystem{ {Name: "Linux", MajorVersion: "5", MinorVersion: "10"}, @@ -58,12 +162,29 @@ func TestNewVulnerabilityRows(t *testing.T) { expected := []Vulnerability{ { VulnerabilityInfo: VulnerabilityInfo{ - VulnerabilityBlob: v6.VulnerabilityBlob{Description: "Test description"}, - Provider: "provider1", - Status: "active", - PublishedDate: ptr(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC)), - ModifiedDate: ptr(time.Date(2023, 2, 1, 0, 0, 0, 0, time.UTC)), - WithdrawnDate: nil, + VulnerabilityBlob: v6.VulnerabilityBlob{ + Description: "Test description", + Severities: []v6.Severity{ + { + Scheme: "CVSS_V3", + Value: CVSSSeverity{ + Vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + Version: "3.1", + Metrics: CvssMetrics{ + BaseScore: 9.8, + }, + }, + Source: "nvd@nist.gov", + Rank: 1, + }, + }, + }, + Severity: "critical", + Provider: "provider1", + Status: "active", + PublishedDate: ptr(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC)), + ModifiedDate: ptr(time.Date(2023, 2, 1, 0, 0, 0, 0, time.UTC)), + WithdrawnDate: nil, KnownExploited: []KnownExploited{ { CVE: "CVE-1234-5678", @@ -113,7 +234,20 @@ func TestVulnerabilities(t *testing.T) { PublishedDate: ptr(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC)), ModifiedDate: ptr(time.Date(2023, 2, 1, 0, 0, 0, 0, time.UTC)), Provider: &v6.Provider{ID: "provider1"}, - BlobValue: &v6.VulnerabilityBlob{Description: "Test description"}, + BlobValue: &v6.VulnerabilityBlob{ + Description: "Test description", + Severities: []v6.Severity{ + { + Scheme: v6.SeveritySchemeCVSS, + Value: v6.CVSSSeverity{ + Vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", + Version: "3.1", + }, + Source: "nvd", + Rank: 1, + }, + }, + }, }, }, nil) @@ -156,12 +290,31 @@ func TestVulnerabilities(t *testing.T) { expected := []Vulnerability{ { VulnerabilityInfo: VulnerabilityInfo{ - VulnerabilityBlob: v6.VulnerabilityBlob{Description: "Test description"}, - Provider: "provider1", - Status: "active", - PublishedDate: ptr(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC)), - ModifiedDate: ptr(time.Date(2023, 2, 1, 0, 0, 0, 0, time.UTC)), - WithdrawnDate: nil, + VulnerabilityBlob: v6.VulnerabilityBlob{ + Description: "Test description", + Severities: []v6.Severity{ + { + Scheme: "CVSS", + Value: CVSSSeverity{ + Vector: 
"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", + Version: "3.1", + Metrics: CvssMetrics{ + BaseScore: 7.5, + ExploitabilityScore: ptr(3.9), + ImpactScore: ptr(3.6), + }, + }, + Source: "nvd", + Rank: 1, + }, + }, + }, + Severity: "high", + Provider: "provider1", + Status: "active", + PublishedDate: ptr(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC)), + ModifiedDate: ptr(time.Date(2023, 2, 1, 0, 0, 0, 0, time.UTC)), + WithdrawnDate: nil, KnownExploited: []KnownExploited{ { CVE: "CVE-1234-5678", diff --git a/cmd/grype/cli/commands/root.go b/cmd/grype/cli/commands/root.go index 0883b6b81d0..ab717d5714f 100644 --- a/cmd/grype/cli/commands/root.go +++ b/cmd/grype/cli/commands/root.go @@ -4,6 +4,7 @@ import ( "errors" "fmt" "strings" + "time" "github.com/spf13/cobra" "github.com/wagoodman/go-partybus" @@ -11,7 +12,7 @@ import ( "github.com/anchore/clio" "github.com/anchore/grype/cmd/grype/cli/options" "github.com/anchore/grype/grype" - v6 "github.com/anchore/grype/grype/db/v6" + "github.com/anchore/grype/grype/distro" "github.com/anchore/grype/grype/event" "github.com/anchore/grype/grype/event/parsers" "github.com/anchore/grype/grype/grypeerr" @@ -35,7 +36,6 @@ import ( "github.com/anchore/grype/internal/stringutil" "github.com/anchore/syft/syft" "github.com/anchore/syft/syft/cataloging" - "github.com/anchore/syft/syft/linux" syftPkg "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" ) @@ -65,6 +65,7 @@ You can also explicitly specify the scheme to use: {{.appName}} registry:yourrepo/yourimage:tag pull image directly from a registry (no container runtime required) {{.appName}} purl:path/to/purl/file read a newline separated file of package URLs from a path on disk {{.appName}} PURL read a single package PURL directly (e.g. pkg:apk/openssl@3.2.1?distro=alpine-3.20.3) + {{.appName}} CPE read a single CPE directly (e.g. 
cpe:2.3:a:openssl:openssl:3.0.14:*:*:*:*:*) You can also pipe in Syft JSON directly: syft yourimage:tag -o json | {{.appName}} @@ -119,7 +120,7 @@ func runGrype(app clio.Application, opts *options.Grype, userInput string) (errs } var vp vulnerability.Provider - var status *v6.Status + var status *vulnerability.ProviderStatus var packages []pkg.Package var s *sbom.SBOM var pkgContext pkg.Context @@ -151,11 +152,15 @@ func runGrype(app clio.Application, opts *options.Grype, userInput string) (errs return nil }, func() (err error) { + startTime := time.Now() + defer func() { log.WithFields("time", time.Since(startTime)).Info("loaded DB") }() log.Debug("loading DB") vp, status, err = grype.LoadVulnerabilityDB(opts.ToClientConfig(), opts.ToCuratorConfig(), opts.DB.AutoUpdate) return validateDBLoad(err, status) }, func() (err error) { + startTime := time.Now() + defer func() { log.WithFields("time", time.Since(startTime)).Info("gathered packages") }() log.Debugf("gathering packages") // packages are grype.Package, not syft.Package // the SBOM is returned for downstream formatting concerns @@ -168,7 +173,6 @@ func runGrype(app clio.Application, opts *options.Grype, userInput string) (errs return nil }, ) - if err != nil { return err } @@ -179,6 +183,7 @@ func runGrype(app clio.Application, opts *options.Grype, userInput string) (errs return fmt.Errorf("applying vex rules: %w", err) } + startTime := time.Now() applyDistroHint(packages, &pkgContext, opts) vulnMatcher := grype.VulnerabilityMatcher{ @@ -201,25 +206,51 @@ func runGrype(app clio.Application, opts *options.Grype, userInput string) (errs errs = appendErrors(errs, err) } - model, err := models.NewDocument(app.ID(), packages, pkgContext, *remainingMatches, ignoredMatches, vp, opts, status, models.SortByPackage) + log.WithFields("time", time.Since(startTime)).Info("found vulnerability matches") + startTime = time.Now() + + model, err := models.NewDocument(app.ID(), packages, pkgContext, *remainingMatches, ignoredMatches, vp, opts, dbInfo(status, vp), models.SortStrategy(opts.SortBy.Criteria)) if err != nil { return fmt.Errorf("failed to create document: %w", err) } if err = writer.Write(models.PresenterConfig{ - ID: app.ID(), - Document: model, - SBOM: s, - AppConfig: opts, - DBStatus: status, - Pretty: opts.Pretty, + ID: app.ID(), + Document: model, + SBOM: s, + Pretty: opts.Pretty, }); err != nil { errs = appendErrors(errs, err) } + log.WithFields("time", time.Since(startTime)).Trace("wrote vulnerability report") + return errs } +func dbInfo(status *vulnerability.ProviderStatus, vp vulnerability.Provider) any { + var providers map[string]vulnerability.DataProvenance + + if vp != nil { + providers = make(map[string]vulnerability.DataProvenance) + if dpr, ok := vp.(vulnerability.StoreMetadataProvider); ok { + dps, err := dpr.DataProvenance() + // ignore errors here + if err == nil { + providers = dps + } + } + } + + return struct { + Status *vulnerability.ProviderStatus `json:"status"` + Providers map[string]vulnerability.DataProvenance `json:"providers"` + }{ + Status: status, + Providers: providers, + } +} + func applyDistroHint(pkgs []pkg.Package, context *pkg.Context, opts *options.Grype) { if opts.Distro != "" { log.Infof("using distro: %s", opts.Distro) @@ -230,28 +261,21 @@ func applyDistroHint(pkgs []pkg.Package, context *pkg.Context, opts *options.Gry if len(split) > 1 { v = split[1] } - context.Distro = &linux.Release{ - PrettyName: d, - Name: d, - ID: d, - IDLike: []string{ - d, - }, - Version: v, - VersionID: v, - } + 
context.Distro = distro.NewFromNameVersion(d, v)
 	}
 
-	hasOSPackage := false
+	hasOSPackageWithoutDistro := false
 	for _, p := range pkgs {
 		switch p.Type {
 		case syftPkg.AlpmPkg, syftPkg.DebPkg, syftPkg.RpmPkg, syftPkg.KbPkg:
-			hasOSPackage = true
+			if p.Distro == nil {
+				hasOSPackageWithoutDistro = true
+			}
 		}
 	}
 
-	if context.Distro == nil && hasOSPackage {
-		log.Warnf("Unable to determine the OS distribution. This may result in missing vulnerabilities. " +
+	if context.Distro == nil && hasOSPackageWithoutDistro {
+		log.Warnf("Unable to determine the OS distribution of some packages. This may result in missing vulnerabilities. " +
 			"You may specify a distro using: --distro <distro>:<version>")
 	}
 }
@@ -326,7 +350,7 @@ func getProviderConfig(opts *options.Grype) pkg.ProviderConfig {
 	}
 }
 
-func validateDBLoad(loadErr error, status *v6.Status) error {
+func validateDBLoad(loadErr error, status *vulnerability.ProviderStatus) error {
 	if loadErr != nil {
 		// notify the user about grype db delete to fix checksum errors
 		if strings.Contains(loadErr.Error(), "checksum") {
@@ -340,8 +364,8 @@
 	if status == nil {
 		return fmt.Errorf("unable to determine the status of the vulnerability db")
 	}
-	if status.Err != nil {
-		return fmt.Errorf("db could not be loaded: %w", status.Err)
+	if status.Error != nil {
+		return fmt.Errorf("db could not be loaded: %w", status.Error)
 	}
 	return nil
 }
@@ -373,7 +397,9 @@
 }
 
 func applyVexRules(opts *options.Grype) error {
-	if len(opts.Ignore) == 0 && len(opts.VexDocuments) > 0 {
+	// If any vex documents are provided, assume the user intends to ignore vulnerabilities that those
+	// vex documents list as "fixed" or "not_affected".
+	if len(opts.VexDocuments) > 0 {
 		opts.Ignore = append(opts.Ignore, ignoreVEXFixedNotAffected...)
} diff --git a/cmd/grype/cli/commands/root_test.go b/cmd/grype/cli/commands/root_test.go index bd1d27b80dc..edfa10dcfc7 100644 --- a/cmd/grype/cli/commands/root_test.go +++ b/cmd/grype/cli/commands/root_test.go @@ -6,10 +6,13 @@ import ( "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/anchore/clio" "github.com/anchore/grype/cmd/grype/cli/options" + "github.com/anchore/grype/grype/match" "github.com/anchore/grype/grype/pkg" + "github.com/anchore/grype/grype/vex" "github.com/anchore/stereoscope/pkg/image" "github.com/anchore/syft/syft" "github.com/anchore/syft/syft/cataloging" @@ -28,24 +31,24 @@ func Test_applyDistroHint(t *testing.T) { applyDistroHint([]pkg.Package{}, &ctx, &cfg) assert.NotNil(t, ctx.Distro) - assert.Equal(t, "alpine", ctx.Distro.Name) + assert.Equal(t, "alpine", ctx.Distro.Name()) assert.Equal(t, "3.10", ctx.Distro.Version) // does override an existing distro - cfg.Distro = "ubuntu:latest" + cfg.Distro = "ubuntu:24.04" applyDistroHint([]pkg.Package{}, &ctx, &cfg) assert.NotNil(t, ctx.Distro) - assert.Equal(t, "ubuntu", ctx.Distro.Name) - assert.Equal(t, "latest", ctx.Distro.Version) + assert.Equal(t, "ubuntu", ctx.Distro.Name()) + assert.Equal(t, "24.04", ctx.Distro.Version) // doesn't remove an existing distro when empty cfg.Distro = "" applyDistroHint([]pkg.Package{}, &ctx, &cfg) assert.NotNil(t, ctx.Distro) - assert.Equal(t, "ubuntu", ctx.Distro.Name) - assert.Equal(t, "latest", ctx.Distro.Version) + assert.Equal(t, "ubuntu", ctx.Distro.Name()) + assert.Equal(t, "24.04", ctx.Distro.Version) } func Test_getProviderConfig(t *testing.T) { @@ -86,3 +89,118 @@ func Test_getProviderConfig(t *testing.T) { }) } } + +func Test_applyVexRules(t *testing.T) { + tests := []struct { + name string + initialIgnoreRules []match.IgnoreRule + vexDocuments []string + vexAdd []string + expectedIgnoreRules []match.IgnoreRule + expectError bool + expectedErrorSubstring string + }{ + { + name: "no VEX documents provided - no rules added", + initialIgnoreRules: []match.IgnoreRule{}, + vexDocuments: []string{}, + vexAdd: []string{}, + expectedIgnoreRules: []match.IgnoreRule{}, + expectError: false, + }, + { + name: "VEX documents provided with empty ignore rules - automatic rules added", + initialIgnoreRules: []match.IgnoreRule{}, + vexDocuments: []string{"path/to/vex.json"}, + vexAdd: []string{}, + expectedIgnoreRules: []match.IgnoreRule{ + {VexStatus: string(vex.StatusNotAffected)}, + {VexStatus: string(vex.StatusFixed)}, + }, + expectError: false, + }, + { + name: "VEX documents provided with existing ignore rules - automatic rules still added", + initialIgnoreRules: []match.IgnoreRule{ + {Vulnerability: "CVE-2023-1234"}, + }, + vexDocuments: []string{"path/to/vex.json"}, + vexAdd: []string{}, + expectedIgnoreRules: []match.IgnoreRule{ + {Vulnerability: "CVE-2023-1234"}, + {VexStatus: string(vex.StatusNotAffected)}, + {VexStatus: string(vex.StatusFixed)}, + }, + expectError: false, + }, + { + name: "vex-add with valid statuses", + initialIgnoreRules: []match.IgnoreRule{}, + vexDocuments: []string{"path/to/vex.json"}, + vexAdd: []string{"affected", "under_investigation"}, + expectedIgnoreRules: []match.IgnoreRule{ + {VexStatus: string(vex.StatusNotAffected)}, + {VexStatus: string(vex.StatusFixed)}, + {VexStatus: string(vex.StatusAffected)}, + {VexStatus: string(vex.StatusUnderInvestigation)}, + }, + expectError: false, + }, + { + name: "vex-add with invalid status", + 
initialIgnoreRules: []match.IgnoreRule{}, + vexDocuments: []string{"path/to/vex.json"}, + vexAdd: []string{"invalid_status"}, + expectedIgnoreRules: nil, + expectError: true, + expectedErrorSubstring: "invalid VEX status in vex-add setting: invalid_status", + }, + { + name: "vex-add attempting to use fixed status", + initialIgnoreRules: []match.IgnoreRule{}, + vexDocuments: []string{"path/to/vex.json"}, + vexAdd: []string{"fixed"}, + expectedIgnoreRules: nil, + expectError: true, + expectedErrorSubstring: "invalid VEX status in vex-add setting: fixed", + }, + { + name: "multiple VEX documents with existing rules", + initialIgnoreRules: []match.IgnoreRule{ + {Vulnerability: "CVE-2023-1234"}, + {FixState: "unknown"}, + }, + vexDocuments: []string{"vex1.json", "vex2.json"}, + vexAdd: []string{"affected"}, + expectedIgnoreRules: []match.IgnoreRule{ + {Vulnerability: "CVE-2023-1234"}, + {FixState: "unknown"}, + {VexStatus: string(vex.StatusNotAffected)}, + {VexStatus: string(vex.StatusFixed)}, + {VexStatus: string(vex.StatusAffected)}, + }, + expectError: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + opts := &options.Grype{ + Ignore: append([]match.IgnoreRule{}, tt.initialIgnoreRules...), + VexDocuments: tt.vexDocuments, + VexAdd: tt.vexAdd, + } + + err := applyVexRules(opts) + + if tt.expectError { + require.Error(t, err) + assert.Contains(t, err.Error(), tt.expectedErrorSubstring) + return + } + + require.NoError(t, err) + assert.Equal(t, tt.expectedIgnoreRules, opts.Ignore) + }) + } +} diff --git a/cmd/grype/cli/commands/util.go b/cmd/grype/cli/commands/util.go index 0a60dec73aa..2a2cdb1a3f5 100644 --- a/cmd/grype/cli/commands/util.go +++ b/cmd/grype/cli/commands/util.go @@ -1,7 +1,6 @@ package commands import ( - "bytes" "fmt" "io" "os" @@ -10,6 +9,8 @@ import ( "github.com/hashicorp/go-multierror" "github.com/olekukonko/tablewriter" + "github.com/olekukonko/tablewriter/renderer" + "github.com/olekukonko/tablewriter/tw" "github.com/spf13/cobra" "golang.org/x/exp/maps" @@ -87,65 +88,36 @@ func appendErrors(errs error, err ...error) error { return multierror.Append(errs, err...) } -func newTable(output io.Writer) *tablewriter.Table { - // we use a trimming writer to ensure that the table is not padded with spaces when there is a single long row - // and several short rows. AFAICT there is no table setting to control this behavior. Why do it as a writer? So - // we don't need to buffer the entire table in memory before writing it out. - table := tablewriter.NewWriter(newTrimmingWriter(output)) - table.SetAutoWrapText(false) - table.SetHeaderAlignment(tablewriter.ALIGN_LEFT) - table.SetAlignment(tablewriter.ALIGN_LEFT) - - table.SetHeaderLine(false) - table.SetBorder(false) - table.SetAutoFormatHeaders(true) - table.SetCenterSeparator("") - table.SetColumnSeparator("") - table.SetRowSeparator("") - table.SetTablePadding(" ") - table.SetNoWhiteSpace(true) - return table -} - -// trimmingWriter is a writer that trims whitespace from the end of each line. It is assumed that whole lines are -// passed to Write() calls (no partial lines). 
-type trimmingWriter struct { - output io.Writer - buffer bytes.Buffer -} - -func newTrimmingWriter(w io.Writer) *trimmingWriter { - return &trimmingWriter{output: w} -} - -func (tw *trimmingWriter) Write(p []byte) (int, error) { - for _, b := range p { - switch b { - case '\n': - // write a newline and discard any buffered spaces - _, err := tw.output.Write([]byte{'\n'}) - if err != nil { - return 0, err - } - tw.buffer.Reset() - case ' ', '\t': - // buffer spaces and tabs - tw.buffer.WriteByte(b) - default: - // write any buffered spaces, then the non-whitespace character - if tw.buffer.Len() > 0 { - _, err := tw.output.Write(tw.buffer.Bytes()) - if err != nil { - return 0, err - } - tw.buffer.Reset() - } - _, err := tw.output.Write([]byte{b}) - if err != nil { - return 0, err - } - } - } - - return len(p), nil +func newTable(output io.Writer, columns []string) *tablewriter.Table { + return tablewriter.NewTable(output, + tablewriter.WithHeader(columns), + tablewriter.WithHeaderAutoWrap(tw.WrapNone), + tablewriter.WithRowAutoWrap(tw.WrapNone), + tablewriter.WithAutoHide(tw.On), + tablewriter.WithRenderer(renderer.NewBlueprint()), + tablewriter.WithBehavior( + tw.Behavior{ + TrimSpace: tw.On, + AutoHide: tw.On, + }, + ), + tablewriter.WithPadding( + tw.Padding{ + Right: " ", + }, + ), + tablewriter.WithRendition( + tw.Rendition{ + Symbols: tw.NewSymbols(tw.StyleNone), + Settings: tw.Settings{ + Lines: tw.Lines{ + ShowTop: tw.Off, + ShowBottom: tw.Off, + ShowHeaderLine: tw.Off, + ShowFooterLine: tw.Off, + }, + }, + }, + ), + ) } diff --git a/cmd/grype/cli/commands/util_test.go b/cmd/grype/cli/commands/util_test.go index 8f868708701..fb04777c0db 100644 --- a/cmd/grype/cli/commands/util_test.go +++ b/cmd/grype/cli/commands/util_test.go @@ -1,14 +1,12 @@ package commands import ( - "bytes" "fmt" "sync" "sync/atomic" "testing" "github.com/hashicorp/go-multierror" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -165,59 +163,3 @@ func Test_parallelMapped(t *testing.T) { }) } } - -func TestTrimmingWriter(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "removes trailing spaces", - input: "line with trailing spaces \nline with no trailing spaces\n", - expected: "line with trailing spaces\nline with no trailing spaces\n", - }, - { - name: "handles multiple spaces and tabs", - input: "line with tabs\t\t\t\nline with spaces \t \t\t\n", - expected: "line with tabs\nline with spaces\n", - }, - { - name: "handles embedded whitespace", - input: "line one with spaces and tabs\t\t\nnext line\t\n", - expected: "line one with spaces and tabs\nnext line\n", - }, - { - name: "handles empty input", - input: "", - expected: "", - }, - { - name: "handles only spaces and tabs", - input: " \t\t\n \t \t\n", - expected: "\n\n", - }, - { - name: "handles single character input", - input: "a", - expected: "a", - }, - { - name: "handles input ending without newline", - input: "line without newline ", - expected: "line without newline", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - var output bytes.Buffer - writer := newTrimmingWriter(&output) - - n, err := writer.Write([]byte(tt.input)) - assert.NoError(t, err) - assert.Equal(t, len(tt.input), n) - assert.Equal(t, tt.expected, output.String()) - }) - } -} diff --git a/cmd/grype/cli/options/database.go b/cmd/grype/cli/options/database.go index 73044576866..ccafe815920 100644 --- a/cmd/grype/cli/options/database.go +++ b/cmd/grype/cli/options/database.go 
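As a minimal within-package sketch (not part of this change): with the tablewriter upgrade above, Bulk and Render now return errors, so callers propagate them instead of rendering unconditionally; the function name and row data below are hypothetical and assume the same commands package as newTable.

// renderExample assumes it lives in the same package as the newTable helper above.
func renderExample(out io.Writer) error {
    table := newTable(out, []string{"Name", "Version"})
    rows := [][]string{
        {"pkg-a", "1.0.0"},
        {"pkg-b", "2.3.1"},
    }
    if err := table.Bulk(rows); err != nil {
        return fmt.Errorf("failed to add table rows: %w", err)
    }
    return table.Render()
}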
@@ -4,6 +4,7 @@ import ( "time" "github.com/anchore/clio" + "github.com/anchore/go-homedir" "github.com/anchore/grype/grype/db/v6/distribution" "github.com/anchore/grype/grype/db/v6/installation" ) @@ -25,6 +26,7 @@ type Database struct { var _ interface { clio.FieldDescriber + clio.PostLoader } = (*Database)(nil) func DefaultDatabase(id clio.Identification) Database { @@ -64,3 +66,9 @@ This file is ~156KiB as of 2024-04-17 so the download should be quick; adjust as The DB is ~156MB as of 2024-04-17 so slower connections may exceed the default timeout; adjust as needed`) descriptions.Add(&cfg.MaxUpdateCheckFrequency, `Maximum frequency to check for vulnerability database updates`) } + +func (cfg *Database) PostLoad() error { + var err error + cfg.Dir, err = homedir.Expand(cfg.Dir) + return err +} diff --git a/cmd/grype/cli/options/database_search_os.go b/cmd/grype/cli/options/database_search_os.go index 4f2f6fd2c1d..00c073dd881 100644 --- a/cmd/grype/cli/options/database_search_os.go +++ b/cmd/grype/cli/options/database_search_os.go @@ -32,9 +32,6 @@ func (o *DBSearchOSs) PostLoad() error { if err != nil { return err } - if spec != nil { - spec.AllowMultiple = true - } specs = append(specs, spec) } o.Specs = specs diff --git a/cmd/grype/cli/options/database_search_os_test.go b/cmd/grype/cli/options/database_search_os_test.go index f4ae9af4b78..26b485fc1dc 100644 --- a/cmd/grype/cli/options/database_search_os_test.go +++ b/cmd/grype/cli/options/database_search_os_test.go @@ -27,7 +27,7 @@ func TestDBSearchOSsPostLoad(t *testing.T) { OSs: []string{"ubuntu"}, }, expectedSpecs: []*v6.OSSpecifier{ - {Name: "ubuntu", AllowMultiple: true}, + {Name: "ubuntu"}, }, }, { @@ -36,7 +36,7 @@ func TestDBSearchOSsPostLoad(t *testing.T) { OSs: []string{"ubuntu@20"}, }, expectedSpecs: []*v6.OSSpecifier{ - {Name: "ubuntu", MajorVersion: "20", AllowMultiple: true}, + {Name: "ubuntu", MajorVersion: "20"}, }, }, { @@ -45,7 +45,7 @@ func TestDBSearchOSsPostLoad(t *testing.T) { OSs: []string{"ubuntu@20.04"}, }, expectedSpecs: []*v6.OSSpecifier{ - {Name: "ubuntu", MajorVersion: "20", MinorVersion: "04", AllowMultiple: true}, + {Name: "ubuntu", MajorVersion: "20", MinorVersion: "04"}, }, }, { @@ -54,7 +54,7 @@ func TestDBSearchOSsPostLoad(t *testing.T) { OSs: []string{"ubuntu@focal"}, }, expectedSpecs: []*v6.OSSpecifier{ - {Name: "ubuntu", LabelVersion: "focal", AllowMultiple: true}, + {Name: "ubuntu", LabelVersion: "focal"}, }, }, { @@ -70,7 +70,7 @@ func TestDBSearchOSsPostLoad(t *testing.T) { OSs: []string{"ubuntu:20"}, }, expectedSpecs: []*v6.OSSpecifier{ - {Name: "ubuntu", MajorVersion: "20", AllowMultiple: true}, + {Name: "ubuntu", MajorVersion: "20"}, }, }, { diff --git a/cmd/grype/cli/options/grype.go b/cmd/grype/cli/options/grype.go index 4bd5b3f6e7d..0dab835425b 100644 --- a/cmd/grype/cli/options/grype.go +++ b/cmd/grype/cli/options/grype.go @@ -31,6 +31,7 @@ type Grype struct { Registry registry `yaml:"registry" json:"registry" mapstructure:"registry"` ShowSuppressed bool `yaml:"show-suppressed" json:"show-suppressed" mapstructure:"show-suppressed"` ByCVE bool `yaml:"by-cve" json:"by-cve" mapstructure:"by-cve"` // --by-cve, indicates if the original match vulnerability IDs should be preserved or the CVE should be used instead + SortBy SortBy `yaml:",inline" json:",inline" mapstructure:",squash"` Name string `yaml:"name" json:"name" mapstructure:"name"` DefaultImagePullSource string `yaml:"default-image-pull-source" json:"default-image-pull-source" mapstructure:"default-image-pull-source"` VexDocuments 
[]string `yaml:"vex-documents" json:"vex-documents" mapstructure:"vex-documents"` @@ -64,6 +65,7 @@ func DefaultGrype(id clio.Identification) *Grype { CheckForAppUpdate: true, VexAdd: []string{}, MatchUpstreamKernelHeaders: false, + SortBy: defaultSortBy(), } } diff --git a/cmd/grype/cli/options/sort_by.go b/cmd/grype/cli/options/sort_by.go new file mode 100644 index 00000000000..8a7a6121880 --- /dev/null +++ b/cmd/grype/cli/options/sort_by.go @@ -0,0 +1,47 @@ +package options + +import ( + "fmt" + "strings" + + "github.com/scylladb/go-set/strset" + + "github.com/anchore/clio" + "github.com/anchore/fangs" + "github.com/anchore/grype/grype/presenter/models" +) + +var _ interface { + fangs.FlagAdder + fangs.PostLoader +} = (*SortBy)(nil) + +type SortBy struct { + Criteria string `yaml:"sort-by" json:"sort-by" mapstructure:"sort-by"` + AllowableOptions []string `yaml:"-" json:"-" mapstructure:"-"` +} + +func defaultSortBy() SortBy { + var strategies []string + for _, s := range models.SortStrategies() { + strategies = append(strategies, strings.ToLower(s.String())) + } + return SortBy{ + Criteria: models.DefaultSortStrategy.String(), + AllowableOptions: strategies, + } +} + +func (o *SortBy) AddFlags(flags clio.FlagSet) { + flags.StringVarP(&o.Criteria, + "sort-by", "", + fmt.Sprintf("sort the match results with the given strategy, options=%v", o.AllowableOptions), + ) +} + +func (o *SortBy) PostLoad() error { + if !strset.New(o.AllowableOptions...).Has(strings.ToLower(o.Criteria)) { + return fmt.Errorf("invalid sort-by criteria: %q (allowable: %s)", o.Criteria, strings.Join(o.AllowableOptions, ", ")) + } + return nil +} diff --git a/go.mod b/go.mod index d39f72ba4f9..b5166e255eb 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/anchore/grype -go 1.24.0 +go 1.24.1 require ( github.com/CycloneDX/cyclonedx-go v0.9.2 @@ -10,29 +10,34 @@ require ( github.com/adrg/xdg v0.5.3 github.com/anchore/archiver/v3 v3.5.3-0.20241210171143-5b1d8d1c7c51 github.com/anchore/bubbly v0.0.0-20231115134915-def0aba654a9 - github.com/anchore/clio v0.0.0-20241115144204-29e89f9fa837 + github.com/anchore/clio v0.0.0-20250408180537-ec8fa27f0d9f + github.com/anchore/fangs v0.0.0-20250402135612-96e29e45f3fe github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537 - github.com/anchore/go-logger v0.0.0-20230725134548-c21dafa1ec5a + github.com/anchore/go-homedir v0.0.0-20250319154043-c29668562e4d + github.com/anchore/go-logger v0.0.0-20250318195838-07ae343dd722 github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04 github.com/anchore/go-version v1.2.2-0.20210903204242-51efa5b487c4 github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115 - github.com/anchore/stereoscope v0.0.13 - github.com/anchore/syft v1.20.0 + github.com/anchore/stereoscope v0.1.6 + github.com/anchore/syft v1.28.0 github.com/aquasecurity/go-pep440-version v0.0.1 github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de + github.com/bitnami/go-version v0.0.0-20250131085805-b1f57a8634ef github.com/bmatcuk/doublestar/v2 v2.0.4 - github.com/charmbracelet/bubbletea v1.3.4 - github.com/charmbracelet/lipgloss v1.0.0 + github.com/charmbracelet/bubbletea v1.3.5 + github.com/charmbracelet/lipgloss v1.1.0 github.com/dave/jennifer v1.7.1 - github.com/docker/docker v28.0.1+incompatible + github.com/docker/docker v28.3.0+incompatible github.com/dustin/go-humanize v1.0.1 github.com/facebookincubator/nvdtools v0.1.5 - github.com/gabriel-vasile/mimetype v1.4.8 - github.com/gkampitakis/go-snaps v0.5.11 + 
github.com/gabriel-vasile/mimetype v1.4.9 + github.com/gkampitakis/go-snaps v0.5.13 github.com/glebarez/sqlite v1.11.0 github.com/go-test/deep v1.1.1 + github.com/go-viper/mapstructure/v2 v2.3.0 + github.com/gohugoio/hashstructure v0.5.0 github.com/google/go-cmp v0.7.0 - github.com/google/go-containerregistry v0.20.3 + github.com/google/go-containerregistry v0.20.6 github.com/google/uuid v1.6.0 github.com/gookit/color v1.5.4 github.com/hako/durafmt v0.0.0-20210608085754-5c1018a4e16b @@ -45,17 +50,15 @@ require ( github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f github.com/knqyf263/go-deb-version v0.0.0-20190517075300-09fca494f03d github.com/masahiro331/go-mvn-version v0.0.0-20210429150710-d3157d602a08 - github.com/mitchellh/go-homedir v1.1.0 - github.com/mitchellh/hashstructure/v2 v2.0.2 - github.com/mitchellh/mapstructure v1.5.0 - github.com/olekukonko/tablewriter v0.0.5 + github.com/muesli/termenv v0.16.0 + github.com/olekukonko/tablewriter v1.0.7 github.com/openvex/go-vex v0.2.5 github.com/owenrumney/go-sarif v1.1.2-0.20231003122901-1000f5e05554 github.com/pandatix/go-cvss v0.6.2 // pinned to pull in 386 arch fix: https://github.com/scylladb/go-set/commit/cc7b2070d91ebf40d233207b633e28f5bd8f03a5 github.com/scylladb/go-set v1.0.3-0.20200225121959-cc7b2070d91e - github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 - github.com/spf13/afero v1.12.0 + github.com/sergi/go-diff v1.4.0 + github.com/spf13/afero v1.14.0 github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 github.com/ulikunitz/xz v0.5.12 @@ -63,45 +66,44 @@ require ( github.com/wagoodman/go-presenter v0.0.0-20211015174752-f9c01afc824b github.com/wagoodman/go-progress v0.0.0-20230925121702-07e42b3cdba0 github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 - golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 - golang.org/x/time v0.11.0 - golang.org/x/tools v0.31.0 + golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 + golang.org/x/time v0.12.0 + golang.org/x/tools v0.34.0 gopkg.in/yaml.v3 v3.0.1 - gorm.io/gorm v1.25.12 + gorm.io/gorm v1.30.0 ) -require github.com/DataDog/zstd v1.5.5 // indirect - -require github.com/muesli/termenv v0.16.0 - require ( cel.dev/expr v0.16.1 // indirect cloud.google.com/go v0.116.0 // indirect cloud.google.com/go/auth v0.13.0 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.6 // indirect - cloud.google.com/go/compute/metadata v0.6.0 // indirect + cloud.google.com/go/compute/metadata v0.7.0 // indirect cloud.google.com/go/iam v1.2.2 // indirect cloud.google.com/go/monitoring v1.21.2 // indirect cloud.google.com/go/storage v1.49.0 // indirect dario.cat/mergo v1.0.1 // indirect github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 // indirect github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20230306123547-8075edf89bb0 // indirect - github.com/BurntSushi/toml v1.4.0 // indirect + github.com/BurntSushi/toml v1.5.0 // indirect + github.com/DataDog/zstd v1.5.5 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.48.1 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1 // indirect github.com/Masterminds/goutils v1.1.1 // indirect - github.com/Masterminds/semver v1.5.0 // indirect - github.com/Masterminds/semver/v3 v3.3.0 // indirect + github.com/Masterminds/semver/v3 v3.4.0 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect github.com/Microsoft/hcsshim v0.11.7 
// indirect - github.com/ProtonMail/go-crypto v1.1.5 // indirect + github.com/ProtonMail/go-crypto v1.2.0 // indirect + github.com/STARRY-S/zip v0.2.1 // indirect github.com/acobaugh/osrelease v0.1.0 // indirect github.com/agext/levenshtein v1.2.1 // indirect - github.com/anchore/fangs v0.0.0-20241014225144-4e1713cafd77 // indirect + github.com/anchore/go-lzo v0.1.0 // indirect github.com/anchore/go-macholibre v0.0.0-20220308212642-53e6d0aaf6fb // indirect + github.com/anchore/go-rpmdb v0.0.0-20250516171929-f77691e1faec // indirect github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 // indirect - github.com/andybalholm/brotli v1.1.1 // indirect + github.com/anchore/go-sync v0.0.0-20250326131806-4eda43a485b6 // indirect + github.com/andybalholm/brotli v1.1.2-0.20250424173009-453214e765f3 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/aquasecurity/go-version v0.0.1 // indirect github.com/aws/aws-sdk-go v1.44.288 // indirect @@ -109,70 +111,77 @@ require ( github.com/bahlo/generic-list-go v0.2.0 // indirect github.com/becheran/wildmatch-go v1.0.0 // indirect github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect - github.com/bitnami/go-version v0.0.0-20250131085805-b1f57a8634ef // indirect + github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb // indirect github.com/bmatcuk/doublestar/v4 v4.8.1 // indirect + github.com/bodgit/plumbing v1.3.0 // indirect + github.com/bodgit/sevenzip v1.6.0 // indirect + github.com/bodgit/windows v1.0.1 // indirect github.com/buger/jsonparser v1.1.1 // indirect github.com/census-instrumentation/opencensus-proto v0.4.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect - github.com/charmbracelet/bubbles v0.20.0 // indirect + github.com/charmbracelet/bubbles v0.21.0 // indirect + github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect github.com/charmbracelet/harmonica v0.2.0 // indirect github.com/charmbracelet/x/ansi v0.8.0 // indirect + github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect github.com/charmbracelet/x/term v0.2.1 // indirect - github.com/cloudflare/circl v1.3.8 // indirect + github.com/cloudflare/circl v1.6.1 // indirect github.com/cncf/xds/go v0.0.0-20240905190251-b4127c9b8d78 // indirect github.com/containerd/cgroups v1.1.0 // indirect - github.com/containerd/containerd v1.7.24 // indirect - github.com/containerd/containerd/api v1.7.19 // indirect - github.com/containerd/continuity v0.4.2 // indirect - github.com/containerd/errdefs v0.3.0 // indirect + github.com/containerd/containerd v1.7.27 // indirect + github.com/containerd/containerd/api v1.8.0 // indirect + github.com/containerd/continuity v0.4.4 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect github.com/containerd/fifo v1.1.0 // indirect github.com/containerd/log v0.1.0 // indirect github.com/containerd/platforms v0.2.1 // indirect github.com/containerd/stargz-snapshotter/estargz v0.16.3 // indirect - github.com/containerd/ttrpc v1.2.5 // indirect - github.com/containerd/typeurl/v2 v2.1.1 // indirect - github.com/cyphar/filepath-securejoin v0.3.6 // indirect + github.com/containerd/ttrpc v1.2.7 // indirect + github.com/containerd/typeurl/v2 v2.2.0 // indirect + github.com/cyphar/filepath-securejoin v0.4.1 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/deitch/magic v0.0.0-20230404182410-1ff89d7342da // indirect + 
github.com/diskfs/go-diskfs v1.6.1-0.20250601133945-2af1c7ece24c // indirect github.com/distribution/reference v0.6.0 // indirect - github.com/docker/cli v27.5.0+incompatible // indirect + github.com/docker/cli v28.3.0+incompatible // indirect github.com/docker/distribution v2.8.3+incompatible // indirect - github.com/docker/docker-credential-helpers v0.8.2 // indirect + github.com/docker/docker-credential-helpers v0.9.3 // indirect github.com/docker/go-connections v0.5.0 // indirect github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c // indirect github.com/docker/go-units v0.5.0 // indirect - github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect - github.com/edsrzf/mmap-go v1.1.0 // indirect + github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect github.com/elliotchance/phpserialize v1.4.0 // indirect github.com/emirpasic/gods v1.18.1 // indirect github.com/envoyproxy/go-control-plane v0.13.1 // indirect github.com/envoyproxy/protoc-gen-validate v1.1.0 // indirect github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/fatih/color v1.17.0 // indirect github.com/felixge/fgprof v0.9.5 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect - github.com/fsnotify/fsnotify v1.7.0 // indirect - github.com/github/go-spdx/v2 v2.3.2 // indirect - github.com/gkampitakis/ciinfo v0.3.1 // indirect + github.com/fsnotify/fsnotify v1.8.0 // indirect + github.com/github/go-spdx/v2 v2.3.3 // indirect + github.com/gkampitakis/ciinfo v0.3.2 // indirect github.com/gkampitakis/go-diff v1.3.2 // indirect - github.com/glebarez/go-sqlite v1.21.2 // indirect + github.com/glebarez/go-sqlite v1.22.0 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/go-billy/v5 v5.6.2 // indirect - github.com/go-git/go-git/v5 v5.13.2 // indirect - github.com/go-logr/logr v1.4.2 // indirect + github.com/go-git/go-git/v5 v5.16.2 // indirect + github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-restruct/restruct v1.2.0-alpha // indirect - github.com/goccy/go-yaml v1.15.13 // indirect + github.com/goccy/go-yaml v1.18.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/licensecheck v0.3.1 // indirect - github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd // indirect + github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e // indirect github.com/google/s2a-go v0.1.8 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect github.com/googleapis/gax-go/v2 v2.14.1 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-safetemp v1.0.0 // indirect - github.com/hashicorp/hcl v1.0.0 // indirect + github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/hcl/v2 v2.23.0 // indirect github.com/huandu/xstrings v1.5.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect @@ -183,30 +192,31 @@ require ( github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/kastenhq/goversion v0.0.0-20230811215019-93b2f8823953 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect - github.com/klauspost/compress v1.17.11 // indirect + github.com/klauspost/compress v1.18.0 // indirect github.com/klauspost/pgzip v1.2.6 // indirect - 
github.com/knqyf263/go-rpmdb v0.1.1 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/logrusorgru/aurora v2.0.3+incompatible // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect - github.com/magiconair/properties v1.8.9 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/maruel/natural v1.1.1 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 // indirect github.com/mattn/go-runewidth v0.0.16 // indirect github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect - github.com/microsoft/go-rustaudit v0.0.0-20220730194248-4b17361d90a5 // indirect + github.com/mholt/archives v0.1.3 // indirect + github.com/mikelolasagasti/xz v1.0.1 // indirect + github.com/minio/minlz v1.0.0 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect - github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect + github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/moby/docker-image-spec v1.3.1 // indirect github.com/moby/locker v1.0.1 // indirect github.com/moby/sys/mountinfo v0.7.2 // indirect - github.com/moby/sys/sequential v0.5.0 // indirect + github.com/moby/sys/sequential v0.6.0 // indirect github.com/moby/sys/signal v0.7.0 // indirect github.com/moby/sys/user v0.3.0 // indirect github.com/moby/sys/userns v0.1.0 // indirect @@ -214,49 +224,54 @@ require ( github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect github.com/muesli/cancelreader v0.2.2 // indirect github.com/ncruces/go-strftime v0.1.9 // indirect + github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1 // indirect github.com/nwaples/rardecode v1.1.3 // indirect + github.com/nwaples/rardecode/v2 v2.1.0 // indirect + github.com/olekukonko/errors v0.0.0-20250405072817-4e6d85265da6 // indirect + github.com/olekukonko/ll v0.0.8 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.1.0 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect github.com/opencontainers/runtime-spec v1.1.0 // indirect github.com/opencontainers/selinux v1.11.0 // indirect github.com/package-url/packageurl-go v0.1.1 // indirect github.com/pborman/indent v1.2.1 // indirect github.com/pelletier/go-toml v1.9.5 // indirect - github.com/pelletier/go-toml/v2 v2.2.2 // indirect - github.com/pierrec/lz4/v4 v4.1.21 // indirect + github.com/pelletier/go-toml/v2 v2.2.3 // indirect + github.com/pierrec/lz4/v4 v4.1.22 // indirect github.com/pjbgf/sha1cd v0.3.2 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pkg/profile v1.7.0 // indirect + github.com/pkg/xattr v0.4.9 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/rivo/uniseg v0.4.7 // indirect - github.com/rogpeppe/go-internal v1.13.1 // indirect - github.com/saferwall/pe v1.5.6 // indirect - github.com/sagikazarmark/locafero v0.4.0 // indirect - github.com/sagikazarmark/slog-shim v0.1.0 // indirect + github.com/rogpeppe/go-internal v1.14.1 // 
indirect + github.com/rust-secure-code/go-rustaudit v0.0.0-20250226111315-e20ec32e963c // indirect + github.com/sagikazarmark/locafero v0.9.0 // indirect github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d // indirect github.com/sassoftware/go-rpmutils v0.4.0 // indirect - github.com/secDre4mer/pkcs7 v0.0.0-20240322103146-665324a4461d // indirect github.com/shopspring/decimal v1.4.0 // indirect - github.com/sirupsen/logrus v1.9.3 // indirect - github.com/skeema/knownhosts v1.3.0 // indirect + github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af // indirect + github.com/skeema/knownhosts v1.3.1 // indirect + github.com/sorairolake/lzip-go v0.3.5 // indirect github.com/sourcegraph/conc v0.3.0 // indirect + github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb // indirect github.com/spdx/tools-golang v0.5.5 // indirect - github.com/spf13/cast v1.7.0 // indirect + github.com/spf13/cast v1.7.1 // indirect github.com/spf13/pflag v1.0.6 // indirect - github.com/spf13/viper v1.19.0 // indirect + github.com/spf13/viper v1.20.1 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.6.0 // indirect - github.com/sylabs/sif/v2 v2.20.2 // indirect - github.com/sylabs/squashfs v1.0.4 // indirect + github.com/sylabs/sif/v2 v2.21.1 // indirect + github.com/sylabs/squashfs v1.0.6 // indirect github.com/therootcompany/xz v1.0.1 // indirect github.com/tidwall/gjson v1.18.0 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect github.com/tidwall/sjson v1.2.5 // indirect github.com/vbatts/go-mtree v0.5.4 // indirect - github.com/vbatts/tar-split v0.11.6 // indirect + github.com/vbatts/tar-split v0.12.1 // indirect github.com/vifraa/gopom v1.0.0 // indirect github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect @@ -267,35 +282,34 @@ require ( go.opentelemetry.io/auto/sdk v1.1.0 // indirect go.opentelemetry.io/contrib/detectors/gcp v1.29.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 // indirect - go.opentelemetry.io/otel v1.33.0 // indirect - go.opentelemetry.io/otel/metric v1.33.0 // indirect - go.opentelemetry.io/otel/sdk v1.33.0 // indirect - go.opentelemetry.io/otel/sdk/metric v1.29.0 // indirect - go.opentelemetry.io/otel/trace v1.33.0 // indirect - go.uber.org/atomic v1.9.0 // indirect - go.uber.org/multierr v1.9.0 // indirect - golang.org/x/crypto v0.36.0 // indirect - golang.org/x/mod v0.24.0 // indirect - golang.org/x/net v0.37.0 // indirect - golang.org/x/oauth2 v0.25.0 // indirect - golang.org/x/sync v0.12.0 // indirect - golang.org/x/sys v0.31.0 // indirect - golang.org/x/term v0.30.0 // indirect - golang.org/x/text v0.23.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect + go.opentelemetry.io/otel v1.36.0 // indirect + go.opentelemetry.io/otel/metric v1.36.0 // indirect + go.opentelemetry.io/otel/sdk v1.36.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.36.0 // indirect + go.opentelemetry.io/otel/trace v1.36.0 // indirect + go.uber.org/multierr v1.11.0 // indirect + go4.org v0.0.0-20230225012048-214862532bf5 // indirect + golang.org/x/crypto v0.39.0 // indirect + golang.org/x/mod v0.25.0 // indirect + golang.org/x/net v0.41.0 // indirect + golang.org/x/oauth2 v0.30.0 // indirect + golang.org/x/sync v0.15.0 // indirect + golang.org/x/sys v0.33.0 // indirect + 
golang.org/x/term v0.32.0 // indirect + golang.org/x/text v0.26.0 // indirect golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect google.golang.org/api v0.215.0 // indirect google.golang.org/genproto v0.0.0-20241118233622-e639e219e697 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20241223144023-3abc09e42ca8 // indirect google.golang.org/grpc v1.67.3 // indirect - google.golang.org/protobuf v1.36.3 // indirect - gopkg.in/ini.v1 v1.67.0 // indirect + google.golang.org/protobuf v1.36.4 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect - modernc.org/libc v1.61.13 // indirect + modernc.org/libc v1.65.10 // indirect modernc.org/mathutil v1.7.1 // indirect - modernc.org/memory v1.8.2 // indirect - modernc.org/sqlite v1.35.0 // indirect + modernc.org/memory v1.11.0 // indirect + modernc.org/sqlite v1.38.0 // indirect ) // this is a breaking change, so we need to pin the version until glebarez/go-sqlite is updated to use internal/libc diff --git a/go.sum b/go.sum index d69a3797c22..d3b56ed48c9 100644 --- a/go.sum +++ b/go.sum @@ -185,8 +185,8 @@ cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZ cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= cloud.google.com/go/compute/metadata v0.2.1/go.mod h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= -cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= -cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= +cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= +cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= cloud.google.com/go/contactcenterinsights v1.3.0/go.mod h1:Eu2oemoePuEFc/xKFPjbTuPSj0fYJcPls9TFlPNnHHY= cloud.google.com/go/contactcenterinsights v1.4.0/go.mod h1:L2YzkGbPsv+vMQMCADxJoT9YiTTnSEd6fEvCeHTYVck= cloud.google.com/go/contactcenterinsights v1.6.0/go.mod h1:IIDlT6CLcDoyv79kDv8iWxMSTZhLxSCofVV5W6YFM/w= @@ -629,8 +629,8 @@ github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25 github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= -github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0= -github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= +github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/CycloneDX/cyclonedx-go v0.9.2 h1:688QHn2X/5nRezKe2ueIVCt+NRqf7fl3AVQk+vaFcIo= github.com/CycloneDX/cyclonedx-go v0.9.2/go.mod h1:vcK6pKgO1WanCdd61qx4bFnSsDJQ6SbM2ZuMIgq86Jg= @@ -648,10 +648,8 @@ github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapp github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk= github.com/Masterminds/goutils 
v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= -github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0= -github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= +github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs= github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0= github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= @@ -662,8 +660,10 @@ github.com/Microsoft/hcsshim v0.11.7/go.mod h1:MV8xMfmECjl5HdO7U/3/hFVnkmSBjAjmA github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8= github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q= -github.com/ProtonMail/go-crypto v1.1.5 h1:eoAQfK2dwL+tFSFpr7TbOaPNUbPiJj4fLYwwGE1FQO4= -github.com/ProtonMail/go-crypto v1.1.5/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/ProtonMail/go-crypto v1.2.0 h1:+PhXXn4SPGd+qk76TlEePBfOfivE0zkWFenhGhFLzWs= +github.com/ProtonMail/go-crypto v1.2.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= +github.com/STARRY-S/zip v0.2.1 h1:pWBd4tuSGm3wtpoqRZZ2EAwOmcHK6XFf7bU9qcJXyFg= +github.com/STARRY-S/zip v0.2.1/go.mod h1:xNvshLODWtC4EJ702g7cTYn13G53o1+X9BWnPFpcWV4= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= github.com/acobaugh/osrelease v0.1.0 h1:Yb59HQDGGNhCj4suHaFQQfBps5wyoKLSSX/J/+UifRE= @@ -684,32 +684,40 @@ github.com/anchore/archiver/v3 v3.5.3-0.20241210171143-5b1d8d1c7c51 h1:yhk+P8lF3 github.com/anchore/archiver/v3 v3.5.3-0.20241210171143-5b1d8d1c7c51/go.mod h1:nwuGSd7aZp0rtYt79YggCGafz1RYsclE7pi3fhLwvuw= github.com/anchore/bubbly v0.0.0-20231115134915-def0aba654a9 h1:p0ZIe0htYOX284Y4axJaGBvXHU0VCCzLN5Wf5XbKStU= github.com/anchore/bubbly v0.0.0-20231115134915-def0aba654a9/go.mod h1:3ZsFB9tzW3vl4gEiUeuSOMDnwroWxIxJelOOHUp8dSw= -github.com/anchore/clio v0.0.0-20241115144204-29e89f9fa837 h1:bIG3WsfosZsJ5LMC7PB9J/ekFM3a0j0ZEDvN3ID6GTI= -github.com/anchore/clio v0.0.0-20241115144204-29e89f9fa837/go.mod h1:tRQVKkjYeejrh9AdM0s1esbwtMU7rdHAHSQWkv4qskE= -github.com/anchore/fangs v0.0.0-20241014225144-4e1713cafd77 h1:h7+GCqazHVS5GDJYYS6wjjglYi8xFnVWMdSUukoImTM= -github.com/anchore/fangs v0.0.0-20241014225144-4e1713cafd77/go.mod h1:qbev5czQeyDO74fPNThiEKYkgt0mx1axb+5wQcxDPFY= +github.com/anchore/clio v0.0.0-20250408180537-ec8fa27f0d9f h1:jTeN+fKTXz1VFo3Zj7Msnx//s5kD6Htd+SS0z9/o7Ss= +github.com/anchore/clio v0.0.0-20250408180537-ec8fa27f0d9f/go.mod h1:jQ+jv7v9RQnc5oA+Z0rAyXsQfaCAZHwY/CJZiLVggQ4= +github.com/anchore/fangs v0.0.0-20250402135612-96e29e45f3fe h1:qv/xxpjF5RdKPqZjx8RM0aBi3HUCAO0DhRBMs2xhY1I= +github.com/anchore/fangs v0.0.0-20250402135612-96e29e45f3fe/go.mod h1:vrcYMDps9YXwwx2a9AsvipM6Fi5H9//9bymGb8G8BIQ= 
github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537 h1:GjNGuwK5jWjJMyVppBjYS54eOiiSNv4Ba869k4wh72Q= github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537/go.mod h1:1aiktV46ATCkuVg0O573ZrH56BUawTECPETbZyBcqT8= -github.com/anchore/go-logger v0.0.0-20230725134548-c21dafa1ec5a h1:nJ2G8zWKASyVClGVgG7sfM5mwoZlZ2zYpIzN2OhjWkw= -github.com/anchore/go-logger v0.0.0-20230725134548-c21dafa1ec5a/go.mod h1:ubLFmlsv8/DFUQrZwY5syT5/8Er3ugSr4rDFwHsE3hg= +github.com/anchore/go-homedir v0.0.0-20250319154043-c29668562e4d h1:gT69osH9AsdpOfqxbRwtxcNnSZ1zg4aKy2BevO3ZBdc= +github.com/anchore/go-homedir v0.0.0-20250319154043-c29668562e4d/go.mod h1:PhSnuFYknwPZkOWKB1jXBNToChBA+l0FjwOxtViIc50= +github.com/anchore/go-logger v0.0.0-20250318195838-07ae343dd722 h1:2SqmFgE7h+Ql4VyBzhjLkRF/3gDrcpUBj8LjvvO6OOM= +github.com/anchore/go-logger v0.0.0-20250318195838-07ae343dd722/go.mod h1:oFuE8YuTCM+spgMXhePGzk3asS94yO9biUfDzVTFqNw= +github.com/anchore/go-lzo v0.1.0 h1:NgAacnzqPeGH49Ky19QKLBZEuFRqtTG9cdaucc3Vncs= +github.com/anchore/go-lzo v0.1.0/go.mod h1:3kLx0bve2oN1iDwgM1U5zGku1Tfbdb0No5qp1eL1fIk= github.com/anchore/go-macholibre v0.0.0-20220308212642-53e6d0aaf6fb h1:iDMnx6LIjtjZ46C0akqveX83WFzhpTD3eqOthawb5vU= github.com/anchore/go-macholibre v0.0.0-20220308212642-53e6d0aaf6fb/go.mod h1:DmTY2Mfcv38hsHbG78xMiTDdxFtkHpgYNVDPsF2TgHk= +github.com/anchore/go-rpmdb v0.0.0-20250516171929-f77691e1faec h1:SjjPMOXTzpuU1ZME4XeoHyek+dry3/C7I8gzaCo02eg= +github.com/anchore/go-rpmdb v0.0.0-20250516171929-f77691e1faec/go.mod h1:eQVa6QFGzKy0qMcnW2pez0XBczvgwSjw9vA23qifEyU= github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 h1:aM1rlcoLz8y5B2r4tTLMiVTrMtpfY0O8EScKJxaSaEc= github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092/go.mod h1:rYqSE9HbjzpHTI74vwPvae4ZVYZd1lue2ta6xHPdblA= +github.com/anchore/go-sync v0.0.0-20250326131806-4eda43a485b6 h1:Ha+LSCVuXYSYGi7wIkJK6G8g6jI3LH7y6LbyEVyp4Io= +github.com/anchore/go-sync v0.0.0-20250326131806-4eda43a485b6/go.mod h1:+9oM3XUy8iea/vWj9FhZ9bQGUBN8JpPxxJm5Wbcx9XM= github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04 h1:VzprUTpc0vW0nnNKJfJieyH/TZ9UYAnTZs5/gHTdAe8= github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04/go.mod h1:6dK64g27Qi1qGQZ67gFmBFvEHScy0/C8qhQhNe5B5pQ= github.com/anchore/go-version v1.2.2-0.20210903204242-51efa5b487c4 h1:rmZG77uXgE+o2gozGEBoUMpX27lsku+xrMwlmBZJtbg= github.com/anchore/go-version v1.2.2-0.20210903204242-51efa5b487c4/go.mod h1:Bkc+JYWjMCF8OyZ340IMSIi2Ebf3uwByOk6ho4wne1E= github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115 h1:ZyRCmiEjnoGJZ1+Ah0ZZ/mKKqNhGcUZBl0s7PTTDzvY= github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115/go.mod h1:KoYIv7tdP5+CC9VGkeZV4/vGCKsY55VvoG+5dadg4YI= -github.com/anchore/stereoscope v0.0.13 h1:9Ivkh7k+vOeG3JHrt44jOg/8UdZrCvMsSjLQ7trHBig= -github.com/anchore/stereoscope v0.0.13/go.mod h1:QfhhFc2pezp5aX/dVJ5qnBFpBUv5+KUTphwaQLxMUig= -github.com/anchore/syft v1.20.0 h1:4nVM/eiqrb2GJCkW+d1xv8M5mxply8vVblpWOvVCgN8= -github.com/anchore/syft v1.20.0/go.mod h1:h8U0q+Fk7f1d9ay4oa+gDb//AJYFuQftrBLOuS6llz4= +github.com/anchore/stereoscope v0.1.6 h1:DxaPHugD9EndPxOaIMaEYjHJJURjKNaHzD1NyQUUmdU= +github.com/anchore/stereoscope v0.1.6/go.mod h1:ejAlYkAb/cRvSMlxQlrG2dMruqQpcJAh4w2Fu02FEYQ= +github.com/anchore/syft v1.28.0 h1:uLdCvWNb2btvCyfIawWOsXD238v6eDTaz5RTfS2lMqA= +github.com/anchore/syft v1.28.0/go.mod h1:jGpfAy5lRvOUrOxWAfbbu9t3TK8VwJpAAJHz6HFQofw= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod 
h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= -github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= -github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= +github.com/andybalholm/brotli v1.1.2-0.20250424173009-453214e765f3 h1:8PmGpDEZl9yDpcdEr6Odf23feCxK3LNUNMxjXg41pZQ= +github.com/andybalholm/brotli v1.1.2-0.20250424173009-453214e765f3/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= @@ -750,10 +758,18 @@ github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bitnami/go-version v0.0.0-20250131085805-b1f57a8634ef h1:TSFnfbbu2oAOuWbeDDTtwXWE6z+PmpgbSsMBeV7l0ww= github.com/bitnami/go-version v0.0.0-20250131085805-b1f57a8634ef/go.mod h1:9iglf1GG4oNRJ39bZ5AZrjgAFD2RwQbXw6Qf7Cs47wo= +github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb h1:m935MPodAbYS46DG4pJSv7WO+VECIWUQ7OJYSoTrMh4= +github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb/go.mod h1:PkYb9DJNAwrSvRx5DYA+gUcOIgTGVMNkfSCbZM8cWpI= github.com/bmatcuk/doublestar/v2 v2.0.4 h1:6I6oUiT/sU27eE2OFcWqBhL1SwjyvQuOssxT4a1yidI= github.com/bmatcuk/doublestar/v2 v2.0.4/go.mod h1:QMmcs3H2AUQICWhfzLXz+IYln8lRQmTZRptLie8RgRw= github.com/bmatcuk/doublestar/v4 v4.8.1 h1:54Bopc5c2cAvhLRAzqOGCYHYyhcDHsFF4wWIR5wKP38= github.com/bmatcuk/doublestar/v4 v4.8.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= +github.com/bodgit/plumbing v1.3.0 h1:pf9Itz1JOQgn7vEOE7v7nlEfBykYqvUYioC61TwWCFU= +github.com/bodgit/plumbing v1.3.0/go.mod h1:JOTb4XiRu5xfnmdnDJo6GmSbSbtSyufrsyZFByMtKEs= +github.com/bodgit/sevenzip v1.6.0 h1:a4R0Wu6/P1o1pP/3VV++aEOcyeBxeO/xE2Y9NSTrr6A= +github.com/bodgit/sevenzip v1.6.0/go.mod h1:zOBh9nJUof7tcrlqJFv1koWRrhz3LbDbUNngkuZxLMc= +github.com/bodgit/windows v1.0.1 h1:tF7K6KOluPYygXa3Z2594zxlkbKPAOvqr97etrGNIz4= +github.com/bodgit/windows v1.0.1/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M= @@ -772,16 +788,20 @@ github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/charmbracelet/bubbles v0.20.0 h1:jSZu6qD8cRQ6k9OMfR1WlM+ruM8fkPWkHvQWD9LIutE= -github.com/charmbracelet/bubbles v0.20.0/go.mod h1:39slydyswPy+uVOHZ5x/GjwVAFkCsV8IIVy+4MhzwwU= -github.com/charmbracelet/bubbletea v1.3.4 h1:kCg7B+jSCFPLYRA52SDZjr51kG/fMUEoPoZrkaDHyoI= -github.com/charmbracelet/bubbletea v1.3.4/go.mod h1:dtcUCyCGEX3g9tosuYiut3MXgY/Jsv9nKVdibKKRRXo= +github.com/charmbracelet/bubbles v0.21.0 h1:9TdC97SdRVg/1aaXNVWfFH3nnLAwOXr8Fn6u6mfQdFs= +github.com/charmbracelet/bubbles 
v0.21.0/go.mod h1:HF+v6QUR4HkEpz62dx7ym2xc71/KBHg+zKwJtMw+qtg= +github.com/charmbracelet/bubbletea v1.3.5 h1:JAMNLTbqMOhSwoELIr0qyP4VidFq72/6E9j7HHmRKQc= +github.com/charmbracelet/bubbletea v1.3.5/go.mod h1:TkCnmH+aBd4LrXhXcqrKiYwRs7qyQx5rBgH5fVY3v54= +github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs= +github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk= github.com/charmbracelet/harmonica v0.2.0 h1:8NxJWRWg/bzKqqEaaeFNipOu77YR5t8aSwG4pgaUBiQ= github.com/charmbracelet/harmonica v0.2.0/go.mod h1:KSri/1RMQOZLbw7AHqgcBycp8pgJnQMYYT8QZRqZ1Ao= -github.com/charmbracelet/lipgloss v1.0.0 h1:O7VkGDvqEdGi93X+DeqsQ7PKHDgtQfF8j8/O2qFMQNg= -github.com/charmbracelet/lipgloss v1.0.0/go.mod h1:U5fy9Z+C38obMs+T+tJqst9VGzlOYGj4ri9reL3qUlo= +github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY= +github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30= github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE= github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q= +github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd h1:vy0GVL4jeHEwG5YOXDmi86oYw2yuYUGqz6a8sLwg0X8= +github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs= github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= github.com/cheggaaa/pb v1.0.27/go.mod h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s= @@ -797,8 +817,8 @@ github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38 github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cloudflare/circl v1.3.8 h1:j+V8jJt09PoeMFIu2uh5JUyEaIHTXVOHslFoLNAKqwI= -github.com/cloudflare/circl v1.3.8/go.mod h1:PDRU+oXvdD7KCtgKxW95M5Z8BpSCJXQORiZFnBQS5QU= +github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= +github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= @@ -817,14 +837,16 @@ github.com/cncf/xds/go v0.0.0-20240905190251-b4127c9b8d78 h1:QVw89YDxXxEe+l8gU8E github.com/cncf/xds/go v0.0.0-20240905190251-b4127c9b8d78/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM= github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw= -github.com/containerd/containerd v1.7.24 h1:zxszGrGjrra1yYJW/6rhm9cJ1ZQ8rkKBR48brqsa7nA= -github.com/containerd/containerd v1.7.24/go.mod h1:7QUzfURqZWCZV7RLNEn1XjUCQLEf0bkaK4GjUaZehxw= -github.com/containerd/containerd/api v1.7.19 h1:VWbJL+8Ap4Ju2mx9c9qS1uFSB1OVYr5JJrW2yT5vFoA= 
-github.com/containerd/containerd/api v1.7.19/go.mod h1:fwGavl3LNwAV5ilJ0sbrABL44AQxmNjDRcwheXDb6Ig= -github.com/containerd/continuity v0.4.2 h1:v3y/4Yz5jwnvqPKJJ+7Wf93fyWoCB3F5EclWG023MDM= -github.com/containerd/continuity v0.4.2/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ= -github.com/containerd/errdefs v0.3.0 h1:FSZgGOeK4yuT/+DnF07/Olde/q4KBoMsaamhXxIMDp4= -github.com/containerd/errdefs v0.3.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/containerd v1.7.27 h1:yFyEyojddO3MIGVER2xJLWoCIn+Up4GaHFquP7hsFII= +github.com/containerd/containerd v1.7.27/go.mod h1:xZmPnl75Vc+BLGt4MIfu6bp+fy03gdHAn9bz+FreFR0= +github.com/containerd/containerd/api v1.8.0 h1:hVTNJKR8fMc/2Tiw60ZRijntNMd1U+JVMyTRdsD2bS0= +github.com/containerd/containerd/api v1.8.0/go.mod h1:dFv4lt6S20wTu/hMcP4350RL87qPWLVa/OHOwmmdnYc= +github.com/containerd/continuity v0.4.4 h1:/fNVfTJ7wIl/YPMHjf+5H32uFhl63JucB34PlCpMKII= +github.com/containerd/continuity v0.4.4/go.mod h1:/lNJvtJKUQStBzpVQ1+rasXO1LAWtUQssk28EZvJ3nE= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= github.com/containerd/fifo v1.1.0 h1:4I2mbh5stb1u6ycIABlBw9zgtlK8viPI9QkQNRQEEmY= github.com/containerd/fifo v1.1.0/go.mod h1:bmC4NWMbXlt2EZ0Hc7Fx7QzTFxgPID13eH0Qu+MAb2o= github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= @@ -833,17 +855,17 @@ github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpS github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= github.com/containerd/stargz-snapshotter/estargz v0.16.3 h1:7evrXtoh1mSbGj/pfRccTampEyKpjpOnS3CyiV1Ebr8= github.com/containerd/stargz-snapshotter/estargz v0.16.3/go.mod h1:uyr4BfYfOj3G9WBVE8cOlQmXAbPN9VEQpBBeJIuOipU= -github.com/containerd/ttrpc v1.2.5 h1:IFckT1EFQoFBMG4c3sMdT8EP3/aKfumK1msY+Ze4oLU= -github.com/containerd/ttrpc v1.2.5/go.mod h1:YCXHsb32f+Sq5/72xHubdiJRQY9inL4a4ZQrAbN1q9o= -github.com/containerd/typeurl/v2 v2.1.1 h1:3Q4Pt7i8nYwy2KmQWIw2+1hTvwTE/6w9FqcttATPO/4= -github.com/containerd/typeurl/v2 v2.1.1/go.mod h1:IDp2JFvbwZ31H8dQbEIY7sDl2L3o3HZj1hsSQlywkQ0= +github.com/containerd/ttrpc v1.2.7 h1:qIrroQvuOL9HQ1X6KHe2ohc7p+HP/0VE6XPU7elJRqQ= +github.com/containerd/ttrpc v1.2.7/go.mod h1:YCXHsb32f+Sq5/72xHubdiJRQY9inL4a4ZQrAbN1q9o= +github.com/containerd/typeurl/v2 v2.2.0 h1:6NBDbQzr7I5LHgp34xAXYF5DOTQDn05X58lsPEmzLso= +github.com/containerd/typeurl/v2 v2.2.0/go.mod h1:8XOOxnyatxSWuG8OfsZXVnAF4iZfedjS/8UHSPJnX4g= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/cyphar/filepath-securejoin v0.3.6 h1:4d9N5ykBnSp5Xn2JkhocYDkOpURL/18CYMpo6xB9uWM= -github.com/cyphar/filepath-securejoin v0.3.6/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= +github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= 
+github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= github.com/dave/jennifer v1.7.1 h1:B4jJJDHelWcDhlRQxWeo0Npa/pYKBLrirAQoTN45txo= github.com/dave/jennifer v1.7.1/go.mod h1:nXbxhEmQfOZhWml3D1cDK5M1FLnMSozpbFN/m3RmGZc= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -853,16 +875,20 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8Yc github.com/deitch/magic v0.0.0-20230404182410-1ff89d7342da h1:ZOjWpVsFZ06eIhnh4mkaceTiVoktdU67+M7KDHJ268M= github.com/deitch/magic v0.0.0-20230404182410-1ff89d7342da/go.mod h1:B3tI9iGHi4imdLi4Asdha1Sc6feLMTfPLXh9IUYmysk= github.com/dgrijalva/jwt-go/v4 v4.0.0-preview1/go.mod h1:+hnT3ywWDTAFrW5aE+u2Sa/wT555ZqwoCS+pk3p6ry4= +github.com/diskfs/go-diskfs v1.6.1-0.20250601133945-2af1c7ece24c h1:Vg+RNk+3Kwbe3wUYsgXsk43+7oyOJ5tRDzOrcpV40yQ= +github.com/diskfs/go-diskfs v1.6.1-0.20250601133945-2af1c7ece24c/go.mod h1:LhQyXqOugWFRahYUSw47NyZJPezFzB9UELwhpszLP/k= github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= -github.com/docker/cli v27.5.0+incompatible h1:aMphQkcGtpHixwwhAXJT1rrK/detk2JIvDaFkLctbGM= -github.com/docker/cli v27.5.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/djherbis/times v1.6.0 h1:w2ctJ92J8fBvWPxugmXIv7Nz7Q3iDMKNx9v5ocVH20c= +github.com/djherbis/times v1.6.0/go.mod h1:gOHeRAz2h+VJNZ5Gmc/o7iD9k4wW7NMVqieYCY99oc0= +github.com/docker/cli v28.3.0+incompatible h1:s+ttruVLhB5ayeuf2BciwDVxYdKi+RoUlxmwNHV3Vfo= +github.com/docker/cli v28.3.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v28.0.1+incompatible h1:FCHjSRdXhNRFjlHMTv4jUNlIBbTeRjrWfeFuJp7jpo0= -github.com/docker/docker v28.0.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo= -github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M= +github.com/docker/docker v28.3.0+incompatible h1:ffS62aKWupCWdvcee7nBU9fhnmknOqDPaJAMtfK0ImQ= +github.com/docker/docker v28.3.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.9.3 h1:gAm/VtF9wgqJMoxzT3Gj5p4AqIjCBS4wrsOh9yRqcz8= +github.com/docker/docker-credential-helpers v0.9.3/go.mod h1:x+4Gbw9aGmChi3qTLZj8Dfn0TD20M/fuWy0E5+WDeCo= github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c h1:+pKlWGMw7gf6bQ+oDZB4KHQFypsfjYlq/C4rfL7D3g8= @@ -870,18 +896,18 @@ github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c/go.mod h1:Uw6Uezg github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= -github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 
h1:iFaUwBSo5Svw6L7HYpRu/0lE3e0BaElwnNO1qkNQxBY= -github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s= +github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 h1:2tV76y6Q9BB+NEBasnqvs7e49aEBFI8ejC89PSnWH+4= +github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s= github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= -github.com/edsrzf/mmap-go v1.1.0 h1:6EUwBLQ/Mcr1EYLE4Tn1VdW1A4ckqCQWZBw8Hr0kjpQ= -github.com/edsrzf/mmap-go v1.1.0/go.mod h1:19H/e8pUPLicwkyNgOykDXkJ9F0MHE+Z52B8EIth78Q= -github.com/elazarl/goproxy v1.4.0 h1:4GyuSbFa+s26+3rmYNSuUVsx+HgPrV1bk1jXI0l9wjM= -github.com/elazarl/goproxy v1.4.0/go.mod h1:X/5W/t+gzDyLfHW4DrMdpjqYjpXsURlBt9lpBDxZZZQ= +github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= +github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= github.com/elliotchance/phpserialize v1.4.0 h1:cAp/9+KSnEbUC8oYCE32n2n84BeW8HOY3HMDI8hG2OY= github.com/elliotchance/phpserialize v1.4.0/go.mod h1:gt7XX9+ETUcLXbtTKEuyrqW3lcLUAeS/AnGZ2e49TZs= +github.com/elliotwutingfeng/asciiset v0.0.0-20230602022725-51bbb787efab h1:h1UgjJdAAhj+uPL68n7XASS6bU+07ZX1WJvVS2eyoeY= +github.com/elliotwutingfeng/asciiset v0.0.0-20230602022725-51bbb787efab/go.mod h1:GLo/8fDswSAniFG+BFIaiSPcK610jyzgEhWYPQwuQdw= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -913,6 +939,8 @@ github.com/facebookincubator/nvdtools v0.1.5/go.mod h1:Kh55SAWnjckS96TBSrXI99KrE github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= +github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= github.com/fatih/set v0.2.1 h1:nn2CaJyknWE/6txyUDGwysr3G5QC6xWB/PtVjPBbeaA= github.com/fatih/set v0.2.1/go.mod h1:+RKtMCH+favT2+3YecHGxcc0b4KyVWA1QWWJUs4E0CI= github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= @@ -925,21 +953,21 @@ github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzP github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= -github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= -github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= -github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= -github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8= +github.com/fsnotify/fsnotify 
v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= +github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= +github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/github/go-spdx/v2 v2.3.2 h1:IfdyNHTqzs4zAJjXdVQfRnxt1XMfycXoHBE2Vsm1bjs= -github.com/github/go-spdx/v2 v2.3.2/go.mod h1:2ZxKsOhvBp+OYBDlsGnUMcchLeo2mrpEBn2L1C+U3IQ= -github.com/gkampitakis/ciinfo v0.3.1 h1:lzjbemlGI4Q+XimPg64ss89x8Mf3xihJqy/0Mgagapo= -github.com/gkampitakis/ciinfo v0.3.1/go.mod h1:1NIwaOcFChN4fa/B0hEBdAb6npDlFL8Bwx4dfRLRqAo= +github.com/github/go-spdx/v2 v2.3.3 h1:QI7evnHWEfWkT54eJwkoV/f3a0xD3gLlnVmT5wQG6LE= +github.com/github/go-spdx/v2 v2.3.3/go.mod h1:2ZxKsOhvBp+OYBDlsGnUMcchLeo2mrpEBn2L1C+U3IQ= +github.com/gkampitakis/ciinfo v0.3.2 h1:JcuOPk8ZU7nZQjdUhctuhQofk7BGHuIy0c9Ez8BNhXs= +github.com/gkampitakis/ciinfo v0.3.2/go.mod h1:1NIwaOcFChN4fa/B0hEBdAb6npDlFL8Bwx4dfRLRqAo= github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZdC4M= github.com/gkampitakis/go-diff v1.3.2/go.mod h1:LLgOrpqleQe26cte8s36HTWcTmMEur6OPYerdAAS9tk= -github.com/gkampitakis/go-snaps v0.5.11 h1:LFG0ggUKR+KEiiaOvFCmLgJ5NO2zf93AxxddkBn3LdQ= -github.com/gkampitakis/go-snaps v0.5.11/go.mod h1:PcKmy8q5Se7p48ywpogN5Td13reipz1Iivah4wrTIvY= -github.com/glebarez/go-sqlite v1.21.2 h1:3a6LFC4sKahUunAmynQKLZceZCOzUthkRkEAl9gAXWo= -github.com/glebarez/go-sqlite v1.21.2/go.mod h1:sfxdZyhQjTM2Wry3gVYWaW072Ri1WMdWJi0k6+3382k= +github.com/gkampitakis/go-snaps v0.5.13 h1:Hhjmvv1WboSCxkR9iU2mj5PQ8tsz/y8ECGrIbjjPF8Q= +github.com/gkampitakis/go-snaps v0.5.13/go.mod h1:HNpx/9GoKisdhw9AFOBT1N7DBs9DiHo/hGheFGBZ+mc= +github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ= +github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc= github.com/glebarez/sqlite v1.11.0 h1:wSG0irqzP6VurnMEpFGer5Li19RpIRi2qvQz++w0GMw= github.com/glebarez/sqlite v1.11.0/go.mod h1:h8/o8j5wiAsqSPoWELDUdJXhjAhsVliSn7bWZjOhrgQ= github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= @@ -955,8 +983,8 @@ github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UN github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= -github.com/go-git/go-git/v5 v5.13.2 h1:7O7xvsK7K+rZPKW6AQR1YyNhfywkv7B8/FsP3ki6Zv0= -github.com/go-git/go-git/v5 v5.13.2/go.mod h1:hWdW5P4YZRjmpGHwRH2v3zkWcNl6HeXaXQEMGb3NJ9A= +github.com/go-git/go-git/v5 v5.16.2 h1:fT6ZIOjE5iEnkzKyxTHK1W4HGAsPhqEqiSAssSO77hM= +github.com/go-git/go-git/v5 v5.16.2/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -967,8 +995,8 @@ github.com/go-latex/latex v0.0.0-20210823091927-c0d11ff05a81/go.mod h1:SX0U8uGpx 
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= -github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= @@ -979,16 +1007,20 @@ github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LB github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-test/deep v1.1.1 h1:0r/53hagsehfO4bzD2Pgr/+RgHqhmf+k1Bpse2cTu1U= github.com/go-test/deep v1.1.1/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= +github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk= +github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= -github.com/goccy/go-yaml v1.15.13 h1:Xd87Yddmr2rC1SLLTm2MNDcTjeO/GYo0JGiww6gSTDg= -github.com/goccy/go-yaml v1.15.13/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= +github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw= +github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/gohugoio/hashstructure v0.5.0 h1:G2fjSBU36RdwEJBWJ+919ERvOVqAg9tfcYp47K9swqg= +github.com/gohugoio/hashstructure v0.5.0/go.mod h1:Ser0TniXuu/eauYmrwM4o64EBvySxNzITEOLlm4igec= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= @@ -996,8 +1028,9 @@ github.com/golang/glog v1.1.0/go.mod h1:pfYeQZ3JWZoXTV5sFc986z3HTpwQs9At6P4ImfuP github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache 
v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= @@ -1049,8 +1082,8 @@ github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/google/go-containerregistry v0.20.3 h1:oNx7IdTI936V8CQRveCjaxOiegWwvM7kqkbXTpyiovI= -github.com/google/go-containerregistry v0.20.3/go.mod h1:w00pIgBRDVUDFM6bq+Qx8lwNWK+cxgCuX1vd3PIBDNI= +github.com/google/go-containerregistry v0.20.6 h1:cvWX87UxxLgaH76b4hIvya6Dzz9qHB31qAwjAohdSTU= +github.com/google/go-containerregistry v0.20.6/go.mod h1:T0x8MuoAoKX/873bkeSfLD2FAkwCDf9/HZgsFJ02E2Y= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/licensecheck v0.3.1 h1:QoxgoDkaeC4nFrtGN1jV7IPmDCHFNIVh54e5hSt6sPs= github.com/google/licensecheck v0.3.1/go.mod h1:ORkR35t/JjW+emNKtfJDII0zlciG9JgbT7SmsohlHmY= @@ -1079,8 +1112,8 @@ github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= -github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd h1:gbpYu9NMq8jhDVbvlGkMFWCjLFlqqEZjEmObmhUy6Vo= -github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw= +github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs= +github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM= github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA= @@ -1155,7 +1188,8 @@ github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09 github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= -github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/hcl/v2 v2.23.0 
h1:Fphj1/gCylPxHutVSEOf2fBOh1VE4AuLV7+kbJf3qos= github.com/hashicorp/hcl/v2 v2.23.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA= @@ -1213,8 +1247,8 @@ github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= github.com/klauspost/compress v1.15.11/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM= -github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= -github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU= @@ -1223,8 +1257,6 @@ github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f h1:GvCU5GX github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f/go.mod h1:q59u9px8b7UTj0nIjEjvmTWekazka6xIt6Uogz5Dm+8= github.com/knqyf263/go-deb-version v0.0.0-20190517075300-09fca494f03d h1:X4cedH4Kn3JPupAwwWuo4AzYp16P0OyLO9d7OnMZc/c= github.com/knqyf263/go-deb-version v0.0.0-20190517075300-09fca494f03d/go.mod h1:o8sgWoz3JADecfc/cTYD92/Et1yMqMy0utV1z+VaZao= -github.com/knqyf263/go-rpmdb v0.1.1 h1:oh68mTCvp1XzxdU7EfafcWzzfstUZAEa3MW0IJye584= -github.com/knqyf263/go-rpmdb v0.1.1/go.mod h1:9LQcoMCMQ9vrF7HcDtXfvqGO4+ddxFQ8+YF/0CVGDww= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= @@ -1249,8 +1281,8 @@ github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuz github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o= github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= -github.com/magiconair/properties v1.8.9 h1:nWcCbLq1N2v/cpNsy5WvQ37Fb+YElfq20WJ/a8RkpQM= -github.com/magiconair/properties v1.8.9/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= +github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/maruel/natural v1.1.1 h1:Hja7XhhmvEFhcByqDoHz9QZbkWey+COd9xWfCfn1ioo= @@ -1262,8 +1294,8 @@ github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVc github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.13 
h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= @@ -1276,7 +1308,6 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 h1:P8UmIzZMYDR+NGImiFvErt6VWfIRPuGM+vyjiEdkmIw= github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= @@ -1284,13 +1315,17 @@ github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4 github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI= github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= -github.com/microsoft/go-rustaudit v0.0.0-20220730194248-4b17361d90a5 h1:tQRHcLQwnwrPq2j2Qra/NnyjyESBGwdeBeVdAE9kXYg= -github.com/microsoft/go-rustaudit v0.0.0-20220730194248-4b17361d90a5/go.mod h1:vYT9HE7WCvL64iVeZylKmCsWKfE+JZ8105iuh2Trk8g= +github.com/mholt/archives v0.1.3 h1:aEAaOtNra78G+TvV5ohmXrJOAzf++dIlYeDW3N9q458= +github.com/mholt/archives v0.1.3/go.mod h1:LUCGp++/IbV/I0Xq4SzcIR6uwgeh2yjnQWamjRQfLTU= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= +github.com/mikelolasagasti/xz v1.0.1 h1:Q2F2jX0RYJUG3+WsM+FJknv+6eVjsjXNDV0KJXZzkD0= +github.com/mikelolasagasti/xz v1.0.1/go.mod h1:muAirjiOUxPRXwm9HdDtB3uoRPrGnL85XHtokL9Hcgc= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= +github.com/minio/minlz v1.0.0 h1:Kj7aJZ1//LlTP1DM8Jm7lNKvvJS2m74gyyXXn3+uJWQ= +github.com/minio/minlz v1.0.0/go.mod h1:qT0aEB35q79LLornSzeDH75LBf3aH1MV+jB5w9Wasec= github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= @@ -1299,25 +1334,23 @@ github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrk github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= 
github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= -github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= -github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= -github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4= -github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE= +github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= +github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg= github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= github.com/moby/sys/mountinfo v0.7.2 h1:1shs6aH5s4o5H2zQLn796ADW1wMrIwHsyJ2v9KouLrg= github.com/moby/sys/mountinfo v0.7.2/go.mod h1:1YOa8w8Ih7uW0wALDUgT1dTTSBrZ+HiBLGws92L2RU4= -github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= -github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= github.com/moby/sys/signal v0.7.0 h1:25RW3d5TnQEoKvRbEKUGay6DCQ46IxAVTT9CUMgmsSI= github.com/moby/sys/signal v0.7.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg= github.com/moby/sys/user v0.3.0 h1:9ni5DlcW5an3SvRSx4MouotOygvzaXbaSrc/wGDFWPo= @@ -1342,16 +1375,26 @@ github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3 github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4= github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= +github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1 h1:kpt9ZfKcm+EDG4s40hMwE//d5SBgDjUOrITReV2u4aA= +github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1/go.mod h1:qgCw4bBKZX8qMgGeEZzGFVT3notl42dBjNqO2jut0M0= +github.com/nsf/jsondiff v0.0.0-20210926074059-1e845ec5d249 
h1:NHrXEjTNQY7P0Zfx1aMrNhpgxHmow66XQtm0aQLY0AE= +github.com/nsf/jsondiff v0.0.0-20210926074059-1e845ec5d249/go.mod h1:mpRZBD8SJ55OIICQ3iWH0Yz3cjzA61JdqMLoWXeB2+8= github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc= github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= -github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= -github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= +github.com/nwaples/rardecode/v2 v2.1.0 h1:JQl9ZoBPDy+nIZGb1mx8+anfHp/LV3NE2MjMiv0ct/U= +github.com/nwaples/rardecode/v2 v2.1.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw= +github.com/olekukonko/errors v0.0.0-20250405072817-4e6d85265da6 h1:r3FaAI0NZK3hSmtTDrBVREhKULp8oUeqLT5Eyl2mSPo= +github.com/olekukonko/errors v0.0.0-20250405072817-4e6d85265da6/go.mod h1:ppzxA5jBKcO1vIpCXQ9ZqgDh8iwODz6OXIGKU8r5m4Y= +github.com/olekukonko/ll v0.0.8 h1:sbGZ1Fx4QxJXEqL/6IG8GEFnYojUSQ45dJVwN2FH2fc= +github.com/olekukonko/ll v0.0.8/go.mod h1:En+sEW0JNETl26+K8eZ6/W4UQ7CYSrrgg/EdIYT2H8g= +github.com/olekukonko/tablewriter v1.0.7 h1:HCC2e3MM+2g72M81ZcJU11uciw6z/p82aEnm4/ySDGw= +github.com/olekukonko/tablewriter v1.0.7/go.mod h1:H428M+HzoUXC6JU2Abj9IT9ooRmdq9CxuDmKMtrOCMs= github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k= github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= -github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg= github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/selinux v1.11.0 h1:+5Zbo97w3Lbmb3PeqQtpmTkMwsW5nRI3YaLpt7tQ7oU= @@ -1372,14 +1415,14 @@ github.com/pborman/indent v1.2.1/go.mod h1:FitS+t35kIYtB5xWTZAPhnmrxcciEEOdbyrrp github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= -github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= +github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= +github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY= github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= -github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ= -github.com/pierrec/lz4/v4 v4.1.21/go.mod 
h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU= +github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= @@ -1391,6 +1434,8 @@ github.com/pkg/profile v1.7.0 h1:hnbDkaNWPCLMO9wGLdBFTIZvzDrDfBM2072E1S9gJkA= github.com/pkg/profile v1.7.0/go.mod h1:8Uer0jas47ZQMJ7VD+OHknK4YDY07LPUC6dEvqDjvNo= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= +github.com/pkg/xattr v0.4.9 h1:5883YPCtkSd8LFbs13nXplj9g9tlrwoJRjgpgMu1/fE= +github.com/pkg/xattr v0.4.9/go.mod h1:di8WF84zAKk8jzR1UBTEWh9AUlIZZ7M/JNt8e9B6ktU= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= @@ -1411,8 +1456,8 @@ github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8b github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= -github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo= -github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= +github.com/prometheus/procfs v0.10.1 h1:kYK1Va/YMlutzCGazswoHKo//tZVlFpKYh+PymziUAg= +github.com/prometheus/procfs v0.10.1/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPHWJq+XBB/FM= github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= @@ -1424,19 +1469,18 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= -github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= -github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/rust-secure-code/go-rustaudit v0.0.0-20250226111315-e20ec32e963c h1:8gOLsYwaY2JwlTMT4brS5/9XJdrdIbmk2obvQ748CC0= +github.com/rust-secure-code/go-rustaudit v0.0.0-20250226111315-e20ec32e963c/go.mod 
h1:kwM/7r/rVluTE8qJbHAffduuqmSv4knVQT2IajGvSiA= github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk= +github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/saferwall/pe v1.5.6 h1:DrRLnoQFxHWJ5lJUmrH7X2L0xeUu6SUS95Dc61eW2Yc= -github.com/saferwall/pe v1.5.6/go.mod h1:mJx+PuptmNpoPFBNhWs/uDMFL/kTHVZIkg0d4OUJFbQ= github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= -github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= -github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= -github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= -github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= +github.com/sagikazarmark/locafero v0.9.0 h1:GbgQGNtTrEmddYDSAH9QLRyfAHY12md+8YFTqyMTC9k= +github.com/sagikazarmark/locafero v0.9.0/go.mod h1:UBUyz37V+EdMS3hDF3QWIiVr/2dPrx49OMO0Bn0hJqk= github.com/sahilm/fuzzy v0.1.1 h1:ceu5RHF8DGgoi+/dR5PsECjCDH1BE3Fnmpo7aVXOdRA= github.com/sahilm/fuzzy v0.1.1/go.mod h1:VFvziUEIMCrT6A6tw2RFIXPXXmzXbOsSHF0DOI8ZK9Y= github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d h1:hrujxIzL1woJ7AwssoOcM/tq5JjjG2yYOc8odClEiXA= @@ -1451,35 +1495,36 @@ github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7 github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sebdah/goldie/v2 v2.5.5 h1:rx1mwF95RxZ3/83sdS4Yp7t2C5TCokvWP4TBRbAyEWY= github.com/sebdah/goldie/v2 v2.5.5/go.mod h1:oZ9fp0+se1eapSRjfYbsV/0Hqhbuu3bJVvKI/NNtssI= -github.com/secDre4mer/pkcs7 v0.0.0-20240322103146-665324a4461d h1:RQqyEogx5J6wPdoxqL132b100j8KjcVHO1c0KLRoIhc= -github.com/secDre4mer/pkcs7 v0.0.0-20240322103146-665324a4461d/go.mod h1:PegD7EVqlN88z7TpCqH92hHP+GBpfomGCCnw1PFtNOA= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= -github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw= +github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/skeema/knownhosts v1.3.0 h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY= -github.com/skeema/knownhosts 
v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M= +github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af h1:Sp5TG9f7K39yfB+If0vjp97vuT74F72r8hfRpP8jLU0= +github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8= +github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY= +github.com/sorairolake/lzip-go v0.3.5 h1:ms5Xri9o1JBIWvOFAorYtUNik6HI3HgBTkISiqu0Cwg= +github.com/sorairolake/lzip-go v0.3.5/go.mod h1:N0KYq5iWrMXI0ZEXKXaS9hCyOjZUQdBDEIbXfoUwbdk= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb h1:bLo8hvc8XFm9J47r690TUKBzcjSWdJDxmjXJZ+/f92U= github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb/go.mod h1:uKWaldnbMnjsSAXRurWqqrdyZen1R7kxl8TkmWk2OyM= github.com/spdx/tools-golang v0.5.5 h1:61c0KLfAcNqAjlg6UNMdkwpMernhw3zVRwDZ2x9XOmk= github.com/spdx/tools-golang v0.5.5/go.mod h1:MVIsXx8ZZzaRWNQpUDhC4Dud34edUYJYecciXgrw5vE= github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= -github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs= -github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4= +github.com/spf13/afero v1.14.0 h1:9tH6MapGnn/j0eb0yIXiLjERO8RB6xIVZRDCX7PtqWA= +github.com/spf13/afero v1.14.0/go.mod h1:acJQ8t0ohCGuMN3O+Pv0V0hgMxNYDlvdk+VTfyZmbYo= github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w= -github.com/spf13/cast v1.7.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= +github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y= +github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4= github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= @@ -1488,8 +1533,8 @@ github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= -github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI= -github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg= +github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4= +github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= @@ -1513,10 +1558,10 
@@ github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= -github.com/sylabs/sif/v2 v2.20.2 h1:HGEPzauCHhIosw5o6xmT3jczuKEuaFzSfdjAsH33vYw= -github.com/sylabs/sif/v2 v2.20.2/go.mod h1:WyYryGRaR4Wp21SAymm5pK0p45qzZCSRiZMFvUZiuhc= -github.com/sylabs/squashfs v1.0.4 h1:uFSw7WXv7zjutPvU+JzY0nY494Vw8s4FAf4+7DhoMdI= -github.com/sylabs/squashfs v1.0.4/go.mod h1:PDgf8YmCntvN4d9Y8hBUBDCZL6qZOzOQwRGxnIdbERk= +github.com/sylabs/sif/v2 v2.21.1 h1:GZ0b5//AFAqJEChd8wHV/uSKx/l1iuGYwjR8nx+4wPI= +github.com/sylabs/sif/v2 v2.21.1/go.mod h1:YoqEGQnb5x/ItV653bawXHZJOXQaEWpGwHsSD3YePJI= +github.com/sylabs/squashfs v1.0.6 h1:PvJcDzxr+vIm2kH56mEMbaOzvGu79gK7P7IX+R7BDZI= +github.com/sylabs/squashfs v1.0.6/go.mod h1:DlDeUawVXLWAsSRa085Eo0ZenGzAB32JdAUFaB0LZfE= github.com/terminalstatic/go-xsd-validate v0.1.6 h1:TenYeQ3eY631qNi1/cTmLH/s2slHPRKTTHT+XSHkepo= github.com/terminalstatic/go-xsd-validate v0.1.6/go.mod h1:18lsvYFofBflqCrvo1umpABZ99+GneNTw2kEEc8UPJw= github.com/therootcompany/xz v1.0.1 h1:CmOtsn1CbtmyYiusbfmhmkpAAETj0wBIH6kCYaX+xzw= @@ -1538,8 +1583,8 @@ github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc= github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/vbatts/go-mtree v0.5.4 h1:OMAb8jaCyiFA7zXj0Zc/oARcxBDBoeu2LizjB8BVJl0= github.com/vbatts/go-mtree v0.5.4/go.mod h1:5GqJbVhm9BBiCc4K5uc/c42FPgXulHaQs4sFUEfIWMo= -github.com/vbatts/tar-split v0.11.6 h1:4SjTW5+PU11n6fZenf2IPoV8/tz3AaYHMWjf23envGs= -github.com/vbatts/tar-split v0.11.6/go.mod h1:dqKNtesIOr2j2Qv3W/cHjnvk9I8+G7oAkFDFN6TCBEI= +github.com/vbatts/tar-split v0.12.1 h1:CqKoORW7BUWBe7UL/iqTVvkTBOF8UvOMKOIZykxnnbo= +github.com/vbatts/tar-split v0.12.1/go.mod h1:eF6B6i6ftWQcDqEn3/iGFRFRo8cBIMSJVOpnNdfTMFA= github.com/vifraa/gopom v1.0.0 h1:L9XlKbyvid8PAIK8nr0lihMApJQg/12OBvMA28BcWh0= github.com/vifraa/gopom v1.0.0/go.mod h1:oPa1dcrGrtlO37WPDBm5SqHAT+wTgF8An1Q71Z6Vv4o= github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc= @@ -1599,38 +1644,38 @@ go.opentelemetry.io/contrib/detectors/gcp v1.29.0 h1:TiaiXB4DpGD3sdzNlYQxruQngn5 go.opentelemetry.io/contrib/detectors/gcp v1.29.0/go.mod h1:GW2aWZNwR2ZxDLdv8OyC2G8zkRoQBuURgV7RPQgcPoU= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 h1:r6I7RJCN86bpD/FQwedZ0vSixDpwuWREjW9oRMsmqDc= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0/go.mod h1:B9yO6b04uB80CzjedvewuqDhxJxi11s7/GtiGa8bAjI= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 h1:yd02MEjBdJkG3uabWP9apV+OuWRIXGDuJEUJbOHmCFU= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0/go.mod h1:umTcuxiv1n/s/S6/c2AT/g2CQ7u5C59sHDNmfSwgz7Q= -go.opentelemetry.io/otel v1.33.0 h1:/FerN9bax5LoK51X/sI0SVYrjSE0/yUL7DpxW4K3FWw= -go.opentelemetry.io/otel v1.33.0/go.mod h1:SUUkR6csvUQl+yjReHu5uM3EtVV7MBm5FHKRlNx4I8I= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q= +go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= +go.opentelemetry.io/otel 
v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 h1:Mne5On7VWdx7omSrSSZvM4Kw7cS7NQkOOmLcgscI51U= go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0/go.mod h1:IPtUMKL4O3tH5y+iXVyAXqpAwMuzC1IrxVS81rummfE= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 h1:wpMfgF8E1rkrT1Z6meFh1NDtownE9Ii3n3X2GJYjsaU= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0/go.mod h1:wAy0T/dUbs468uOlkT31xjvqQgEVXv58BRFWEgn5v/0= go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0 h1:WDdP9acbMYjbKIyJUhTvtzj601sVJOqgWdUxSdR/Ysc= go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0/go.mod h1:BLbf7zbNIONBLPwvFnwNHGj4zge8uTCM/UPIVW1Mq2I= -go.opentelemetry.io/otel/metric v1.33.0 h1:r+JOocAyeRVXD8lZpjdQjzMadVZp2M4WmQ+5WtEnklQ= -go.opentelemetry.io/otel/metric v1.33.0/go.mod h1:L9+Fyctbp6HFTddIxClbQkjtubW6O9QS3Ann/M82u6M= -go.opentelemetry.io/otel/sdk v1.33.0 h1:iax7M131HuAm9QkZotNHEfstof92xM+N8sr3uHXc2IM= -go.opentelemetry.io/otel/sdk v1.33.0/go.mod h1:A1Q5oi7/9XaMlIWzPSxLRWOI8nG3FnzHJNbiENQuihM= -go.opentelemetry.io/otel/sdk/metric v1.29.0 h1:K2CfmJohnRgvZ9UAj2/FhIf/okdWcNdBwe1m8xFXiSY= -go.opentelemetry.io/otel/sdk/metric v1.29.0/go.mod h1:6zZLdCl2fkauYoZIOn/soQIDSWFmNSRcICarHfuhNJQ= -go.opentelemetry.io/otel/trace v1.33.0 h1:cCJuF7LRjUFso9LPnEAHJDB2pqzp+hbO8eu1qqW2d/s= -go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck= +go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= +go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= +go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs= +go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY= +go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis= +go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4= +go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= +go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= -go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI= -go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/zap v1.17.0/go.mod 
h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +go4.org v0.0.0-20230225012048-214862532bf5 h1:nifaUDeh+rPaBCMPMQHZmvJf+QdpLFnuQPwx+LxVmtc= +go4.org v0.0.0-20230225012048-214862532bf5/go.mod h1:F57wTi5Lrj6WLyswp5EYV1ncrEbFGHD4hhz6S1ZYeaU= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -1649,8 +1694,8 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= -golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= -golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= +golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -1666,8 +1711,8 @@ golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20220827204233-334a2380cb91/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE= -golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8= -golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY= +golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 h1:R84qjqJb5nVJMxqWYb3np9L5ZsaDtB+a39EqjV0JSUM= +golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5NjCrhFrqg6A5zA2E/iPHPhqnS8= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= @@ -1713,8 +1758,8 @@ golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= -golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= +golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1784,8 +1829,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= -golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= -golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= +golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -1816,8 +1861,8 @@ golang.org/x/oauth2 v0.4.0/go.mod h1:RznEsdpjGAINPTOF0UH/t+xJ75L18YO3Ho6Pyn+uRec golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I= golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4= -golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70= -golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= +golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1838,8 +1883,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= -golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= +golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1919,6 +1964,7 @@ golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20220408201424-a24fb2fb8a0f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1943,8 +1989,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= -golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -1960,8 +2006,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= -golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y= -golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g= +golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= +golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1983,16 +2029,16 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= -golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= +golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= +golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20220922220347-f3bd1da661af/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time 
v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= -golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -2059,8 +2105,8 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU= -golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ= +golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo= +golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -2351,8 +2397,8 @@ google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqw google.golang.org/protobuf v1.29.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= -google.golang.org/protobuf v1.36.3 h1:82DV7MYdb8anAVi3qge1wSnMDrnKK7ebr+I0hHRN1BU= -google.golang.org/protobuf v1.36.3/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +google.golang.org/protobuf v1.36.4 h1:6A3ZDJHn/eNqc1i+IdefRzy/9PokBTPvcqMySR7NNIM= +google.golang.org/protobuf v1.36.4/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -2362,8 +2408,6 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EV gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= -gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= @@ -2378,8 +2422,8 @@ gopkg.in/yaml.v3 
v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8= -gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ= +gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs= +gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0= gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -2395,21 +2439,21 @@ lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl modernc.org/cc/v3 v3.36.0/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= modernc.org/cc/v3 v3.36.2/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= modernc.org/cc/v3 v3.36.3/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= -modernc.org/cc/v4 v4.24.4 h1:TFkx1s6dCkQpd6dKurBNmpo+G8Zl4Sq/ztJ+2+DEsh0= -modernc.org/cc/v4 v4.24.4/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0= +modernc.org/cc/v4 v4.26.1 h1:+X5NtzVBn0KgsBCBe+xkDC7twLb/jNVj9FPgiwSQO3s= +modernc.org/cc/v4 v4.26.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0= modernc.org/ccgo/v3 v3.0.0-20220428102840-41399a37e894/go.mod h1:eI31LL8EwEBKPpNpA4bU1/i+sKOwOrQy8D87zWUcRZc= modernc.org/ccgo/v3 v3.0.0-20220430103911-bc99d88307be/go.mod h1:bwdAnOoaIt8Ax9YdWGjxWsdkPcZyRPHqrOvJxaKAKGw= modernc.org/ccgo/v3 v3.16.4/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= modernc.org/ccgo/v3 v3.16.6/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= modernc.org/ccgo/v3 v3.16.8/go.mod h1:zNjwkizS+fIFDrDjIAgBSCLkWbJuHF+ar3QRn+Z9aws= modernc.org/ccgo/v3 v3.16.9/go.mod h1:zNMzC9A9xeNUepy6KuZBbugn3c0Mc9TeiJO4lgvkJDo= -modernc.org/ccgo/v4 v4.23.16 h1:Z2N+kk38b7SfySC1ZkpGLN2vthNJP1+ZzGZIlH7uBxo= -modernc.org/ccgo/v4 v4.23.16/go.mod h1:nNma8goMTY7aQZQNTyN9AIoJfxav4nvTnvKThAeMDdo= +modernc.org/ccgo/v4 v4.28.0 h1:rjznn6WWehKq7dG4JtLRKxb52Ecv8OUGah8+Z/SfpNU= +modernc.org/ccgo/v4 v4.28.0/go.mod h1:JygV3+9AV6SmPhDasu4JgquwU81XAKLd3OKTUDNOiKE= modernc.org/ccorpus v1.11.6/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ= -modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE= -modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ= -modernc.org/gc/v2 v2.6.3 h1:aJVhcqAte49LF+mGveZ5KPlsp4tdGdAOT4sipJXADjw= -modernc.org/gc/v2 v2.6.3/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito= +modernc.org/fileutil v1.3.3 h1:3qaU+7f7xxTUmvU1pJTZiDLAIoJVdUSSauJNHg9yXoA= +modernc.org/fileutil v1.3.3/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc= +modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI= +modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito= modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM= modernc.org/libc v0.0.0-20220428101251-2d5f3daf273b/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA= modernc.org/libc v1.16.0/go.mod h1:N4LD6DBE9cf+Dzf9buBlzVJndKr/iJHG97vGLHYnb5A= @@ -2418,8 +2462,8 @@ modernc.org/libc v1.16.17/go.mod h1:hYIV5VZczAmGZAnG15Vdngn5HSF5cSkbvfz2B7GRuVU= modernc.org/libc v1.16.19/go.mod 
h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA= modernc.org/libc v1.17.0/go.mod h1:XsgLldpP4aWlPlsjqKRdHPqCxCjISdHfM/yeWC5GyW0= modernc.org/libc v1.17.1/go.mod h1:FZ23b+8LjxZs7XtFMbSzL/EhPxNbfZbErxEHc7cbD9s= -modernc.org/libc v1.61.13 h1:3LRd6ZO1ezsFiX1y+bHd1ipyEHIJKvuprv0sLTBwLW8= -modernc.org/libc v1.61.13/go.mod h1:8F/uJWL/3nNil0Lgt1Dpz+GgkApWh04N3el3hxJcA6E= +modernc.org/libc v1.65.10 h1:ZwEk8+jhW7qBjHIT+wd0d9VjitRyQef9BnzlzGwMODc= +modernc.org/libc v1.65.10/go.mod h1:StFvYpx7i/mXtBAfVOjaU0PWZOvIRoZSgXhrwXzr8Po= modernc.org/mathutil v1.2.2/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= modernc.org/mathutil v1.4.1/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= @@ -2428,8 +2472,8 @@ modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJ modernc.org/memory v1.1.1/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= modernc.org/memory v1.2.0/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= modernc.org/memory v1.2.1/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= -modernc.org/memory v1.8.2 h1:cL9L4bcoAObu4NkxOlKWBWtNHIsnnACGF/TbqQ6sbcI= -modernc.org/memory v1.8.2/go.mod h1:ZbjSvMO5NQ1A2i3bWeDiVMxIorXwdClKE/0SZ+BMotU= +modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI= +modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw= modernc.org/opt v0.1.1/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8= @@ -2437,8 +2481,8 @@ modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns= modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w= modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE= modernc.org/sqlite v1.18.1/go.mod h1:6ho+Gow7oX5V+OiOQ6Tr4xeqbx13UZ6t+Fw9IRUG4d4= -modernc.org/sqlite v1.35.0 h1:yQps4fegMnZFdphtzlfQTCNBWtS0CZv48pRpW3RFHRw= -modernc.org/sqlite v1.35.0/go.mod h1:9cr2sicr7jIaWTBKQmAxQLfBv9LL0su4ZTEV+utt3ic= +modernc.org/sqlite v1.38.0 h1:+4OrfPQ8pxHKuWG4md1JpR/EYAh3Md7TdejuuzE7EUI= +modernc.org/sqlite v1.38.0/go.mod h1:1Bj+yES4SVvBZ4cBOpVZ6QgesMCKpJZDq0nxYzOpmNE= modernc.org/strutil v1.1.1/go.mod h1:DE+MQQ/hjKBZS2zNInV5hhcipt5rLPWkmpbGeW5mmdw= modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw= modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0= diff --git a/grype/db/v5/differ/differ.go b/grype/db/v5/differ/differ.go index 5054a46a653..62ae9a5ce7a 100644 --- a/grype/db/v5/differ/differ.go +++ b/grype/db/v5/differ/differ.go @@ -8,6 +8,8 @@ import ( "path" "github.com/olekukonko/tablewriter" + "github.com/olekukonko/tablewriter/renderer" + "github.com/olekukonko/tablewriter/tw" "github.com/wagoodman/go-partybus" "github.com/wagoodman/go-progress" @@ -162,25 +164,12 @@ func (d *Differ) Present(outputFormat string, diff *[]v5.Diff, output io.Writer) rows = append(rows, []string{d.ID, d.Namespace, d.Reason}) } - table := tablewriter.NewWriter(output) - columns := []string{"ID", "Namespace", "Reason"} - - table.SetHeader(columns) - table.SetAutoWrapText(false) - table.SetHeaderAlignment(tablewriter.ALIGN_LEFT) - table.SetAlignment(tablewriter.ALIGN_LEFT) - - table.SetHeaderLine(false) - table.SetBorder(false) - table.SetAutoFormatHeaders(true) - table.SetCenterSeparator("") - table.SetColumnSeparator("") - 
table.SetRowSeparator("") - table.SetTablePadding(" ") - table.SetNoWhiteSpace(true) - - table.AppendBulk(rows) - table.Render() + table := newTable(output, []string{"ID", "Namespace", "Reason"}) + + if err := table.Bulk(rows); err != nil { + return fmt.Errorf("failed to add table rows: %+v", err) + } + return table.Render() case "json": enc := json.NewEncoder(output) enc.SetEscapeHTML(false) @@ -193,3 +182,37 @@ func (d *Differ) Present(outputFormat string, diff *[]v5.Diff, output io.Writer) } return nil } + +func newTable(output io.Writer, columns []string) *tablewriter.Table { + return tablewriter.NewTable(output, + tablewriter.WithHeader(columns), + tablewriter.WithHeaderAutoWrap(tw.WrapNone), + tablewriter.WithRowAutoWrap(tw.WrapNone), + tablewriter.WithAutoHide(tw.On), + tablewriter.WithRenderer(renderer.NewBlueprint()), + tablewriter.WithBehavior( + tw.Behavior{ + TrimSpace: tw.On, + AutoHide: tw.On, + }, + ), + tablewriter.WithPadding( + tw.Padding{ + Right: " ", + }, + ), + tablewriter.WithRendition( + tw.Rendition{ + Symbols: tw.NewSymbols(tw.StyleNone), + Settings: tw.Settings{ + Lines: tw.Lines{ + ShowTop: tw.Off, + ShowBottom: tw.Off, + ShowHeaderLine: tw.Off, + ShowFooterLine: tw.Off, + }, + }, + }, + ), + ) +} diff --git a/grype/db/v5/differ/test-fixtures/snapshot/TestPresent_Table.golden b/grype/db/v5/differ/test-fixtures/snapshot/TestPresent_Table.golden index 85a6e5327bb..e719779e975 100644 --- a/grype/db/v5/differ/test-fixtures/snapshot/TestPresent_Table.golden +++ b/grype/db/v5/differ/test-fixtures/snapshot/TestPresent_Table.golden @@ -1,4 +1,4 @@ -ID NAMESPACE REASON +ID NAMESPACE REASON CVE-1 nvd added CVE-2 nvd removed CVE-3 nvd changed diff --git a/grype/db/v5/distribution/curator.go b/grype/db/v5/distribution/curator.go index cff1c83d571..609d67a565b 100644 --- a/grype/db/v5/distribution/curator.go +++ b/grype/db/v5/distribution/curator.go @@ -477,6 +477,7 @@ func (c Curator) ListingFromURL() (Listing, error) { return Listing{}, fmt.Errorf("unable to create listing temp file: %w", err) } defer func() { + log.CloseAndLogError(tempFile, tempFile.Name()) err := c.fs.RemoveAll(tempFile.Name()) if err != nil { log.Errorf("failed to remove file (%s): %w", tempFile.Name(), err) diff --git a/grype/db/v5/namespace/distro/namespace_test.go b/grype/db/v5/namespace/distro/namespace_test.go index f916d66b6d3..bdc6f9ac325 100644 --- a/grype/db/v5/namespace/distro/namespace_test.go +++ b/grype/db/v5/namespace/distro/namespace_test.go @@ -41,6 +41,14 @@ func TestFromString(t *testing.T) { namespaceString: "wolfi:distro:wolfi:rolling", result: NewNamespace("wolfi", grypeDistro.Wolfi, "rolling"), }, + { + namespaceString: "echo:distro:echo:rolling", + result: NewNamespace("echo", grypeDistro.Echo, "rolling"), + }, + { + namespaceString: "minimos:distro:minimos:rolling", + result: NewNamespace("minimos", grypeDistro.MinimOS, "rolling"), + }, } for _, test := range successTests { diff --git a/grype/db/v5/namespace/index.go b/grype/db/v5/namespace/index.go deleted file mode 100644 index 5c207cd4f0d..00000000000 --- a/grype/db/v5/namespace/index.go +++ /dev/null @@ -1,248 +0,0 @@ -package namespace - -import ( - "fmt" - "regexp" - "sort" - "strings" - - hashiVer "github.com/anchore/go-version" - "github.com/anchore/grype/grype/db/v5/namespace/cpe" - "github.com/anchore/grype/grype/db/v5/namespace/distro" - "github.com/anchore/grype/grype/db/v5/namespace/language" - grypeDistro "github.com/anchore/grype/grype/distro" - "github.com/anchore/grype/internal" - 
"github.com/anchore/grype/internal/log" - syftPkg "github.com/anchore/syft/syft/pkg" -) - -var simpleSemVer = regexp.MustCompile(`^(?P\d+)(\.(?P\d+)(\.(?P\d+(?P[^-_]+)*))?)?$`) - -type Index struct { - all []Namespace - byLanguage map[syftPkg.Language][]*language.Namespace - byDistroKey map[string][]*distro.Namespace - cpe []*cpe.Namespace -} - -func FromStrings(namespaces []string) (*Index, error) { - all := make([]Namespace, 0) - byLanguage := make(map[syftPkg.Language][]*language.Namespace) - byDistroKey := make(map[string][]*distro.Namespace) - cpeNamespaces := make([]*cpe.Namespace, 0) - - for _, n := range namespaces { - ns, err := FromString(n) - - if err != nil { - log.Warnf("unable to create namespace object from namespace=%s: %+v", n, err) - continue - } - - all = append(all, ns) - - switch nsObj := ns.(type) { - case *language.Namespace: - l := nsObj.Language() - if _, ok := byLanguage[l]; !ok { - byLanguage[l] = make([]*language.Namespace, 0) - } - - byLanguage[l] = append(byLanguage[l], nsObj) - case *distro.Namespace: - distroKey := fmt.Sprintf("%s:%s", nsObj.DistroType(), nsObj.Version()) - if _, ok := byDistroKey[distroKey]; !ok { - byDistroKey[distroKey] = make([]*distro.Namespace, 0) - } - - byDistroKey[distroKey] = append(byDistroKey[distroKey], nsObj) - case *cpe.Namespace: - cpeNamespaces = append(cpeNamespaces, nsObj) - default: - log.Warnf("unable to index namespace=%s", n) - continue - } - } - - return &Index{ - all: all, - byLanguage: byLanguage, - byDistroKey: byDistroKey, - cpe: cpeNamespaces, - }, nil -} - -func (i *Index) NamespacesForLanguage(l syftPkg.Language) []*language.Namespace { - if _, ok := i.byLanguage[l]; ok { - return i.byLanguage[l] - } - - return nil -} - -//nolint:funlen,gocognit -func (i *Index) NamespacesForDistro(d *grypeDistro.Distro) []*distro.Namespace { - if d == nil { - return nil - } - - dTy := DistroTypeString(d.Type) - - if d.IsRolling() { - distroKey := fmt.Sprintf("%s:%s", dTy, "rolling") - if v, ok := i.byDistroKey[distroKey]; ok { - return v - } - } - - var versionSegments []int - if d.Version != nil { - versionSegments = d.Version.Segments() - } - - switch d.Type { - case grypeDistro.Alpine: - if v := i.getAlpineMajorMinorNamespace(d, versionSegments); v != nil { - return v - } - - // Fall back to alpine:edge if no version segments found - // alpine:edge is labeled as alpine-x.x_alphaYYYYMMDD - distroKey := fmt.Sprintf("%s:%s", dTy, "edge") - if v, ok := i.byDistroKey[distroKey]; ok { - return v - } - case grypeDistro.Debian: - if v, ok := i.findClosestNamespace(d, versionSegments); ok { - return v - } - - if d.RawVersion == "unstable" { - distroKey := fmt.Sprintf("%s:%s", dTy, "unstable") - if v, ok := i.byDistroKey[distroKey]; ok { - return v - } - } - } - - if v, ok := i.findClosestNamespace(d, versionSegments); ok { - return v - } - - return nil -} - -func (i *Index) getAlpineMajorMinorNamespace(d *grypeDistro.Distro, versionSegments []int) []*distro.Namespace { - var hasPrerelease bool - if d.Version != nil { - hasPrerelease = d.Version.Prerelease() != "" - } - - if !hasPrerelease { - namespaces, done := i.findClosestNamespace(d, versionSegments) - if done { - return namespaces - } - } - // If the version does not match x.y.z then it is edge - // In this case it would have - or _ alpha,beta,etc - // note: later in processing we handle the alpine:edge case - return nil -} - -func (i *Index) findClosestNamespace(d *grypeDistro.Distro, versionSegments []int) ([]*distro.Namespace, bool) { - ty := DistroTypeString(d.Type) - - // 
look for exact match - distroKey := fmt.Sprintf("%s:%s", ty, d.FullVersion()) - if v, ok := i.byDistroKey[distroKey]; ok { - return v, true - } - - values := internal.MatchNamedCaptureGroups(simpleSemVer, d.RawVersion) - - switch { - case values["major"] == "": - // use edge - break - case values["minor"] == "": - namespaces, done := i.findHighestMatchingMajorVersionNamespaces(d, versionSegments) - if done { - return namespaces, true - } - - default: - - if len(versionSegments) >= 2 { - // try with only first two version components - distroKey = fmt.Sprintf("%s:%d.%d", ty, versionSegments[0], versionSegments[1]) - if v, ok := i.byDistroKey[distroKey]; ok { - return v, true - } - } - - if len(versionSegments) >= 1 { - // try using only major version component - distroKey = fmt.Sprintf("%s:%d", ty, versionSegments[0]) - if v, ok := i.byDistroKey[distroKey]; ok { - return v, true - } - } - } - return nil, false -} - -func (i *Index) findHighestMatchingMajorVersionNamespaces(d *grypeDistro.Distro, versionSegments []int) ([]*distro.Namespace, bool) { - // find the highest version that matches the major version - majorVersion := versionSegments[0] - - var all []*distro.Namespace - for _, vs := range i.byDistroKey { - for _, v := range vs { - if v.DistroType() == d.Type { - all = append(all, v) - } - } - } - - type namespaceVersion struct { - version *hashiVer.Version - namespace *distro.Namespace - } - - var valid []namespaceVersion - for _, v := range all { - if strings.HasPrefix(v.Version(), fmt.Sprintf("%d.", majorVersion)) { - ver, err := hashiVer.NewVersion(v.Version()) - if err != nil { - continue - } - valid = append(valid, namespaceVersion{ - version: ver, - namespace: v, - }) - } - } - - // return the highest version from valid - sort.Slice(valid, func(i, j int) bool { - return valid[i].version.GreaterThan(valid[j].version) - }) - - if len(valid) > 0 { - return []*distro.Namespace{valid[0].namespace}, true - } - return nil, false -} - -func (i *Index) CPENamespaces() []*cpe.Namespace { - return i.cpe -} - -func DistroTypeString(ty grypeDistro.Type) string { - switch ty { - case grypeDistro.CentOS, grypeDistro.RedHat, grypeDistro.Fedora, grypeDistro.RockyLinux, grypeDistro.AlmaLinux, grypeDistro.Gentoo: - return strings.ToLower(string(grypeDistro.RedHat)) - } - return strings.ToLower(string(ty)) -} diff --git a/grype/db/v5/namespace/index_test.go b/grype/db/v5/namespace/index_test.go deleted file mode 100644 index 248c02dba87..00000000000 --- a/grype/db/v5/namespace/index_test.go +++ /dev/null @@ -1,404 +0,0 @@ -package namespace - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/anchore/grype/grype/db/v5/namespace/cpe" - "github.com/anchore/grype/grype/db/v5/namespace/distro" - "github.com/anchore/grype/grype/db/v5/namespace/language" - osDistro "github.com/anchore/grype/grype/distro" - syftPkg "github.com/anchore/syft/syft/pkg" -) - -func TestFromStringSlice(t *testing.T) { - tests := []struct { - namespaces []string - byLanguage map[syftPkg.Language][]*language.Namespace - byDistroKey map[string][]*distro.Namespace - cpe []*cpe.Namespace - }{ - { - namespaces: []string{ - "github:language:python", - "github:language:python:conda", - "debian:distro:debian:8", - "alpine:distro:alpine:3.15", - "alpine:distro:alpine:3.16", - "msrc:distro:windows:12345", - "nvd:cpe", - "github:language:ruby", - "abc.xyz:language:ruby", - "github:language:rust", - "something:language:rust", - "1234.4567:language:unknown", - "---:cpe", - "another-provider:distro:alpine:3.15", - 
"another-provider:distro:alpine:3.16", - }, - byLanguage: map[syftPkg.Language][]*language.Namespace{ - syftPkg.Python: { - language.NewNamespace("github", syftPkg.Python, ""), - language.NewNamespace("github", syftPkg.Python, syftPkg.Type("conda")), - }, - syftPkg.Ruby: { - language.NewNamespace("github", syftPkg.Ruby, ""), - language.NewNamespace("abc.xyz", syftPkg.Ruby, ""), - }, - syftPkg.Rust: { - language.NewNamespace("github", syftPkg.Rust, ""), - language.NewNamespace("something", syftPkg.Rust, ""), - }, - syftPkg.Language("unknown"): { - language.NewNamespace("1234.4567", syftPkg.Language("unknown"), ""), - }, - }, - byDistroKey: map[string][]*distro.Namespace{ - "debian:8": { - distro.NewNamespace("debian", osDistro.Debian, "8"), - }, - "alpine:3.15": { - distro.NewNamespace("alpine", osDistro.Alpine, "3.15"), - distro.NewNamespace("another-provider", osDistro.Alpine, "3.15"), - }, - "alpine:3.16": { - distro.NewNamespace("alpine", osDistro.Alpine, "3.16"), - distro.NewNamespace("another-provider", osDistro.Alpine, "3.16"), - }, - "windows:12345": { - distro.NewNamespace("msrc", osDistro.Windows, "12345"), - }, - }, - cpe: []*cpe.Namespace{ - cpe.NewNamespace("---"), - cpe.NewNamespace("nvd"), - }, - }, - } - - for _, test := range tests { - result, _ := FromStrings(test.namespaces) - assert.Len(t, result.all, len(test.namespaces)) - - for l, elems := range result.byLanguage { - assert.Contains(t, test.byLanguage, l) - assert.ElementsMatch(t, elems, test.byLanguage[l]) - } - - for d, elems := range result.byDistroKey { - assert.Contains(t, test.byDistroKey, d) - assert.ElementsMatch(t, elems, test.byDistroKey[d]) - } - - assert.ElementsMatch(t, result.cpe, test.cpe) - } -} - -func TestIndex_CPENamespaces(t *testing.T) { - tests := []struct { - namespaces []string - cpe []*cpe.Namespace - }{ - { - namespaces: []string{"nvd:cpe", "another-source:cpe", "x:distro:y:10"}, - cpe: []*cpe.Namespace{ - cpe.NewNamespace("nvd"), - cpe.NewNamespace("another-source"), - }, - }, - } - - for _, test := range tests { - result, _ := FromStrings(test.namespaces) - assert.Len(t, result.all, len(test.namespaces)) - assert.ElementsMatch(t, result.CPENamespaces(), test.cpe) - } -} - -func newDistro(t *testing.T, dt osDistro.Type, v string, idLikes []string) *osDistro.Distro { - d, err := osDistro.New(dt, v, idLikes...) 
- assert.NoError(t, err) - return d -} - -func TestIndex_NamespacesForDistro(t *testing.T) { - namespaceIndex, err := FromStrings([]string{ - "alpine:distro:alpine:2.17", - "alpine:distro:alpine:3.15", - "alpine:distro:alpine:3.16", - "alpine:distro:alpine:4.13", - "alpine:distro:alpine:edge", - "debian:distro:debian:8", - "debian:distro:debian:unstable", - "amazon:distro:amazonlinux:2", - "amazon:distro:amazonlinux:2022", - "abc.xyz:distro:unknown:123.456", - "redhat:distro:redhat:8", - "redhat:distro:redhat:9", - "other-provider:distro:debian:8", - "other-provider:distro:redhat:9", - "suse:distro:sles:12.5", - "mariner:distro:mariner:2.0", - "mariner:distro:azurelinux:3.0", - "msrc:distro:windows:471816", - "ubuntu:distro:ubuntu:18.04", - "ubuntu:distro:ubuntu:18.10", - "ubuntu:distro:ubuntu:20.04", - "ubuntu:distro:ubuntu:20.10", - "oracle:distro:oraclelinux:8", - "wolfi:distro:wolfi:rolling", - "chainguard:distro:chainguard:rolling", - "archlinux:distro:archlinux:rolling", - }) - - assert.NoError(t, err) - - tests := []struct { - name string - distro *osDistro.Distro - namespaces []*distro.Namespace - }{ - { - name: "alpine patch version matches minor version namespace", - distro: newDistro(t, osDistro.Alpine, "3.15.4", []string{"alpine"}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("alpine", osDistro.Alpine, "3.15"), - }, - }, - { - name: "alpine missing patch version matches with minor version", - distro: newDistro(t, osDistro.Alpine, "3.16", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("alpine", osDistro.Alpine, "3.16"), - }, - }, - { - name: "alpine missing minor version uses latest minor version", - distro: newDistro(t, osDistro.Alpine, "3", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("alpine", osDistro.Alpine, "3.16"), - }, - }, - { - name: "ubuntu missing minor version uses latest minor version", - distro: newDistro(t, osDistro.Ubuntu, "18", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("ubuntu", osDistro.Ubuntu, "18.10"), - }, - }, - { - name: "alpine rc version with no patch should match edge", - distro: newDistro(t, osDistro.Alpine, "3.16.4-r4", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("alpine", osDistro.Alpine, "edge"), - }, - }, - - { - name: "alpine edge version matches edge namespace", - distro: &osDistro.Distro{Type: osDistro.Alpine, Version: nil, RawVersion: "3.17.1_alpha20221002", IDLike: []string{"alpine"}}, - namespaces: []*distro.Namespace{ - distro.NewNamespace("alpine", osDistro.Alpine, "edge"), - }, - }, - { - name: "alpine raw version matches edge with - character", - distro: &osDistro.Distro{Type: osDistro.Alpine, Version: nil, RawVersion: "3.17.1-alpha20221002", IDLike: []string{"alpine"}}, - namespaces: []*distro.Namespace{ - distro.NewNamespace("alpine", osDistro.Alpine, "edge"), - }, - }, - { - name: "alpine raw version matches edge with - character no sha", - distro: newDistro(t, osDistro.Alpine, "3.17.1-alpha", []string{"alpine"}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("alpine", osDistro.Alpine, "edge"), - }, - }, - { - name: "alpine raw version matches edge with _ character no sha", - // we don't create a newDistro from this since parsing the version fails - distro: &osDistro.Distro{Type: osDistro.Alpine, Version: nil, RawVersion: "3.17.1_alpha", IDLike: []string{"alpine"}}, - namespaces: []*distro.Namespace{ - distro.NewNamespace("alpine", osDistro.Alpine, "edge"), - }, - }, - { - name: "alpine malformed version 
matches with closest", - distro: newDistro(t, osDistro.Alpine, "3.16.4.5", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("alpine", osDistro.Alpine, "3.16"), - }, - }, - { - name: "Debian minor version matches debian and other-provider namespaces", - distro: newDistro(t, osDistro.Debian, "8.5", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("debian", osDistro.Debian, "8"), - distro.NewNamespace("other-provider", osDistro.Debian, "8"), - }, - }, - { - name: "Redhat minor version matches redhat and other-provider namespaces", - distro: newDistro(t, osDistro.RedHat, "9.5", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("redhat", osDistro.RedHat, "9"), - distro.NewNamespace("other-provider", osDistro.RedHat, "9"), - }, - }, - { - name: "Centos minor version matches redhat and other-provider namespaces", - distro: newDistro(t, osDistro.CentOS, "9.5", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("redhat", osDistro.RedHat, "9"), - distro.NewNamespace("other-provider", osDistro.RedHat, "9"), - }, - }, - { - name: "Alma Linux minor version matches redhat and other-provider namespaces", - distro: newDistro(t, osDistro.AlmaLinux, "9.5", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("redhat", osDistro.RedHat, "9"), - distro.NewNamespace("other-provider", osDistro.RedHat, "9"), - }, - }, - { - name: "Rocky Linux minor version matches redhat and other-provider namespaces", - distro: newDistro(t, osDistro.RockyLinux, "9.5", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("redhat", osDistro.RedHat, "9"), - distro.NewNamespace("other-provider", osDistro.RedHat, "9"), - }, - }, - { - name: "SLES minor version matches suse namespace", - distro: newDistro(t, osDistro.SLES, "12.5", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("suse", osDistro.SLES, "12.5"), - }, - }, - { - name: "Windows version object matches msrc namespace with exact version", - distro: newDistro(t, osDistro.Windows, "471816", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("msrc", osDistro.Windows, "471816"), - }, - }, - { - name: "Ubuntu minor semvar matches ubuntu namespace with exact version", - distro: newDistro(t, osDistro.Ubuntu, "18.04", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("ubuntu", osDistro.Ubuntu, "18.04"), - }, - }, - { - name: "Fedora minor semvar will not match a namespace", - distro: newDistro(t, osDistro.Fedora, "31.4", []string{}), - namespaces: nil, - }, - { - name: "Amazon Linux Major semvar matches amazon namespace with exact version", - distro: newDistro(t, osDistro.AmazonLinux, "2", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("amazon", osDistro.AmazonLinux, "2"), - }, - }, - { - name: "Amazon Linux year version matches amazon namespace with exact uear", - distro: newDistro(t, osDistro.AmazonLinux, "2022", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("amazon", osDistro.AmazonLinux, "2022"), - }, - }, - { - name: "Mariner minor semvar matches no namespace", - distro: newDistro(t, osDistro.Mariner, "20.1", []string{}), - namespaces: nil, - }, - { - name: "Mariner 2.0 matches mariner namespace", - distro: newDistro(t, osDistro.Mariner, "2.0", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("mariner", "mariner", "2.0"), - }, - }, - { - name: "azurelinux 3 is matched by mariner 3 namespace", - distro: newDistro(t, osDistro.Azure, "3.0", 
[]string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("mariner", osDistro.Azure, "3.0"), - }, - }, - { - name: "Oracle Linux Major semvar matches oracle namespace with exact version", - distro: newDistro(t, osDistro.OracleLinux, "8", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("oracle", osDistro.OracleLinux, "8"), - }, - }, - { - - name: "Arch Linux matches archlinux rolling namespace", - distro: newDistro(t, osDistro.ArchLinux, "", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("archlinux", osDistro.ArchLinux, "rolling"), - }, - }, - { - - name: "Wolfi matches wolfi rolling namespace", - distro: newDistro(t, osDistro.Wolfi, "20221011", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("wolfi", osDistro.Wolfi, "rolling"), - }, - }, - { - - name: "Chainguard matches chainguard rolling namespace", - distro: newDistro(t, osDistro.Chainguard, "20230214", []string{}), - namespaces: []*distro.Namespace{ - distro.NewNamespace("chainguard", osDistro.Chainguard, "rolling"), - }, - }, - { - - name: "Gentoo doesn't match any namespace since the gentoo rolling namespace doesn't exist in index", - distro: newDistro(t, osDistro.Gentoo, "", []string{}), - namespaces: nil, - }, - { - name: "Open Suse Leap semvar matches no namespace", - distro: newDistro(t, osDistro.OpenSuseLeap, "100", []string{}), - namespaces: nil, - }, - { - name: "Photon minor semvar no namespace", - distro: newDistro(t, osDistro.Photon, "20.1", []string{}), - namespaces: nil, - }, - { - name: "Busybox minor semvar matches no namespace", - distro: newDistro(t, osDistro.Busybox, "20.1", []string{}), - namespaces: nil, - }, - { - name: "debian unstable", - distro: &osDistro.Distro{ - Type: osDistro.Debian, - RawVersion: "unstable", - Version: nil, - }, - namespaces: []*distro.Namespace{ - distro.NewNamespace("debian", osDistro.Debian, "unstable"), - }, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - namespaces := namespaceIndex.NamespacesForDistro(test.distro) - assert.ElementsMatch(t, test.namespaces, namespaces) - }) - } -} diff --git a/grype/db/v5/pkg/qualifier/from_json.go b/grype/db/v5/pkg/qualifier/from_json.go index a06e76dc64f..dbb661ee4af 100644 --- a/grype/db/v5/pkg/qualifier/from_json.go +++ b/grype/db/v5/pkg/qualifier/from_json.go @@ -3,7 +3,7 @@ package qualifier import ( "encoding/json" - "github.com/mitchellh/mapstructure" + "github.com/go-viper/mapstructure/v2" "github.com/anchore/grype/grype/db/v5/pkg/qualifier/platformcpe" "github.com/anchore/grype/grype/db/v5/pkg/qualifier/rpmmodularity" diff --git a/grype/db/v6/affected_package_store.go b/grype/db/v6/affected_package_store.go index 8701c250f3e..36dd36dc1d3 100644 --- a/grype/db/v6/affected_package_store.go +++ b/grype/db/v6/affected_package_store.go @@ -3,11 +3,10 @@ package v6 import ( "errors" "fmt" - "regexp" - "sort" "strings" "time" + "golang.org/x/exp/maps" "gorm.io/gorm" "gorm.io/gorm/clause" @@ -25,7 +24,6 @@ var AnyOSSpecified *OSSpecifier var AnyPackageSpecified *PackageSpecifier var ErrMissingOSIdentification = errors.New("missing OS name or codename") var ErrOSNotPresent = errors.New("OS not present") -var ErrMultipleOSMatches = errors.New("multiple OS matches found but not allowed") var ErrLimitReached = errors.New("query limit reached") type GetAffectedPackageOptions struct { @@ -85,111 +83,6 @@ func (p PackageSpecifiers) String() string { return strings.Join(parts, ", ") } -type OSSpecifiers []*OSSpecifier - -// OSSpecifier is a 
struct that represents a distro in a way that can be used to query the affected package store. -type OSSpecifier struct { - // Name of the distro as identified by the ID field in /etc/os-release (or similar normalized name, e.g. "oracle" instead of "ol") - Name string - - // MajorVersion is the first field in the VERSION_ID field in /etc/os-release (e.g. 7 in "7.0.1406") - MajorVersion string - - // MinorVersion is the second field in the VERSION_ID field in /etc/os-release (e.g. 0 in "7.0.1406") - MinorVersion string - - // LabelVersion is a string that represents a floating version (e.g. "edge" or "unstable") or is the CODENAME field in /etc/os-release (e.g. "wheezy" for debian 7) - LabelVersion string - - // AllowMultiple specifies whether we intend to allow for multiple distro identities to be matched. - AllowMultiple bool -} - -func (d *OSSpecifier) String() string { - if d == nil { - return anyOS - } - - if *d == *NoOSSpecified { - return "none" - } - - var version string - if d.MajorVersion != "" { - version = d.MajorVersion - if d.MinorVersion != "" { - version += "." + d.MinorVersion - } - } else { - version = d.LabelVersion - } - - distroDisplayName := d.Name - if version != "" { - distroDisplayName += "@" + version - } - if version == d.MajorVersion && d.LabelVersion != "" { - distroDisplayName += " (" + d.LabelVersion + ")" - } - - return distroDisplayName -} - -func (d OSSpecifier) version() string { - if d.MajorVersion != "" && d.MinorVersion != "" { - return d.MajorVersion + "." + d.MinorVersion - } - - if d.MajorVersion != "" { - return d.MajorVersion - } - - if d.LabelVersion != "" { - return d.LabelVersion - } - - return "" -} - -func (d OSSpecifiers) String() string { - if d.IsAny() { - return anyOS - } - var parts []string - for _, v := range d { - parts = append(parts, v.String()) - } - return strings.Join(parts, ", ") -} - -func (d OSSpecifiers) IsAny() bool { - if len(d) == 0 { - return true - } - if len(d) == 1 && d[0] == AnyOSSpecified { - return true - } - return false -} - -func (d OSSpecifier) matchesVersionPattern(pattern string) bool { - // check if version or version label matches the given regex - r, err := regexp.Compile(pattern) - if err != nil { - log.Tracef("failed to compile distro specifier regex pattern %q: %v", pattern, err) - return false - } - - if r.MatchString(d.version()) { - return true - } - - if d.LabelVersion != "" { - return r.MatchString(d.LabelVersion) - } - return false -} - type AffectedPackageStoreWriter interface { AddAffectedPackages(packages ...*AffectedPackageHandle) error } @@ -201,18 +94,19 @@ type AffectedPackageStoreReader interface { type affectedPackageStore struct { db *gorm.DB blobStore *blobStore + osStore *operatingSystemStore } -func newAffectedPackageStore(db *gorm.DB, bs *blobStore) *affectedPackageStore { +func newAffectedPackageStore(db *gorm.DB, bs *blobStore, oss *operatingSystemStore) *affectedPackageStore { return &affectedPackageStore{ db: db, blobStore: bs, + osStore: oss, } } func (s *affectedPackageStore) AddAffectedPackages(packages ...*AffectedPackageHandle) error { - omit := []string{"OperatingSystem"} - if err := s.addOs(packages...); err != nil { + if err := s.osStore.addOsFromPackages(packages...); err != nil { return fmt.Errorf("unable to add affected package OS: %w", err) } @@ -221,6 +115,7 @@ func (s *affectedPackageStore) AddAffectedPackages(packages ...*AffectedPackageH return fmt.Errorf("unable to add affected packages: %w", err) } + omit := []string{"OperatingSystem"} if !hasCpes { omit = 
append(omit, "Package") } @@ -302,61 +197,6 @@ func (s *affectedPackageStore) addPackages(packages ...*AffectedPackageHandle) ( return hasCPEs, nil } -func (s *affectedPackageStore) addOs(packages ...*AffectedPackageHandle) error { // nolint:dupl - cacheInst, ok := cacheFromContext(s.db.Statement.Context) - if !ok { - return fmt.Errorf("unable to fetch OS cache from context") - } - - var final []*OperatingSystem - byCacheKey := make(map[string][]*OperatingSystem) - for _, p := range packages { - if p.OperatingSystem != nil { - p.OperatingSystem.clean() - key := p.OperatingSystem.cacheKey() - if existingID, ok := cacheInst.getID(p.OperatingSystem); ok { - // seen in a previous transaction... - p.OperatingSystemID = &existingID - } else if _, ok := byCacheKey[key]; !ok { - // not seen within this transaction - final = append(final, p.OperatingSystem) - } - byCacheKey[key] = append(byCacheKey[key], p.OperatingSystem) - } - } - - if len(final) == 0 { - return nil - } - - if err := s.db.Create(final).Error; err != nil { - return fmt.Errorf("unable to create OS records: %w", err) - } - - // update the cache with the new records - for _, ref := range final { - cacheInst.set(ref) - } - - // update all references with the IDs from the cache - for _, refs := range byCacheKey { - for _, ref := range refs { - id, ok := cacheInst.getID(ref) - if ok { - ref.setRowID(id) - } - } - } - - // update the parent objects with the FK ID - for _, p := range packages { - if p.OperatingSystem != nil { - p.OperatingSystemID = &p.OperatingSystem.ID - } - } - return nil -} - func (s *affectedPackageStore) GetAffectedPackages(pkg *PackageSpecifier, config *GetAffectedPackageOptions) ([]AffectedPackageHandle, error) { // nolint:funlen if config == nil { config = &GetAffectedPackageOptions{} @@ -509,30 +349,25 @@ func (s *affectedPackageStore) handleVulnerabilityOptions(query *gorm.DB, config } func (s *affectedPackageStore) handleOSOptions(query *gorm.DB, configs []*OSSpecifier) (*gorm.DB, error) { - resolvedDistroMap := make(map[int64]OperatingSystem) + ids := map[int64]struct{}{} if len(configs) == 0 { configs = append(configs, AnyOSSpecified) } var hasAny, hasNone, hasSpecific bool + // process OS specs... for _, config := range configs { switch { - case hasDistroSpecified(config): - curResolvedDistros, err := s.resolveDistro(*config) + case hasOSSpecified(config): + curResolved, err := s.osStore.GetOperatingSystems(*config) if err != nil { - return nil, fmt.Errorf("unable to resolve distro: %w", err) + return nil, fmt.Errorf("unable to resolve operating system: %w", err) } - switch { - case len(curResolvedDistros) == 0: - return nil, ErrOSNotPresent - case len(curResolvedDistros) > 1 && !config.AllowMultiple: - return nil, ErrMultipleOSMatches - } hasSpecific = true - for _, d := range curResolvedDistros { - resolvedDistroMap[int64(d.ID)] = d + for _, d := range curResolved { + ids[int64(d.ID)] = struct{}{} } case config == AnyOSSpecified: // TODO: one enhancement we may want to do later is "has OS defined but is not specific" which this does NOT cover. This is "may or may not have an OS defined" which is different. 
@@ -543,177 +378,24 @@ func (s *affectedPackageStore) handleOSOptions(query *gorm.DB, configs []*OSSpec } if (hasAny || hasNone) && hasSpecific { - return nil, fmt.Errorf("cannot mix specific distro with any or none distro specifiers") + return nil, fmt.Errorf("cannot mix specific OS with 'any' or 'none' OS specifiers") } - var resolvedDistros []OperatingSystem switch { case hasAny: return query, nil case hasNone: return query.Where("operating_system_id IS NULL"), nil - case hasSpecific: - for _, d := range resolvedDistroMap { - resolvedDistros = append(resolvedDistros, d) - } - sort.Slice(resolvedDistros, func(i, j int) bool { - return resolvedDistros[i].ID < resolvedDistros[j].ID - }) - } - - query = query.Joins("JOIN operating_systems ON affected_package_handles.operating_system_id = operating_systems.id") - - if len(resolvedDistros) > 0 { - ids := make([]ID, len(resolvedDistros)) - for i, d := range resolvedDistros { - ids[i] = d.ID - } - query = query.Where("operating_systems.id IN ?", ids) } - return query, nil -} - -func (s *affectedPackageStore) resolveDistro(d OSSpecifier) ([]OperatingSystem, error) { - if d.Name == "" && d.LabelVersion == "" { - return nil, ErrMissingOSIdentification + // we were told to filter by specific OSes but found no matching OSes... + if len(ids) == 0 { + return nil, ErrOSNotPresent } - // search for aliases for the given distro; we intentionally map some OSs to other OSs in terms of - // vulnerability (e.g. `centos` is an alias for `rhel`). If an alias is found always use that alias in - // searches (there will never be anything in the DB for aliased distros). - if err := s.applyOSAlias(&d); err != nil { - return nil, err - } - - query := s.db.Model(&OperatingSystem{}) - - if d.Name != "" { - query = query.Where("name = ? collate nocase OR release_id = ? collate nocase", d.Name, d.Name) - } - - if d.LabelVersion != "" { - query = query.Where("codename = ? collate nocase OR label_version = ? collate nocase", d.LabelVersion, d.LabelVersion) - } + query = query.Where("affected_package_handles.operating_system_id IN ?", maps.Keys(ids)) - return s.searchForDistroVersionVariants(query, d) -} - -func (s *affectedPackageStore) applyOSAlias(d *OSSpecifier) error { - if d.Name == "" { - return nil - } - - var aliases []OperatingSystemSpecifierOverride - err := s.db.Where("alias = ? 
collate nocase", d.Name).Find(&aliases).Error - if err != nil { - if !errors.Is(err, gorm.ErrRecordNotFound) { - return fmt.Errorf("failed to resolve alias for distro %q: %w", d.Name, err) - } - return nil - } - - var alias *OperatingSystemSpecifierOverride - - for _, a := range aliases { - if a.Codename != "" && a.Codename != d.LabelVersion { - continue - } - - if a.Version != "" && a.Version != d.version() { - continue - } - - if a.VersionPattern != "" && !d.matchesVersionPattern(a.VersionPattern) { - continue - } - - alias = &a - break - } - - if alias == nil { - return nil - } - - if alias.ReplacementName != nil { - d.Name = *alias.ReplacementName - } - - if alias.Rolling { - d.MajorVersion = "" - d.MinorVersion = "" - } - - if alias.ReplacementMajorVersion != nil { - d.MajorVersion = *alias.ReplacementMajorVersion - } - - if alias.ReplacementMinorVersion != nil { - d.MinorVersion = *alias.ReplacementMinorVersion - } - - if alias.ReplacementLabelVersion != nil { - d.LabelVersion = *alias.ReplacementLabelVersion - } - - return nil -} - -func (s *affectedPackageStore) searchForDistroVersionVariants(query *gorm.DB, d OSSpecifier) ([]OperatingSystem, error) { - var allOs []OperatingSystem - - handleQuery := func(q *gorm.DB, desc string) ([]OperatingSystem, error) { - err := q.Find(&allOs).Error - if err == nil { - return allOs, nil - } - if !errors.Is(err, gorm.ErrRecordNotFound) { - return nil, fmt.Errorf("failed to query distro by %s: %w", desc, err) - } - return nil, nil - } - - if d.MajorVersion == "" && d.MinorVersion == "" { - return handleQuery(query, "name and codename only") - } - - // search by the most specific criteria first, then fallback - d.MajorVersion = strings.TrimPrefix(d.MajorVersion, "0") - d.MinorVersion = strings.TrimPrefix(d.MinorVersion, "0") - - var result []OperatingSystem - var err error - if d.MajorVersion != "" { - if d.MinorVersion != "" { - // non-empty major and minor versions - specificQuery := query.Session(&gorm.Session{}).Where("major_version = ? AND minor_version = ?", d.MajorVersion, d.MinorVersion) - result, err = handleQuery(specificQuery, "major and minor versions") - if err != nil || len(result) > 0 { - return result, err - } - } - - // fallback to major version only, requiring the minor version to be blank. Note: it is important that we don't - // match on any record with the given major version, we must only match on records that are intentionally empty - // minor version. For instance, the DB may have rhel 8.1, 8.2, 8.3, 8.4, etc. We don't want to arbitrarily match - // on one of these or match even the latest version, as even that may yield incorrect vulnerability matching - // results. We are only intending to allow matches for when the vulnerability data is only specified at the major version level. - majorExclusiveQuery := query.Session(&gorm.Session{}).Where("major_version = ? 
AND minor_version = ?", d.MajorVersion, "") - result, err = handleQuery(majorExclusiveQuery, "exclusively major version") - if err != nil || len(result) > 0 { - return result, err - } - - // fallback to major version for any minor version - majorQuery := query.Session(&gorm.Session{}).Where("major_version = ?", d.MajorVersion) - result, err = handleQuery(majorQuery, "major version with any minor version") - if err != nil || len(result) > 0 { - return result, err - } - } - - return allOs, nil + return query, nil } func (s *affectedPackageStore) handlePreload(query *gorm.DB, config GetAffectedPackageOptions) *gorm.DB { @@ -779,7 +461,7 @@ func queryCPEAttributeScope(query *gorm.DB, value string, dbColumn string, allow return query.Where(fmt.Sprintf("%s = ? collate nocase", dbColumn), value) } -func hasDistroSpecified(d *OSSpecifier) bool { +func hasOSSpecified(d *OSSpecifier) bool { if d == AnyOSSpecified { return false } diff --git a/grype/db/v6/affected_package_store_test.go b/grype/db/v6/affected_package_store_test.go index 11ebb09e65e..606868fc9c7 100644 --- a/grype/db/v6/affected_package_store_test.go +++ b/grype/db/v6/affected_package_store_test.go @@ -137,7 +137,8 @@ func defaultAffectedPackageHandlePreloadCases() []affectedPackageHandlePreloadCo func TestAffectedPackageStore_AddAffectedPackages(t *testing.T) { setupAffectedPackageStore := func(t *testing.T) *affectedPackageStore { db := setupTestStore(t).db - return newAffectedPackageStore(db, newBlobStore(db)) + bs := newBlobStore(db) + return newAffectedPackageStore(db, bs, newOperatingSystemStore(db, bs)) } setupTestStoreWithPackages := func(t *testing.T) (*AffectedPackageHandle, *AffectedPackageHandle, *affectedPackageStore) { @@ -430,7 +431,8 @@ func TestAffectedPackageStore_AddAffectedPackages(t *testing.T) { func TestAffectedPackageStore_GetAffectedPackages_ByCPE(t *testing.T) { db := setupTestStore(t).db bs := newBlobStore(db) - s := newAffectedPackageStore(db, bs) + oss := newOperatingSystemStore(db, bs) + s := newAffectedPackageStore(db, bs, oss) cpe1 := Cpe{Part: "a", Vendor: "vendor1", Product: "product1"} cpe2 := Cpe{Part: "a", Vendor: "vendor2", Product: "product2"} @@ -595,7 +597,8 @@ func TestAffectedPackageStore_GetAffectedPackages_ByCPE(t *testing.T) { func TestAffectedPackageStore_GetAffectedPackages_CaseInsensitive(t *testing.T) { db := setupTestStore(t).db bs := newBlobStore(db) - s := newAffectedPackageStore(db, bs) + oss := newOperatingSystemStore(db, bs) + s := newAffectedPackageStore(db, bs, oss) cpe1 := Cpe{Part: "a", Vendor: "Vendor1", Product: "Product1"} // capitalized pkg1 := &AffectedPackageHandle{ @@ -734,7 +737,8 @@ func TestAffectedPackageStore_GetAffectedPackages_CaseInsensitive(t *testing.T) func TestAffectedPackageStore_GetAffectedPackages_MultipleVulnerabilitySpecs(t *testing.T) { db := setupTestStore(t).db bs := newBlobStore(db) - s := newAffectedPackageStore(db, bs) + oss := newOperatingSystemStore(db, bs) + s := newAffectedPackageStore(db, bs, oss) cpe1 := Cpe{Part: "a", Vendor: "vendor1", Product: "product1"} cpe2 := Cpe{Part: "a", Vendor: "vendor2", Product: "product2"} @@ -789,7 +793,8 @@ func TestAffectedPackageStore_GetAffectedPackages_MultipleVulnerabilitySpecs(t * func TestAffectedPackageStore_GetAffectedPackages(t *testing.T) { db := setupTestStore(t).db bs := newBlobStore(db) - s := newAffectedPackageStore(db, bs) + oss := newOperatingSystemStore(db, bs) + s := newAffectedPackageStore(db, bs, oss) pkg2d1 := testDistro1AffectedPackage2Handle() pkg2 := 
testNonDistroAffectedPackage2Handle() @@ -817,29 +822,16 @@ func TestAffectedPackageStore_GetAffectedPackages(t *testing.T) { expected: []AffectedPackageHandle{*pkg2d1}, }, { - name: "distro major version only (allow multiple)", + name: "distro major version only", pkg: pkgFromName(pkg2d1.Package.Name), options: &GetAffectedPackageOptions{ OSs: []*OSSpecifier{{ - Name: "ubuntu", - MajorVersion: "20", - AllowMultiple: true, + Name: "ubuntu", + MajorVersion: "20", }}, }, expected: []AffectedPackageHandle{*pkg2d1, *pkg2d2}, }, - { - name: "distro major version only (default)", - pkg: pkgFromName(pkg2d1.Package.Name), - options: &GetAffectedPackageOptions{ - OSs: []*OSSpecifier{{ - Name: "ubuntu", - MajorVersion: "20", - AllowMultiple: false, - }}, - }, - wantErr: expectErrIs(t, ErrMultipleOSMatches), - }, { name: "distro codename", pkg: pkgFromName(pkg2d1.Package.Name), @@ -956,7 +948,8 @@ func TestAffectedPackageStore_GetAffectedPackages(t *testing.T) { func TestAffectedPackageStore_ApplyPackageAlias(t *testing.T) { db := setupTestStore(t).db bs := newBlobStore(db) - s := newAffectedPackageStore(db, bs) + oss := newOperatingSystemStore(db, bs) + s := newAffectedPackageStore(db, bs, oss) tests := []struct { name string @@ -992,376 +985,6 @@ func TestAffectedPackageStore_ApplyPackageAlias(t *testing.T) { } } -func TestAffectedPackageStore_ResolveDistro(t *testing.T) { - // we always preload the OS aliases into the DB when staging for writing - db := setupTestStore(t).db - bs := newBlobStore(db) - s := newAffectedPackageStore(db, bs) - - ubuntu2004 := &OperatingSystem{Name: "ubuntu", ReleaseID: "ubuntu", MajorVersion: "20", MinorVersion: "04", LabelVersion: "focal"} - ubuntu2010 := &OperatingSystem{Name: "ubuntu", MajorVersion: "20", MinorVersion: "10", LabelVersion: "groovy"} - rhel8 := &OperatingSystem{Name: "rhel", ReleaseID: "rhel", MajorVersion: "8"} - rhel81 := &OperatingSystem{Name: "rhel", ReleaseID: "rhel", MajorVersion: "8", MinorVersion: "1"} - debian10 := &OperatingSystem{Name: "debian", ReleaseID: "debian", MajorVersion: "10"} - alpine318 := &OperatingSystem{Name: "alpine", ReleaseID: "alpine", MajorVersion: "3", MinorVersion: "18"} - alpineEdge := &OperatingSystem{Name: "alpine", ReleaseID: "alpine", LabelVersion: "edge"} - debianTrixie := &OperatingSystem{Name: "debian", ReleaseID: "debian", LabelVersion: "trixie"} - debian7 := &OperatingSystem{Name: "debian", ReleaseID: "debian", MajorVersion: "7", LabelVersion: "wheezy"} - wolfi := &OperatingSystem{Name: "wolfi", ReleaseID: "wolfi", MajorVersion: "20230201"} - arch := &OperatingSystem{Name: "arch", ReleaseID: "arch", MajorVersion: "20241110", MinorVersion: "0"} - oracle5 := &OperatingSystem{Name: "oracle", ReleaseID: "ol", MajorVersion: "5"} - oracle6 := &OperatingSystem{Name: "oracle", ReleaseID: "ol", MajorVersion: "6"} - amazon2 := &OperatingSystem{Name: "amazon", ReleaseID: "amzn", MajorVersion: "2"} - rocky8 := &OperatingSystem{Name: "rocky", ReleaseID: "rocky", MajorVersion: "8"} // should not be matched - alma8 := &OperatingSystem{Name: "almalinux", ReleaseID: "almalinux", MajorVersion: "8"} // should not be matched - - operatingSystems := []*OperatingSystem{ - ubuntu2004, - ubuntu2010, - rhel8, - rhel81, - debian10, - alpine318, - alpineEdge, - debianTrixie, - debian7, - wolfi, - arch, - oracle5, - oracle6, - amazon2, - rocky8, - alma8, - } - require.NoError(t, db.Create(&operatingSystems).Error) - - tests := []struct { - name string - distro OSSpecifier - expected []OperatingSystem - expectErr 
require.ErrorAssertionFunc - }{ - { - name: "specific distro with major and minor version", - distro: OSSpecifier{ - Name: "ubuntu", - MajorVersion: "20", - MinorVersion: "04", - }, - expected: []OperatingSystem{*ubuntu2004}, - }, - { - name: "specific distro with major and minor version (missing left padding)", - distro: OSSpecifier{ - Name: "ubuntu", - MajorVersion: "20", - MinorVersion: "4", - }, - expected: []OperatingSystem{*ubuntu2004}, - }, - { - name: "alias resolution with major version", - distro: OSSpecifier{ - Name: "centos", - MajorVersion: "8", - }, - expected: []OperatingSystem{*rhel8}, - }, - { - name: "alias resolution with major and minor version", - distro: OSSpecifier{ - Name: "centos", - MajorVersion: "8", - MinorVersion: "1", - }, - expected: []OperatingSystem{*rhel81}, - }, - { - name: "distro with major version only", - distro: OSSpecifier{ - Name: "debian", - MajorVersion: "10", - }, - expected: []OperatingSystem{*debian10}, - }, - { - name: "codename resolution", - distro: OSSpecifier{ - Name: "ubuntu", - LabelVersion: "focal", - }, - expected: []OperatingSystem{*ubuntu2004}, - }, - { - name: "codename and version info", - distro: OSSpecifier{ - Name: "ubuntu", - MajorVersion: "20", - MinorVersion: "04", - LabelVersion: "focal", - }, - expected: []OperatingSystem{*ubuntu2004}, - }, - { - name: "conflicting codename and version info", - distro: OSSpecifier{ - Name: "ubuntu", - MajorVersion: "20", - MinorVersion: "04", - LabelVersion: "fake", - }, - }, - { - name: "alpine edge version", - distro: OSSpecifier{ - Name: "alpine", - MajorVersion: "3", - MinorVersion: "21", - LabelVersion: "3.21.0_alpha20240807", - }, - expected: []OperatingSystem{*alpineEdge}, - }, - { - name: "arch rolling variant", - distro: OSSpecifier{ - Name: "arch", - }, - expected: []OperatingSystem{*arch}, - }, - { - name: "wolfi rolling variant", - distro: OSSpecifier{ - Name: "wolfi", - MajorVersion: "20221018", - }, - expected: []OperatingSystem{*wolfi}, - }, - { - name: "debian by codename for rolling alias", - distro: OSSpecifier{ - Name: "debian", - MajorVersion: "13", - LabelVersion: "trixie", - }, - expected: []OperatingSystem{*debianTrixie}, - }, - { - name: "debian by codename", - distro: OSSpecifier{ - Name: "debian", - LabelVersion: "wheezy", - }, - expected: []OperatingSystem{*debian7}, - }, - { - name: "debian by major version", - distro: OSSpecifier{ - Name: "debian", - MajorVersion: "7", - }, - expected: []OperatingSystem{*debian7}, - }, - { - name: "debian by major.minor version", - distro: OSSpecifier{ - Name: "debian", - MajorVersion: "7", - MinorVersion: "2", - }, - expected: []OperatingSystem{*debian7}, - }, - { - name: "alpine with major and minor version", - distro: OSSpecifier{ - Name: "alpine", - MajorVersion: "3", - MinorVersion: "18", - }, - expected: []OperatingSystem{*alpine318}, - }, - { - name: "lookup by release ID (not name)", - distro: OSSpecifier{ - Name: "ol", - MajorVersion: "5", - }, - expected: []OperatingSystem{*oracle5}, - }, - { - name: "lookup by non-standard name (oraclelinux)", - distro: OSSpecifier{ - Name: "oraclelinux", // based on the grype distro names - MajorVersion: "5", - }, - expected: []OperatingSystem{*oracle5}, - }, - { - name: "lookup by non-standard name (amazonlinux)", - distro: OSSpecifier{ - Name: "amazonlinux", // based on the grype distro names - MajorVersion: "2", - }, - expected: []OperatingSystem{*amazon2}, - }, - { - name: "lookup by non-standard name (oracle)", - distro: OSSpecifier{ - Name: "oracle", - MajorVersion: "5", - }, 
- expected: []OperatingSystem{*oracle5}, - }, - { - name: "lookup by non-standard name (amazon)", - distro: OSSpecifier{ - Name: "amazon", - MajorVersion: "2", - }, - expected: []OperatingSystem{*amazon2}, - }, - { - name: "lookup by non-standard name (rocky)", - distro: OSSpecifier{ - Name: "rocky", - MajorVersion: "8", - }, - expected: []OperatingSystem{*rhel8}, - }, - { - name: "lookup by non-standard name (rockylinux)", - distro: OSSpecifier{ - Name: "rockylinux", - MajorVersion: "8", - }, - expected: []OperatingSystem{*rhel8}, - }, - { - name: "lookup by non-standard name (alma)", - distro: OSSpecifier{ - Name: "alma", - MajorVersion: "8", - }, - expected: []OperatingSystem{*rhel8}, - }, - { - name: "lookup by non-standard name (almalinux)", - distro: OSSpecifier{ - Name: "almalinux", - MajorVersion: "8", - }, - expected: []OperatingSystem{*rhel8}, - }, - { - name: "missing distro name", - distro: OSSpecifier{ - MajorVersion: "8", - }, - expectErr: expectErrIs(t, ErrMissingOSIdentification), - }, - { - name: "nonexistent distro", - distro: OSSpecifier{ - Name: "madeup", - MajorVersion: "99", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if tt.expectErr == nil { - tt.expectErr = require.NoError - } - result, err := s.resolveDistro(tt.distro) - tt.expectErr(t, err) - if err != nil { - return - } - - if diff := cmp.Diff(tt.expected, result, cmpopts.EquateEmpty()); diff != "" { - t.Errorf("unexpected result (-want +got):\n%s", diff) - } - }) - } -} - -func TestDistroSpecifier_String(t *testing.T) { - tests := []struct { - name string - distro *OSSpecifier - expected string - }{ - { - name: "nil distro", - distro: AnyOSSpecified, - expected: "any", - }, - { - name: "no distro specified", - distro: NoOSSpecified, - expected: "none", - }, - { - name: "only name specified", - distro: &OSSpecifier{ - Name: "ubuntu", - }, - expected: "ubuntu", - }, - { - name: "name and major version specified", - distro: &OSSpecifier{ - Name: "ubuntu", - MajorVersion: "20", - }, - expected: "ubuntu@20", - }, - { - name: "name, major, and minor version specified", - distro: &OSSpecifier{ - Name: "ubuntu", - MajorVersion: "20", - MinorVersion: "04", - }, - expected: "ubuntu@20.04", - }, - { - name: "name, major version, and codename specified", - distro: &OSSpecifier{ - Name: "ubuntu", - MajorVersion: "20", - LabelVersion: "focal", - }, - expected: "ubuntu@20 (focal)", - }, - { - name: "name and codename specified", - distro: &OSSpecifier{ - Name: "ubuntu", - LabelVersion: "focal", - }, - expected: "ubuntu@focal", - }, - { - name: "name, major version, minor version, and codename specified", - distro: &OSSpecifier{ - Name: "ubuntu", - MajorVersion: "20", - MinorVersion: "04", - LabelVersion: "focal", - }, - expected: "ubuntu@20.04", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := tt.distro.String() - require.Equal(t, tt.expected, result) - }) - } -} - func testDistro1AffectedPackage2Handle() *AffectedPackageHandle { now := time.Date(2023, 1, 1, 3, 4, 5, 0, time.UTC) later := now.Add(time.Hour * 200) diff --git a/grype/db/v6/data.go b/grype/db/v6/data.go index 42117c0d920..ee80d758259 100644 --- a/grype/db/v6/data.go +++ b/grype/db/v6/data.go @@ -22,11 +22,13 @@ func KnownOperatingSystemSpecifierOverrides() []OperatingSystemSpecifierOverride {Alias: "wolfi", Rolling: true}, {Alias: "chainguard", Rolling: true}, {Alias: "arch", Rolling: true}, + {Alias: "minimos", Rolling: true}, {Alias: "archlinux", ReplacementName: strRef("arch"), 
Rolling: true}, // non-standard, but common (dockerhub uses "archlinux") {Alias: "oracle", ReplacementName: strRef("ol")}, // non-standard, but common {Alias: "oraclelinux", ReplacementName: strRef("ol")}, // non-standard, but common (dockerhub uses "oraclelinux") {Alias: "amazon", ReplacementName: strRef("amzn")}, // non-standard, but common {Alias: "amazonlinux", ReplacementName: strRef("amzn")}, // non-standard, but common (dockerhub uses "amazonlinux") + {Alias: "echo", Rolling: true}, // TODO: trixie is a placeholder for now, but should be updated to sid when the time comes // this needs to be automated, but isn't clear how to do so since you'll see things like this: // @@ -42,7 +44,7 @@ func KnownOperatingSystemSpecifierOverrides() []OperatingSystemSpecifierOverride // // depending where the team is during the development cycle you will see different behavior, making automating // this a little challenging. - {Alias: "debian", Codename: "trixie", Rolling: true}, // is currently sid, which is considered rolling + {Alias: "debian", Codename: "trixie", Rolling: true, ReplacementLabelVersion: strRef("unstable")}, // is currently sid, which is considered rolling } } @@ -72,6 +74,12 @@ func KnownPackageSpecifierOverrides() []PackageSpecifierOverride { // jenkins plugins are a special case since they are always considered to be within the java ecosystem {Ecosystem: string(pkg.JenkinsPluginPkg), ReplacementEcosystem: ptr(string(pkg.JavaPkg))}, + + // legacy cases + {Ecosystem: "pecl", ReplacementEcosystem: ptr(string(pkg.PhpPeclPkg))}, + {Ecosystem: "kb", ReplacementEcosystem: ptr(string(pkg.KbPkg))}, + {Ecosystem: "dpkg", ReplacementEcosystem: ptr(string(pkg.DebPkg))}, + {Ecosystem: "apkg", ReplacementEcosystem: ptr(string(pkg.ApkPkg))}, } // remap package URL types to syft package types diff --git a/grype/db/v6/db.go b/grype/db/v6/db.go index 84630538302..e852f49469f 100644 --- a/grype/db/v6/db.go +++ b/grype/db/v6/db.go @@ -9,6 +9,7 @@ import ( "gorm.io/gorm" "github.com/anchore/grype/grype/db/internal/gormadapter" + "github.com/anchore/grype/grype/vulnerability" "github.com/anchore/grype/internal/log" ) @@ -53,10 +54,10 @@ type Reader interface { ProviderStoreReader VulnerabilityStoreReader VulnerabilityDecoratorStoreReader + OperatingSystemStoreReader AffectedPackageStoreReader AffectedCPEStoreReader io.Closer - getDB() *gorm.DB attachBlobValue(...blobable) error } @@ -72,7 +73,7 @@ type Writer interface { type Curator interface { Reader() (Reader, error) - Status() Status + Status() vulnerability.ProviderStatus Delete() error Update() (bool, error) Import(dbArchivePath string) error @@ -101,7 +102,9 @@ func Hydrater() func(string) error { // we don't pass any data initialization here because the data is already in the db archive and we do not want // to affect the entries themselves, only indexes and schema. 
s, err := newStore(Config{DBDirPath: path}, false, true) - log.CloseAndLogError(s, path) + if s != nil { + log.CloseAndLogError(s, path) + } return err } } diff --git a/grype/db/v6/db_metadata_store_test.go b/grype/db/v6/db_metadata_store_test.go index 8eb6ef12b21..048f007b26b 100644 --- a/grype/db/v6/db_metadata_store_test.go +++ b/grype/db/v6/db_metadata_store_test.go @@ -19,6 +19,17 @@ func TestDbMetadataStore_empty(t *testing.T) { require.NotNil(t, actualMetadata) } +func TestDbMetadataStore_oldDb(t *testing.T) { + db := setupTestStore(t).db + require.NoError(t, db.Where("true").Model(DBMetadata{}).Update("Model", "5").Error) // old database version + s := newDBMetadataStore(db) + + // attempt to fetch a non-existent record + actualMetadata, err := s.GetDBMetadata() + require.NoError(t, err) + require.NotNil(t, actualMetadata) +} + func TestDbMetadataStore(t *testing.T) { s := newDBMetadataStore(setupTestStore(t).db) diff --git a/grype/db/v6/distribution/client.go b/grype/db/v6/distribution/client.go index 171f575880e..92ec2e4afb7 100644 --- a/grype/db/v6/distribution/client.go +++ b/grype/db/v6/distribution/client.go @@ -40,7 +40,8 @@ type Config struct { type Client interface { Latest() (*LatestDocument, error) IsUpdateAvailable(current *v6.Description) (*Archive, error) - Download(archive Archive, dest string, downloadProgress *progress.Manual) (string, error) + ResolveArchiveURL(archive Archive) (string, error) + Download(url, dest string, downloadProgress *progress.Manual) (string, error) } type client struct { @@ -117,8 +118,8 @@ func (c client) isUpdateAvailable(current *v6.Description, candidate *LatestDocu } // compare created data to current db date - if isSupersededBy(current, candidate.Archive.Description) { - log.Debugf("database update available: %s", candidate.Archive.Description) + if isSupersededBy(current, candidate.Description) { + log.Debugf("database update available: %s", candidate.Description) return &candidate.Archive, message } @@ -126,23 +127,10 @@ func (c client) isUpdateAvailable(current *v6.Description, candidate *LatestDocu return nil, message } -func (c client) Download(archive Archive, dest string, downloadProgress *progress.Manual) (string, error) { - defer downloadProgress.SetCompleted() - - if err := os.MkdirAll(dest, 0700); err != nil { - return "", fmt.Errorf("unable to create db download root dir: %w", err) - } - - // note: as much as I'd like to use the afero FS abstraction here, the go-getter library does not support it - tempDir, err := os.MkdirTemp(dest, "grype-db-download") - if err != nil { - return "", fmt.Errorf("unable to create db client temp dir: %w", err) - } - +func (c client) ResolveArchiveURL(archive Archive) (string, error) { // download the db to the temp dir u, err := url.Parse(c.latestURL()) if err != nil { - removeAllOrLog(afero.NewOsFs(), tempDir) return "", fmt.Errorf("unable to parse db URL %q: %w", c.latestURL(), err) } @@ -156,8 +144,24 @@ func (c client) Download(archive Archive, dest string, downloadProgress *progres } u.RawQuery = query.Encode() + return u.String(), nil +} + +func (c client) Download(archiveURL, dest string, downloadProgress *progress.Manual) (string, error) { + defer downloadProgress.SetCompleted() + + if err := os.MkdirAll(dest, 0700); err != nil { + return "", fmt.Errorf("unable to create db download root dir: %w", err) + } + + // note: as much as I'd like to use the afero FS abstraction here, the go-getter library does not support it + tempDir, err := os.MkdirTemp(dest, "grype-db-download") + if err 
!= nil { + return "", fmt.Errorf("unable to create db client temp dir: %w", err) + } + // go-getter will automatically extract all files within the archive to the temp dir - err = c.dbDownloader.GetToDir(tempDir, u.String(), downloadProgress) + err = c.dbDownloader.GetToDir(tempDir, archiveURL, downloadProgress) if err != nil { removeAllOrLog(afero.NewOsFs(), tempDir) return "", fmt.Errorf("unable to download db: %w", err) @@ -173,6 +177,7 @@ func (c client) Latest() (*LatestDocument, error) { return nil, fmt.Errorf("unable to create listing temp file: %w", err) } defer func() { + log.CloseAndLogError(tempFile, tempFile.Name()) err := c.fs.RemoveAll(tempFile.Name()) if err != nil { log.WithFields("error", err, "file", tempFile.Name()).Errorf("failed to remove file") diff --git a/grype/db/v6/distribution/client_test.go b/grype/db/v6/distribution/client_test.go index 183302e0d96..f8f72bbf1b3 100644 --- a/grype/db/v6/distribution/client_test.go +++ b/grype/db/v6/distribution/client_test.go @@ -131,10 +131,6 @@ func TestClient_Latest(t *testing.T) { func TestClient_Download(t *testing.T) { destDir := t.TempDir() - archive := &Archive{ - Path: "path/to/archive.tar.gz", - Checksum: "checksum123", - } setup := func() (Client, *mockGetter) { mg := new(mockGetter) @@ -152,9 +148,10 @@ func TestClient_Download(t *testing.T) { t.Run("successful download", func(t *testing.T) { c, mg := setup() - mg.On("GetToDir", mock.Anything, "http://localhost:8080/path/to/archive.tar.gz?checksum=checksum123", mock.Anything).Return(nil) + url := "http://localhost:8080/path/to/archive.tar.gz?checksum=checksum123" + mg.On("GetToDir", mock.Anything, url, mock.Anything).Return(nil) - tempDir, err := c.Download(*archive, destDir, &progress.Manual{}) + tempDir, err := c.Download(url, destDir, &progress.Manual{}) require.NoError(t, err) require.True(t, len(tempDir) > 0) @@ -163,9 +160,10 @@ func TestClient_Download(t *testing.T) { t.Run("download error", func(t *testing.T) { c, mg := setup() - mg.On("GetToDir", mock.Anything, "http://localhost:8080/path/to/archive.tar.gz?checksum=checksum123", mock.Anything).Return(errors.New("download failed")) + url := "http://localhost:8080/path/to/archive.tar.gz?checksum=checksum123" + mg.On("GetToDir", mock.Anything, url, mock.Anything).Return(errors.New("download failed")) - tempDir, err := c.Download(*archive, destDir, &progress.Manual{}) + tempDir, err := c.Download(url, destDir, &progress.Manual{}) require.Error(t, err) require.Empty(t, tempDir) require.Contains(t, err.Error(), "unable to download db") @@ -175,10 +173,11 @@ func TestClient_Download(t *testing.T) { t.Run("nested into dir that does not exist", func(t *testing.T) { c, mg := setup() - mg.On("GetToDir", mock.Anything, "http://localhost:8080/path/to/archive.tar.gz?checksum=checksum123", mock.Anything).Return(nil) + url := "http://localhost:8080/path/to/archive.tar.gz?checksum=checksum123" + mg.On("GetToDir", mock.Anything, url, mock.Anything).Return(nil) nestedPath := filepath.Join(destDir, "nested") - tempDir, err := c.Download(*archive, nestedPath, &progress.Manual{}) + tempDir, err := c.Download(url, nestedPath, &progress.Manual{}) require.NoError(t, err) require.True(t, len(tempDir) > 0) diff --git a/grype/db/v6/distribution/latest.go b/grype/db/v6/distribution/latest.go index 176ad3e40bd..fb9e5a1a646 100644 --- a/grype/db/v6/distribution/latest.go +++ b/grype/db/v6/distribution/latest.go @@ -52,7 +52,7 @@ func NewLatestDocument(entries ...Archive) *LatestDocument { // sort from most recent to the least recent 
sort.SliceStable(validEntries, func(i, j int) bool { - return validEntries[i].Description.Built.After(entries[j].Description.Built.Time) + return validEntries[i].Built.After(entries[j].Built.Time) }) return &LatestDocument{ @@ -109,15 +109,15 @@ func (l LatestDocument) Write(writer io.Writer) error { l.Status = LifecycleStatus } - if l.Archive.Path == "" { + if l.Path == "" { return fmt.Errorf("missing archive path") } - if l.Archive.Checksum == "" { + if l.Checksum == "" { return fmt.Errorf("missing archive checksum") } - if l.Archive.Description.Built.Time.IsZero() { + if l.Built.IsZero() { return fmt.Errorf("missing built time") } diff --git a/grype/db/v6/import_metadata.go b/grype/db/v6/import_metadata.go index 1456615a95e..b18e8fa22c0 100644 --- a/grype/db/v6/import_metadata.go +++ b/grype/db/v6/import_metadata.go @@ -18,8 +18,8 @@ import ( const ImportMetadataFileName = "import.json" type ImportMetadata struct { - Digest string `json:"digest"` - + Digest string `json:"digest"` + Source string `json:"source,omitempty"` ClientVersion string `json:"client_version"` } @@ -59,7 +59,7 @@ func CalculateDBDigest(fs afero.Fs, dbFilePath string) (string, error) { return fmt.Sprintf("xxh64:%s", digest), nil } -func WriteImportMetadata(fs afero.Fs, dbDir string) (*ImportMetadata, error) { +func WriteImportMetadata(fs afero.Fs, dbDir, source string) (*ImportMetadata, error) { metadataFilePath := filepath.Join(dbDir, ImportMetadataFileName) f, err := fs.OpenFile(metadataFilePath, os.O_TRUNC|os.O_WRONLY|os.O_CREATE, 0644) if err != nil { @@ -72,10 +72,10 @@ func WriteImportMetadata(fs afero.Fs, dbDir string) (*ImportMetadata, error) { return nil, fmt.Errorf("failed to calculate checksum for DB file: %w", err) } - return writeImportMetadata(f, checksums) + return writeImportMetadata(f, checksums, source) } -func writeImportMetadata(writer io.Writer, checksums string) (*ImportMetadata, error) { +func writeImportMetadata(writer io.Writer, checksums, source string) (*ImportMetadata, error) { if checksums == "" { return nil, fmt.Errorf("checksum is required") } @@ -89,6 +89,7 @@ func writeImportMetadata(writer io.Writer, checksums string) (*ImportMetadata, e doc := ImportMetadata{ Digest: checksums, + Source: source, ClientVersion: schemaver.New(ModelVersion, Revision, Addition).String(), } diff --git a/grype/db/v6/import_metadata_test.go b/grype/db/v6/import_metadata_test.go index d4bc8a58d48..5b1dd13a98e 100644 --- a/grype/db/v6/import_metadata_test.go +++ b/grype/db/v6/import_metadata_test.go @@ -44,9 +44,10 @@ func TestReadImportMetadata(t *testing.T) { }, { name: "valid metadata", - fileContent: `{"digest": "xxh64:testdigest", "client_version": "1.0.0"}`, + fileContent: `{"digest": "xxh64:testdigest", "source": "http://localhost:1234/archive.tar.gz", "client_version": "1.0.0"}`, expectedResult: &ImportMetadata{ Digest: "xxh64:testdigest", + Source: "http://localhost:1234/archive.tar.gz", ClientVersion: "1.0.0", }, }, @@ -106,7 +107,8 @@ func TestWriteImportMetadata(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { var buf bytes.Buffer - claim, err := writeImportMetadata(&buf, tc.checksum) + src := "source!" 
+ claim, err := writeImportMetadata(&buf, tc.checksum, src) tc.wantErr(t, err) if err == nil { @@ -120,6 +122,7 @@ func TestWriteImportMetadata(t *testing.T) { assert.Equal(t, tc.checksum, claim.Digest) assert.Equal(t, tc.expectedVersion, doc.ClientVersion) assert.Equal(t, tc.expectedVersion, claim.ClientVersion) + assert.Equal(t, src, doc.Source) } }) } diff --git a/grype/db/v6/installation/curator.go b/grype/db/v6/installation/curator.go index 5050c2f3fe2..d52c36cb7b7 100644 --- a/grype/db/v6/installation/curator.go +++ b/grype/db/v6/installation/curator.go @@ -5,6 +5,7 @@ import ( "fmt" "os" "path/filepath" + "regexp" "strconv" "strings" "time" @@ -20,6 +21,7 @@ import ( db "github.com/anchore/grype/grype/db/v6" "github.com/anchore/grype/grype/db/v6/distribution" "github.com/anchore/grype/grype/event" + "github.com/anchore/grype/grype/vulnerability" "github.com/anchore/grype/internal/bus" "github.com/anchore/grype/internal/file" "github.com/anchore/grype/internal/log" @@ -116,10 +118,19 @@ func (c curator) Reader() (db.Reader, error) { mon.Set("rehydrating DB") log.Info("rehydrating DB") + // we're not changing the source of the DB, so we just want to use any existing value. + // if the source is empty/does not exist, it will be empty in the new metadata. + var source string + im, err := db.ReadImportMetadata(c.fs, c.config.DBDirectoryPath()) + if err == nil && im != nil { + // ignore errors, as this is just a best-effort to get the source + source = im.Source + } + // this is a condition where an old client imported a DB with additional capabilities than it can handle at hydration. // this could lead to missing indexes and degraded performance now that a newer client is running (that can handle these capabilities). // the only sensible thing to do is to rehydrate the existing DB to ensure indexes are up-to-date with the current client's capabilities. 
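To make the rehydration trigger described in the comment above concrete (together with the LessThan to LessThanOrEqualTo change to isRehydrationNeeded later in this patch): rehydration happens when the on-disk DB was hydrated by a client no newer than the DB schema itself, and the currently running client is newer than that hydrating client. A self-contained sketch with plain version triples standing in for grype's schemaver type; this is an illustration, not the project's implementation. The original hydrate call follows below.

```go
package main

import "fmt"

// version stands in for grype's schemaver triple; illustrative only.
type version [3]int

func (v version) lessThan(o version) bool {
	for i := range v {
		if v[i] != o[i] {
			return v[i] < o[i]
		}
	}
	return false
}

// rehydrationNeeded mirrors the decision described above: the DB was hydrated by a
// client no newer than the DB schema itself, and the running client is newer still.
func rehydrationNeeded(hydratedWith, dbSchema, currentClient version) bool {
	hydratedWithOldClient := !dbSchema.lessThan(hydratedWith) // hydratedWith <= dbSchema
	haveNewerClient := hydratedWith.lessThan(currentClient)
	return hydratedWithOldClient && haveNewerClient
}

func main() {
	// a 6.0.3 client running against a DB hydrated by a 6.0.2 client: rehydrate
	fmt.Println(rehydrationNeeded(version{6, 0, 2}, version{6, 0, 2}, version{6, 0, 3})) // true
	// a client older than the one that hydrated the DB: leave it alone
	fmt.Println(rehydrationNeeded(version{6, 0, 3}, version{6, 0, 3}, version{6, 0, 2})) // false
}
```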
- if err := c.hydrate(c.config.DBDirectoryPath(), mon); err != nil { + if err := c.hydrate(c.config.DBDirectoryPath(), source, mon); err != nil { log.WithFields("error", err).Warn("unable to rehydrate DB") } mon.Set("rehydrated") @@ -139,38 +150,45 @@ func (c curator) Reader() (db.Reader, error) { return s, nil } -func (c curator) Status() db.Status { +func (c curator) Status() vulnerability.ProviderStatus { dbFile := c.config.DBFilePath() - d, err := db.ReadDescription(dbFile) - if err != nil { - return db.Status{ - Path: dbFile, - Err: err, + d, validateErr := db.ReadDescription(dbFile) + if validateErr != nil { + return vulnerability.ProviderStatus{ + Path: dbFile, + Error: validateErr, } } if d == nil { - return db.Status{ - Path: dbFile, - Err: fmt.Errorf("database not found at %q", dbFile), + return vulnerability.ProviderStatus{ + Path: dbFile, + Error: fmt.Errorf("database not found at %q", dbFile), } } - err = c.validateAge(d) - digest, checksumErr := c.validateIntegrity(d) + validateErr = c.validateAge(d) + _, checksumErr := c.validateIntegrity(d) if checksumErr != nil && c.config.ValidateChecksum { - if err != nil { - err = errors.Join(err, checksumErr) + if validateErr != nil { + validateErr = errors.Join(validateErr, checksumErr) } else { - err = checksumErr + validateErr = checksumErr } } - return db.Status{ - Built: db.Time{Time: d.Built.Time}, + var source string + im, readErr := db.ReadImportMetadata(c.fs, c.config.DBDirectoryPath()) + if readErr == nil && im != nil { + // only make a best-effort to get the source + source = im.Source + } + + return vulnerability.ProviderStatus{ + Built: d.Built.Time, SchemaVersion: d.SchemaVersion.String(), + From: source, Path: dbFile, - Checksum: digest, - Err: err, + Error: validateErr, } } @@ -254,6 +272,7 @@ func (c curator) isUpdateCheckAllowed() bool { func (c curator) update(current *db.Description) (*distribution.Archive, error) { mon := newMonitor() defer mon.SetCompleted() + startTime := time.Now() mon.Set("checking for update") update, checkErr := c.client.IsUpdateAvailable(current) @@ -273,14 +292,30 @@ func (c curator) update(current *db.Description) (*distribution.Archive, error) return nil, checkErr } - log.Infof("downloading new vulnerability DB") + log.Info("downloading new vulnerability DB") mon.Set("downloading") - dest, err := c.client.Download(*update, filepath.Dir(c.config.DBRootDir), mon.downloadProgress.Manual) + url, err := c.client.ResolveArchiveURL(*update) + if err != nil { + return nil, fmt.Errorf("unable to resolve vulnerability DB URL: %w", err) + } + + // Ensure parent of DBRootDir exists for the download client to create a temp dir within DBRootDir + // This might be redundant if DBRootDir must already exist, but good for safety. 
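The MkdirAll/Download sequence that follows keeps the download staging directory inside DBRootDir, so that the later activation step is a same-filesystem rename rather than a cross-device copy (the replaceDB comment further down in this patch makes the same point). A minimal sketch of that pattern under those assumptions; the helper name and signature here are hypothetical, not grype's API.

```go
package dbstage

import (
	"fmt"
	"os"
	"path/filepath"
)

// stageAndActivate is an illustrative helper (not grype's code): stage work in a temp
// directory created *under* the destination root, then activate it with a single rename.
// Because the temp dir shares a filesystem with the final path, the rename avoids the
// cross-device (EXDEV) failure mode that renaming out of os.TempDir could hit.
func stageAndActivate(rootDir, finalName string, fill func(dir string) error) (string, error) {
	if err := os.MkdirAll(rootDir, 0o700); err != nil {
		return "", fmt.Errorf("unable to create root dir: %w", err)
	}
	tempDir, err := os.MkdirTemp(rootDir, "tmp-staging-")
	if err != nil {
		return "", fmt.Errorf("unable to create staging dir: %w", err)
	}
	if err := fill(tempDir); err != nil { // e.g. download and hydrate the DB
		_ = os.RemoveAll(tempDir) // clean up the staging dir on failure
		return "", err
	}
	final := filepath.Join(rootDir, finalName)
	if err := os.Rename(tempDir, final); err != nil {
		_ = os.RemoveAll(tempDir)
		return "", err
	}
	return final, nil
}
```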
+ if err := os.MkdirAll(c.config.DBRootDir, 0o700); err != nil { + return nil, fmt.Errorf("unable to create db root dir %s for download: %w", c.config.DBRootDir, err) + } + + dest, err := c.client.Download(url, c.config.DBRootDir, mon.downloadProgress.Manual) if err != nil { return nil, fmt.Errorf("unable to update vulnerability database: %w", err) } + + log.WithFields("url", url, "time", time.Since(startTime)).Info("downloaded vulnerability DB") + mon.downloadProgress.SetCompleted() - if err = c.activate(dest, mon); err != nil { + if err = c.activate(dest, url, mon); err != nil { + log.Warnf("Failed to activate downloaded database from %s, attempting cleanup of temporary download directory.", dest) + removeAllOrLog(c.fs, dest) return nil, fmt.Errorf("unable to activate new vulnerability database: %w", err) } @@ -308,7 +343,7 @@ func isRehydrationNeeded(fs afero.Fs, dirPath string, currentDBVersion *schemave return false, fmt.Errorf("unable to parse client version from import metadata: %w", err) } - hydratedWithOldClient := clientHydrationVersion.LessThan(*currentDBVersion) + hydratedWithOldClient := clientHydrationVersion.LessThanOrEqualTo(*currentDBVersion) haveNewerClient := clientHydrationVersion.LessThan(currentClientVersion) doRehydrate := hydratedWithOldClient && haveNewerClient @@ -371,7 +406,7 @@ func (c curator) setLastSuccessfulUpdateCheck() { // is a prerequisite for a successful update). filePath := filepath.Join(c.config.DBDirectoryPath(), lastUpdateCheckFileName) - fh, err := c.fs.OpenFile(filePath, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0644) + fh, err := c.fs.OpenFile(filePath, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0o644) if err != nil { log.WithFields("error", err).Trace("unable to write last update check timestamp") return @@ -382,41 +417,57 @@ func (c curator) setLastSuccessfulUpdateCheck() { _, _ = fmt.Fprintf(fh, "%s", time.Now().UTC().Format(time.RFC3339)) } -// Import takes a DB archive file and imports it into the final DB location. -func (c curator) Import(path string) error { +// Import takes a DB file path, archive file path, or URL and imports it into the final DB location. 
+func (c curator) Import(reference string) error { mon := newMonitor() - mon.Set("unarchiving") + mon.Set("preparing") defer mon.SetCompleted() - if err := os.MkdirAll(c.config.DBRootDir, 0700); err != nil { + if err := os.MkdirAll(c.config.DBRootDir, 0o700); err != nil { return fmt.Errorf("unable to create db root dir: %w", err) } - // note: the temp directory is persisted upon download/validation/activation failure to allow for investigation - tempDir, err := os.MkdirTemp(c.config.DBRootDir, fmt.Sprintf("tmp-v%v-import", db.ModelVersion)) - if err != nil { - return fmt.Errorf("unable to create db import temp dir: %w", err) - } + var tempDir, url string + if isURL(reference) { + log.Info("downloading new vulnerability DB") + mon.Set("downloading") + var err error - if strings.HasSuffix(path, ".db") { - // this is a raw DB file, copy it to the temp dir - log.Trace("copying DB") - if err := file.CopyFile(afero.NewOsFs(), path, filepath.Join(tempDir, db.VulnerabilityDBFileName)); err != nil { - return fmt.Errorf("unable to copy DB file: %w", err) + tempDir, err = c.client.Download(reference, c.config.DBRootDir, mon.downloadProgress.Manual) + if err != nil { + return fmt.Errorf("unable to update vulnerability database: %w", err) } + + url = reference } else { - // assume it is an archive - log.Trace("unarchiving DB") - err = archiver.Unarchive(path, tempDir) + // note: the temp directory is persisted upon download/validation/activation failure to allow for investigation + var err error + tempDir, err = os.MkdirTemp(c.config.DBRootDir, fmt.Sprintf("tmp-v%v-import", db.ModelVersion)) if err != nil { - return err + return fmt.Errorf("unable to create db import temp dir: %w", err) + } + + url = "manual import" + + if strings.HasSuffix(reference, ".db") { + // this is a raw DB file, copy it to the temp dir + log.Trace("copying DB") + if err := file.CopyFile(afero.NewOsFs(), reference, filepath.Join(tempDir, db.VulnerabilityDBFileName)); err != nil { + return fmt.Errorf("unable to copy DB file: %w", err) + } + } else { + // assume it is an archive + log.Info("unarchiving DB") + err := archiver.Unarchive(reference, tempDir) + if err != nil { + return err + } } } mon.downloadProgress.SetCompleted() - err = c.activate(tempDir, mon) - if err != nil { + if err := c.activate(tempDir, url, mon); err != nil { removeAllOrLog(c.fs, tempDir) return err } @@ -426,20 +477,31 @@ func (c curator) Import(path string) error { return nil } +var urlPrefixPattern = regexp.MustCompile("^[a-zA-Z]+://") + +func isURL(reference string) bool { + return urlPrefixPattern.MatchString(reference) +} + // activate swaps over the downloaded db to the application directory, calculates the checksum, and records the checksums to a file. 
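To summarize the new Import behavior just above: the reference is treated as a URL when it starts with a scheme (per urlPrefixPattern), as a raw database file when it ends in .db, and as an archive otherwise. A compact sketch of that classification; the helper name and the sample inputs are illustrative only. The activate change described by the comment above follows below.

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

var urlPrefixPattern = regexp.MustCompile(`^[a-zA-Z]+://`)

// classifyReference shows how the Import branches above treat a reference:
// scheme-prefixed strings are downloaded, *.db files are copied as-is, and
// anything else is assumed to be an archive to unpack. Illustrative only.
func classifyReference(ref string) string {
	switch {
	case urlPrefixPattern.MatchString(ref):
		return "download"
	case strings.HasSuffix(ref, ".db"):
		return "copy raw DB file"
	default:
		return "unarchive"
	}
}

func main() {
	for _, ref := range []string{
		"https://example.invalid/v6/vulnerability-db.tar.zst",
		"./vulnerability.db",
		"./vulnerability-db.tar.zst",
	} {
		fmt.Printf("%-52s -> %s\n", ref, classifyReference(ref))
	}
}
```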
-func (c curator) activate(dbDirPath string, mon monitor) error { +func (c curator) activate(dbDirPath, url string, mon monitor) error { defer mon.SetCompleted() - if err := c.hydrate(dbDirPath, mon); err != nil { + startTime := time.Now() + if err := c.hydrate(dbDirPath, url, mon); err != nil { return fmt.Errorf("failed to hydrate database: %w", err) } + log.WithFields("time", time.Since(startTime)).Trace("hydrated db") + startTime = time.Now() + defer func() { log.WithFields("time", time.Since(startTime)).Trace("replaced db") }() + mon.Set("activating") return c.replaceDB(dbDirPath) } -func (c curator) hydrate(dbDirPath string, mon monitor) error { +func (c curator) hydrate(dbDirPath, from string, mon monitor) error { if c.hydrator != nil { mon.Set("hydrating") if err := c.hydrator(dbDirPath); err != nil { @@ -450,7 +512,7 @@ func (c curator) hydrate(dbDirPath string, mon monitor) error { mon.Set("hashing") - doc, err := db.WriteImportMetadata(c.fs, dbDirPath) + doc, err := db.WriteImportMetadata(c.fs, dbDirPath, from) if err != nil { return fmt.Errorf("failed to write checksums file: %w", err) } @@ -473,12 +535,17 @@ func (c curator) replaceDB(dbDirPath string) error { } // ensure parent db directory exists - if err := c.fs.MkdirAll(filepath.Dir(dbDir), 0700); err != nil { + if err = c.fs.MkdirAll(filepath.Dir(dbDir), 0o700); err != nil { return fmt.Errorf("unable to create db parent directory: %w", err) } // activate the new db cache by moving the temp dir to final location + // the rename should be safe because the temp dir is under GRYPE_DB_CACHE_DIR + // and so on the same filesystem as the final location err = c.fs.Rename(dbDirPath, dbDir) + if err != nil { + err = fmt.Errorf("failed to move database directory to activate: %w", err) + } log.WithFields("from", dbDirPath, "to", dbDir, "error", err).Debug("moved database directory to activate") return err } diff --git a/grype/db/v6/installation/curator_test.go b/grype/db/v6/installation/curator_test.go index e207575d9da..e9d3096401e 100644 --- a/grype/db/v6/installation/curator_test.go +++ b/grype/db/v6/installation/curator_test.go @@ -36,8 +36,12 @@ func (m *mockClient) IsUpdateAvailable(current *db.Description) (*distribution.A return args.Get(0).(*distribution.Archive), nil } -func (m *mockClient) Download(archive distribution.Archive, dest string, downloadProgress *progress.Manual) (string, error) { - args := m.Called(archive, dest, downloadProgress) +func (m *mockClient) ResolveArchiveURL(_ distribution.Archive) (string, error) { + return "http://localhost/archive.tar.zst", nil +} + +func (m *mockClient) Download(url, dest string, downloadProgress *progress.Manual) (string, error) { + args := m.Called(url, dest, downloadProgress) return args.String(0), args.Error(1) } @@ -175,7 +179,7 @@ func writeTestDB(t *testing.T, fs afero.Fs, dir string) string { require.NoError(t, rw.SetDBMetadata()) require.NoError(t, rw.Close()) - doc, err := db.WriteImportMetadata(fs, dir) + doc, err := db.WriteImportMetadata(fs, dir, "source") require.NoError(t, err) require.NotNil(t, doc) @@ -725,6 +729,16 @@ func Test_isRehydrationNeeded(t *testing.T) { currentClientVer: schemaver.New(6, 2, 0), expectedResult: false, }, + { + // there are cases where new features will result in new columns, thus an old client downloading and hydrating + // a DB should function, however, when the new client is downloaded it should trigger at least a rehydration + // of the existing DB (in cases where the new DB is not availabl for download yet). 
+ name: "rehydration needed - we have a new client version, with an old DB version", + currentDBVersion: schemaver.New(6, 0, 2), + hydrationClientVer: schemaver.New(6, 0, 2), + currentClientVer: schemaver.New(6, 0, 3), + expectedResult: true, + }, } for _, tt := range tests { @@ -754,6 +768,153 @@ func Test_isRehydrationNeeded(t *testing.T) { } } +func TestCurator_Update_UsesDBRootDirForDownloadTempBase(t *testing.T) { + c := newTestCurator(t) // This sets up c.fs as afero.NewOsFs() rooted in t.TempDir() + mc := c.client.(*mockClient) + + // This is the path that the mocked Download method will return. + // It simulates a temporary directory created by the download client within DBRootDir. + expectedDownloadedContentPath := filepath.Join(c.config.DBRootDir, "temp-downloaded-db-content-123") + + // Pre-create this directory and make it look like a valid DB source for the hydrator and replaceDB. + require.NoError(t, c.fs.MkdirAll(expectedDownloadedContentPath, 0755)) + // Write minimal valid DB metadata so that hydration/activation can proceed far enough. + // Using existing helpers to create a semblance of a DB. + writeTestDB(t, c.fs, expectedDownloadedContentPath) // This creates a basic DB file and import metadata. + + // Mock client responses + mc.On("IsUpdateAvailable", mock.Anything).Return(&distribution.Archive{}, nil) + // CRUCIAL ASSERTION: + // Verify that Download is called with c.config.DBRootDir as its second argument (baseDirForTemp). + // It will return the expectedDownloadedContentPath, simulating successful download and extraction. + mc.On("Download", mock.Anything, c.config.DBRootDir, mock.Anything).Return(expectedDownloadedContentPath, nil) + + hydrateCalled := false + c.hydrator = func(path string) error { + // Ensure hydrator is called with the path returned by Download + assert.Equal(t, expectedDownloadedContentPath, path, "hydrator called with incorrect path") + hydrateCalled = true + return nil // Simulate successful hydration + } + + // Call Update to trigger the download and activation sequence + updated, err := c.Update() + + // Assertions + require.NoError(t, err, "Update should succeed") + require.True(t, updated, "Update should report true") + mc.AssertExpectations(t) // Verifies that Download was called with the expected arguments + assert.True(t, hydrateCalled, "expected hydrator to be called") + + // Check if the DB was "activated" (i.e., renamed) + finalDBPath := c.config.DBDirectoryPath() + _, err = c.fs.Stat(finalDBPath) + require.NoError(t, err, "final DB directory should exist after successful update") + // And the temporary downloaded content path should no longer exist as it was renamed + _, err = c.fs.Stat(expectedDownloadedContentPath) + require.True(t, os.IsNotExist(err), "temporary download path should not exist after rename") +} + +func TestCurator_Update_CleansUpDownloadDirOnActivationFailure(t *testing.T) { + c := newTestCurator(t) // Sets up c.fs as afero.NewOsFs() rooted in t.TempDir() + mc := c.client.(*mockClient) + + // This is the path that the mocked Download method will return. + // This directory should be cleaned up if activation fails. + downloadedContentPath := filepath.Join(c.config.DBRootDir, "temp-download-to-be-cleaned-up") + + // Simulate the download client successfully creating this directory. + require.NoError(t, c.fs.MkdirAll(downloadedContentPath, 0755)) + // Optionally, put a dummy file inside to make the cleanup more tangible. 
+ require.NoError(t, afero.WriteFile(c.fs, filepath.Join(downloadedContentPath, "dummy_file.txt"), []byte("test data"), 0644)) + + // Mock client responses + mc.On("IsUpdateAvailable", mock.Anything).Return(&distribution.Archive{}, nil) + // Download is called with DBRootDir as base, and returns the path to the (simulated) downloaded content. + mc.On("Download", mock.Anything, c.config.DBRootDir, mock.Anything).Return(downloadedContentPath, nil) + + // Configure the hydrator to fail, which will cause c.activate() to fail. + expectedHydrationError := "simulated hydration failure" + c.hydrator = func(path string) error { + assert.Equal(t, downloadedContentPath, path, "hydrator called with incorrect path") + return errors.New(expectedHydrationError) + } + + // Call Update, expecting it to fail during activation. + updated, err := c.Update() + + // Assertions + require.Error(t, err, "Update should fail due to activation error") + require.Contains(t, err.Error(), expectedHydrationError, "Error message should reflect hydration failure") + require.False(t, updated, "Update should report false on failure") + mc.AssertExpectations(t) // Verifies Download was called as expected. + + // CRUCIAL ASSERTION: + // Verify that the temporary download directory was cleaned up. + _, statErr := c.fs.Stat(downloadedContentPath) + require.True(t, os.IsNotExist(statErr), "expected temporary download directory to be cleaned up after activation failure") +} + +// Test for the Import path (URL case) - very similar to the Update tests +func TestCurator_Import_URL_UsesDBRootDirForDownloadTempBaseAndCleansUp(t *testing.T) { + t.Run("successful import from URL", func(t *testing.T) { + c := newTestCurator(t) + mc := c.client.(*mockClient) + + importURL := "http://localhost/some/db.tar.gz" + expectedDownloadedContentPath := filepath.Join(c.config.DBRootDir, "temp-imported-db-content-url") + + require.NoError(t, c.fs.MkdirAll(expectedDownloadedContentPath, 0755)) + writeTestDB(t, c.fs, expectedDownloadedContentPath) + + mc.On("Download", importURL, c.config.DBRootDir, mock.Anything).Return(expectedDownloadedContentPath, nil) + + hydrateCalled := false + c.hydrator = func(path string) error { + assert.Equal(t, expectedDownloadedContentPath, path) + hydrateCalled = true + return nil + } + + err := c.Import(importURL) + + require.NoError(t, err) + mc.AssertExpectations(t) + assert.True(t, hydrateCalled) + _, err = c.fs.Stat(c.config.DBDirectoryPath()) + require.NoError(t, err, "final DB directory should exist") + _, err = c.fs.Stat(expectedDownloadedContentPath) + require.True(t, os.IsNotExist(err), "temp import path should not exist after rename") + }) + + t.Run("import from URL fails activation", func(t *testing.T) { + c := newTestCurator(t) + mc := c.client.(*mockClient) + + importURL := "http://localhost/some/other/db.tar.gz" + downloadedContentPath := filepath.Join(c.config.DBRootDir, "temp-imported-to-cleanup-url") + + require.NoError(t, c.fs.MkdirAll(downloadedContentPath, 0755)) + require.NoError(t, afero.WriteFile(c.fs, filepath.Join(downloadedContentPath, "dummy.txt"), []byte("test"), 0644)) + + mc.On("Download", importURL, c.config.DBRootDir, mock.Anything).Return(downloadedContentPath, nil) + + expectedHydrationError := "simulated hydration failure for import" + c.hydrator = func(path string) error { + return errors.New(expectedHydrationError) + } + + err := c.Import(importURL) + + require.Error(t, err) + require.Contains(t, err.Error(), expectedHydrationError) + mc.AssertExpectations(t) + + _, statErr := 
c.fs.Stat(downloadedContentPath) + require.True(t, os.IsNotExist(statErr), "expected temp import directory to be cleaned up") + }) +} + func setupTestDB(t *testing.T, dbDir string) db.ReadWriter { s, err := db.NewWriter(db.Config{ DBDirPath: dbDir, diff --git a/grype/db/v6/models.go b/grype/db/v6/models.go index 6c4ca1ede7c..8c36731badf 100644 --- a/grype/db/v6/models.go +++ b/grype/db/v6/models.go @@ -522,8 +522,8 @@ func (o *OperatingSystem) setRowID(i ID) { } func (o *OperatingSystem) clean() { - o.MajorVersion = strings.TrimLeft(o.MajorVersion, "0") - o.MinorVersion = strings.TrimLeft(o.MinorVersion, "0") + o.MajorVersion = trimZeroes(o.MajorVersion) + o.MinorVersion = trimZeroes(o.MinorVersion) } func (o *OperatingSystem) BeforeCreate(tx *gorm.DB) (err error) { @@ -675,7 +675,7 @@ type Cpe struct { } func (c Cpe) String() string { - parts := []string{"cpe:2.3", c.Part, c.Vendor, c.Product, c.Edition, c.Language, c.SoftwareEdition, c.TargetHardware, c.TargetSoftware, c.Other} + parts := []string{"cpe:2.3", c.Part, c.Vendor, c.Product, "*", "*", c.Edition, c.Language, c.SoftwareEdition, c.TargetSoftware, c.TargetHardware, c.Other} for i, part := range parts { if part == "" { parts[i] = "*" diff --git a/grype/db/v6/models_test.go b/grype/db/v6/models_test.go index b20736045e1..c5410433060 100644 --- a/grype/db/v6/models_test.go +++ b/grype/db/v6/models_test.go @@ -1,6 +1,7 @@ package v6 import ( + "github.com/google/go-cmp/cmp" "testing" "github.com/stretchr/testify/assert" @@ -126,3 +127,48 @@ func TestOperatingSystem_Version(t *testing.T) { }) } } + +func TestOperatingSystem_clean(t *testing.T) { + + tests := []struct { + name string + input OperatingSystem + want OperatingSystem + }{ + { + name: "trim 0s", + input: OperatingSystem{ + Name: "Ubuntu", + MajorVersion: "20", + MinorVersion: "04", + }, + want: OperatingSystem{ + Name: "Ubuntu", + MajorVersion: "20", + MinorVersion: "4", + }, + }, + { + name: "preserve 0 value", + input: OperatingSystem{ + Name: "Redhat", + MajorVersion: "9", + MinorVersion: "0", + }, + want: OperatingSystem{ + Name: "Redhat", + MajorVersion: "9", + MinorVersion: "0", // important! ...9 != 9.0 since 9 includes multiple minor versions + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + o := tt.input + o.clean() + if d := cmp.Diff(tt.want, o); d != "" { + t.Errorf("OperatingSystem.clean() mismatch (-want +got):\n%s", d) + } + }) + } +} diff --git a/grype/db/v6/operating_system_store.go b/grype/db/v6/operating_system_store.go new file mode 100644 index 00000000000..fd187757198 --- /dev/null +++ b/grype/db/v6/operating_system_store.go @@ -0,0 +1,353 @@ +package v6 + +import ( + "errors" + "fmt" + "regexp" + "strings" + + "gorm.io/gorm" + + "github.com/anchore/grype/internal/log" +) + +type OSSpecifiers []*OSSpecifier + +// OSSpecifier is a struct that represents a distro in a way that can be used to query the affected package store. +type OSSpecifier struct { + // Name of the distro as identified by the ID field in /etc/os-release (or similar normalized name, e.g. "oracle" instead of "ol") + Name string + + // MajorVersion is the first field in the VERSION_ID field in /etc/os-release (e.g. 7 in "7.0.1406") + MajorVersion string + + // MinorVersion is the second field in the VERSION_ID field in /etc/os-release (e.g. 0 in "7.0.1406") + MinorVersion string + + // RemainingVersion is anything after the minor version in the VERSION_ID field in /etc/os-release (e.g. 
1406 in "7.0.1406") + RemainingVersion string + + // LabelVersion is a string that represents a floating version (e.g. "edge" or "unstable") or is the CODENAME field in /etc/os-release (e.g. "wheezy" for debian 7) + LabelVersion string +} + +func (d *OSSpecifier) clean() { + d.MajorVersion = trimZeroes(d.MajorVersion) + d.MinorVersion = trimZeroes(d.MinorVersion) +} + +func (d *OSSpecifier) String() string { + if d == nil { + return anyOS + } + + if *d == *NoOSSpecified { + return "none" + } + + var ver string + if d.MajorVersion != "" { + ver = d.version() + } else { + ver = d.LabelVersion + } + + distroDisplayName := d.Name + if ver != "" { + distroDisplayName += "@" + ver + } + if ver == d.MajorVersion && d.LabelVersion != "" { + distroDisplayName += " (" + d.LabelVersion + ")" + } + + return distroDisplayName +} + +func (d OSSpecifier) version() string { + if d.MajorVersion != "" { + if d.MinorVersion != "" { + if d.RemainingVersion != "" { + return d.MajorVersion + "." + d.MinorVersion + "." + d.RemainingVersion + } + return d.MajorVersion + "." + d.MinorVersion + } + return d.MajorVersion + } + + return d.LabelVersion +} + +func (d OSSpecifiers) String() string { + if d.IsAny() { + return anyOS + } + var parts []string + for _, v := range d { + parts = append(parts, v.String()) + } + return strings.Join(parts, ", ") +} + +func (d OSSpecifiers) IsAny() bool { + if len(d) == 0 { + return true + } + if len(d) == 1 && d[0] == AnyOSSpecified { + return true + } + return false +} + +func (d OSSpecifier) matchesVersionPattern(pattern string) bool { + // check if version or version label matches the given regex + r, err := regexp.Compile(pattern) + if err != nil { + log.Tracef("failed to compile distro specifier regex pattern %q: %v", pattern, err) + return false + } + + if r.MatchString(d.version()) { + return true + } + + if d.LabelVersion != "" { + return r.MatchString(d.LabelVersion) + } + return false +} + +type OperatingSystemStoreReader interface { + GetOperatingSystems(OSSpecifier) ([]OperatingSystem, error) +} + +type operatingSystemStore struct { + db *gorm.DB + blobStore *blobStore +} + +func newOperatingSystemStore(db *gorm.DB, bs *blobStore) *operatingSystemStore { + return &operatingSystemStore{ + db: db, + blobStore: bs, + } +} + +func (s *operatingSystemStore) addOsFromPackages(packages ...*AffectedPackageHandle) error { // nolint:dupl + cacheInst, ok := cacheFromContext(s.db.Statement.Context) + if !ok { + return fmt.Errorf("unable to fetch OS cache from context") + } + + var final []*OperatingSystem + byCacheKey := make(map[string][]*OperatingSystem) + for _, p := range packages { + if p.OperatingSystem != nil { + p.OperatingSystem.clean() + key := p.OperatingSystem.cacheKey() + if existingID, ok := cacheInst.getID(p.OperatingSystem); ok { + // seen in a previous transaction... 
+ p.OperatingSystemID = &existingID + } else if _, ok := byCacheKey[key]; !ok { + // not seen within this transaction + final = append(final, p.OperatingSystem) + } + byCacheKey[key] = append(byCacheKey[key], p.OperatingSystem) + } + } + + if len(final) == 0 { + return nil + } + + if err := s.db.Create(final).Error; err != nil { + return fmt.Errorf("unable to create OS records: %w", err) + } + + // update the cache with the new records + for _, ref := range final { + cacheInst.set(ref) + } + + // update all references with the IDs from the cache + for _, refs := range byCacheKey { + for _, ref := range refs { + id, ok := cacheInst.getID(ref) + if ok { + ref.setRowID(id) + } + } + } + + // update the parent objects with the FK ID + for _, p := range packages { + if p.OperatingSystem != nil { + p.OperatingSystemID = &p.OperatingSystem.ID + } + } + return nil +} + +func (s *operatingSystemStore) GetOperatingSystems(d OSSpecifier) ([]OperatingSystem, error) { + if d.Name == "" && d.LabelVersion == "" { + return nil, ErrMissingOSIdentification + } + + // search for aliases for the given distro; we intentionally map some OSs to other OSs in terms of + // vulnerability (e.g. `centos` is an alias for `rhel`). If an alias is found always use that alias in + // searches (there will never be anything in the DB for aliased distros). + if err := s.applyOSAlias(&d); err != nil { + return nil, err + } + + d.clean() + + // handle non-version fields + query := s.prepareQuery(d) + + // handle version-like fields + return s.searchForOSExactVersions(query, d) +} + +func (s *operatingSystemStore) applyOSAlias(d *OSSpecifier) error { + if d.Name == "" { + return nil + } + + var aliases []OperatingSystemSpecifierOverride + err := s.db.Where("alias = ? collate nocase", d.Name).Find(&aliases).Error + if err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return fmt.Errorf("failed to resolve alias for distro %q: %w", d.Name, err) + } + return nil + } + + var alias *OperatingSystemSpecifierOverride + + for _, a := range aliases { + if a.Codename != "" && a.Codename != d.LabelVersion { + continue + } + + if a.Version != "" && a.Version != d.version() { + continue + } + + if a.VersionPattern != "" && !d.matchesVersionPattern(a.VersionPattern) { + continue + } + + alias = &a + break + } + + if alias == nil { + return nil + } + + if alias.ReplacementName != nil { + d.Name = *alias.ReplacementName + } + + if alias.Rolling { + d.MajorVersion = "" + d.MinorVersion = "" + } + + if alias.ReplacementMajorVersion != nil { + d.MajorVersion = *alias.ReplacementMajorVersion + } + + if alias.ReplacementMinorVersion != nil { + d.MinorVersion = *alias.ReplacementMinorVersion + } + + if alias.ReplacementLabelVersion != nil { + d.LabelVersion = *alias.ReplacementLabelVersion + } + + return nil +} + +func (s *operatingSystemStore) prepareQuery(d OSSpecifier) *gorm.DB { + query := s.db.Model(&OperatingSystem{}) + + if d.Name != "" { + query = query.Where("name = ? collate nocase OR release_id = ? collate nocase", d.Name, d.Name) + } + + if d.LabelVersion != "" { + query = query.Where("codename = ? collate nocase OR label_version = ? 
collate nocase", d.LabelVersion, d.LabelVersion) + } + + return query +} + +func (s *operatingSystemStore) searchForOSExactVersions(query *gorm.DB, d OSSpecifier) ([]OperatingSystem, error) { + var allOs []OperatingSystem + + handleQuery := func(q *gorm.DB, desc string) ([]OperatingSystem, error) { + err := q.Find(&allOs).Error + if err == nil { + return allOs, nil + } + if !errors.Is(err, gorm.ErrRecordNotFound) { + return nil, fmt.Errorf("failed to query distro by %s: %w", desc, err) + } + return nil, nil + } + + if d.MajorVersion == "" && d.MinorVersion == "" { + return handleQuery(query, "name and codename only") + } + + // search by the most specific criteria first, then fallback + var result []OperatingSystem + var err error + if d.MajorVersion != "" { + if d.MinorVersion != "" { + // non-empty major and minor versions + specificQuery := query.Session(&gorm.Session{}).Where("major_version = ? AND minor_version = ?", d.MajorVersion, d.MinorVersion) + result, err = handleQuery(specificQuery, "major and minor versions") + if err != nil || len(result) > 0 { + return result, err + } + } + + // fallback to major version only, requiring the minor version to be blank. Note: it is important that we don't + // match on any record with the given major version, we must only match on records that are intentionally empty + // minor version. For instance, the DB may have rhel 8.1, 8.2, 8.3, 8.4, etc. We don't want to arbitrarily match + // on one of these or match even the latest version, as even that may yield incorrect vulnerability matching + // results. We are only intending to allow matches for when the vulnerability data is only specified at the major version level. + majorExclusiveQuery := query.Session(&gorm.Session{}).Where("major_version = ? AND minor_version = ?", d.MajorVersion, "") + result, err = handleQuery(majorExclusiveQuery, "exclusively major version") + if err != nil || len(result) > 0 { + return result, err + } + + // fallback to major version for any minor version + majorQuery := query.Session(&gorm.Session{}).Where("major_version = ?", d.MajorVersion) + result, err = handleQuery(majorQuery, "major version with any minor version") + if err != nil || len(result) > 0 { + return result, err + } + } + + return allOs, nil +} + +func trimZeroes(s string) string { + // trim leading zeros from the version components + if s == "" { + return s + } + if s[0] == '0' { + s = strings.TrimLeft(s, "0") + } + if s == "" { + // we've not only trimmed leading zeros, but also the entire string + // we should preserve the zero value for the version + return "0" + } + return s +} diff --git a/grype/db/v6/operating_system_store_test.go b/grype/db/v6/operating_system_store_test.go new file mode 100644 index 00000000000..28dfe66ede1 --- /dev/null +++ b/grype/db/v6/operating_system_store_test.go @@ -0,0 +1,524 @@ +package v6 + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOperatingSystemStore_ResolveOperatingSystem(t *testing.T) { + // we always preload the OS aliases into the DB when staging for writing + db := setupTestStore(t).db + bs := newBlobStore(db) + oss := newOperatingSystemStore(db, bs) + + ubuntu2004 := &OperatingSystem{Name: "ubuntu", ReleaseID: "ubuntu", MajorVersion: "20", MinorVersion: "04", LabelVersion: "focal"} + ubuntu2010 := &OperatingSystem{Name: "ubuntu", MajorVersion: "20", MinorVersion: "10", LabelVersion: "groovy"} + rhel8 := 
&OperatingSystem{Name: "rhel", ReleaseID: "rhel", MajorVersion: "8"} + rhel81 := &OperatingSystem{Name: "rhel", ReleaseID: "rhel", MajorVersion: "8", MinorVersion: "1"} + debian10 := &OperatingSystem{Name: "debian", ReleaseID: "debian", MajorVersion: "10"} + echo := &OperatingSystem{Name: "echo", ReleaseID: "echo", MajorVersion: "1"} + alpine318 := &OperatingSystem{Name: "alpine", ReleaseID: "alpine", MajorVersion: "3", MinorVersion: "18"} + alpineEdge := &OperatingSystem{Name: "alpine", ReleaseID: "alpine", LabelVersion: "edge"} + debianUnstable := &OperatingSystem{Name: "debian", ReleaseID: "debian", LabelVersion: "unstable"} + debian7 := &OperatingSystem{Name: "debian", ReleaseID: "debian", MajorVersion: "7", LabelVersion: "wheezy"} + wolfi := &OperatingSystem{Name: "wolfi", ReleaseID: "wolfi", MajorVersion: "20230201"} + arch := &OperatingSystem{Name: "arch", ReleaseID: "arch", MajorVersion: "20241110", MinorVersion: "0"} + oracle5 := &OperatingSystem{Name: "oracle", ReleaseID: "ol", MajorVersion: "5"} + oracle6 := &OperatingSystem{Name: "oracle", ReleaseID: "ol", MajorVersion: "6"} + amazon2 := &OperatingSystem{Name: "amazon", ReleaseID: "amzn", MajorVersion: "2"} + minimos := &OperatingSystem{Name: "minimos", ReleaseID: "minimos", MajorVersion: "20241031"} + rocky8 := &OperatingSystem{Name: "rocky", ReleaseID: "rocky", MajorVersion: "8"} // should not be matched + alma8 := &OperatingSystem{Name: "almalinux", ReleaseID: "almalinux", MajorVersion: "8"} // should not be matched + + operatingSystems := []*OperatingSystem{ + ubuntu2004, + ubuntu2010, + rhel8, + rhel81, + debian10, + alpine318, + alpineEdge, + debianUnstable, + debian7, + wolfi, + arch, + oracle5, + oracle6, + amazon2, + minimos, + rocky8, + alma8, + echo, + } + require.NoError(t, db.Create(&operatingSystems).Error) + + tests := []struct { + name string + os OSSpecifier + expected []OperatingSystem + expectErr require.ErrorAssertionFunc + }{ + { + name: "specific distro with major and minor version", + os: OSSpecifier{ + Name: "ubuntu", + MajorVersion: "20", + MinorVersion: "04", + }, + expected: []OperatingSystem{*ubuntu2004}, + }, + { + name: "specific distro with major and minor version (missing left padding)", + os: OSSpecifier{ + Name: "ubuntu", + MajorVersion: "20", + MinorVersion: "4", + }, + expected: []OperatingSystem{*ubuntu2004}, + }, + { + name: "alias resolution with major version", + os: OSSpecifier{ + Name: "centos", + MajorVersion: "8", + }, + expected: []OperatingSystem{*rhel8}, + }, + { + name: "alias resolution with major and minor version", + os: OSSpecifier{ + Name: "centos", + MajorVersion: "8", + MinorVersion: "1", + }, + expected: []OperatingSystem{*rhel81}, + }, + { + name: "distro with major version only", + os: OSSpecifier{ + Name: "debian", + MajorVersion: "10", + }, + expected: []OperatingSystem{*debian10}, + }, + { + name: "codename resolution", + os: OSSpecifier{ + Name: "ubuntu", + LabelVersion: "focal", + }, + expected: []OperatingSystem{*ubuntu2004}, + }, + { + name: "codename and version info", + os: OSSpecifier{ + Name: "ubuntu", + MajorVersion: "20", + MinorVersion: "04", + LabelVersion: "focal", + }, + expected: []OperatingSystem{*ubuntu2004}, + }, + { + name: "conflicting codename and version info", + os: OSSpecifier{ + Name: "ubuntu", + MajorVersion: "20", + MinorVersion: "04", + LabelVersion: "fake", + }, + }, + { + name: "alpine edge version", + os: OSSpecifier{ + Name: "alpine", + MajorVersion: "3", + MinorVersion: "21", + LabelVersion: "3.21.0_alpha20240807", + }, + expected: 
[]OperatingSystem{*alpineEdge}, + }, + { + name: "arch rolling variant", + os: OSSpecifier{ + Name: "arch", + }, + expected: []OperatingSystem{*arch}, + }, + { + name: "wolfi rolling variant", + os: OSSpecifier{ + Name: "wolfi", + MajorVersion: "20221018", + }, + expected: []OperatingSystem{*wolfi}, + }, + { + name: "debian by codename for rolling alias", + os: OSSpecifier{ + Name: "debian", + MajorVersion: "13", + LabelVersion: "trixie", + }, + expected: []OperatingSystem{*debianUnstable}, + }, + { + name: "debian by codename", + os: OSSpecifier{ + Name: "debian", + LabelVersion: "wheezy", + }, + expected: []OperatingSystem{*debian7}, + }, + { + name: "debian by major version", + os: OSSpecifier{ + Name: "debian", + MajorVersion: "7", + }, + expected: []OperatingSystem{*debian7}, + }, + { + name: "debian by major.minor version", + os: OSSpecifier{ + Name: "debian", + MajorVersion: "7", + MinorVersion: "2", + }, + expected: []OperatingSystem{*debian7}, + }, + { + name: "alpine with major and minor version", + os: OSSpecifier{ + Name: "alpine", + MajorVersion: "3", + MinorVersion: "18", + }, + expected: []OperatingSystem{*alpine318}, + }, + { + name: "lookup by release ID (not name)", + os: OSSpecifier{ + Name: "ol", + MajorVersion: "5", + }, + expected: []OperatingSystem{*oracle5}, + }, + { + name: "lookup by non-standard name (oraclelinux)", + os: OSSpecifier{ + Name: "oraclelinux", // based on the grype distro names + MajorVersion: "5", + }, + expected: []OperatingSystem{*oracle5}, + }, + { + name: "lookup by non-standard name (amazonlinux)", + os: OSSpecifier{ + Name: "amazonlinux", // based on the grype distro names + MajorVersion: "2", + }, + expected: []OperatingSystem{*amazon2}, + }, + { + name: "lookup by non-standard name (oracle)", + os: OSSpecifier{ + Name: "oracle", + MajorVersion: "5", + }, + expected: []OperatingSystem{*oracle5}, + }, + { + name: "lookup by non-standard name (amazon)", + os: OSSpecifier{ + Name: "amazon", + MajorVersion: "2", + }, + expected: []OperatingSystem{*amazon2}, + }, + { + name: "lookup by non-standard name (rocky)", + os: OSSpecifier{ + Name: "rocky", + MajorVersion: "8", + }, + expected: []OperatingSystem{*rhel8}, + }, + { + name: "lookup by non-standard name (rockylinux)", + os: OSSpecifier{ + Name: "rockylinux", + MajorVersion: "8", + }, + expected: []OperatingSystem{*rhel8}, + }, + { + name: "lookup by non-standard name (alma)", + os: OSSpecifier{ + Name: "alma", + MajorVersion: "8", + }, + expected: []OperatingSystem{*rhel8}, + }, + { + name: "lookup by non-standard name (almalinux)", + os: OSSpecifier{ + Name: "almalinux", + MajorVersion: "8", + }, + expected: []OperatingSystem{*rhel8}, + }, + { + name: "echo rolling variant", + os: OSSpecifier{ + Name: "echo", + MajorVersion: "1", + }, + expected: []OperatingSystem{*echo}, + }, + { + name: "missing distro name", + os: OSSpecifier{ + MajorVersion: "8", + }, + expectErr: expectErrIs(t, ErrMissingOSIdentification), + }, + { + name: "nonexistent distro", + os: OSSpecifier{ + Name: "madeup", + MajorVersion: "99", + }, + }, + { + name: "minimos rolling variant", + os: OSSpecifier{ + Name: "minimos", + }, + expected: []OperatingSystem{*minimos}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.expectErr == nil { + tt.expectErr = require.NoError + } + result, err := oss.GetOperatingSystems(tt.os) + tt.expectErr(t, err) + if err != nil { + return + } + + if diff := cmp.Diff(tt.expected, result, cmpopts.EquateEmpty()); diff != "" { + t.Errorf("unexpected result 
(-want +got):\n%s", diff) + } + }) + } +} + +func TestOSSpecifier_String(t *testing.T) { + tests := []struct { + name string + os *OSSpecifier + expected string + }{ + { + name: "nil distro", + os: AnyOSSpecified, + expected: "any", + }, + { + name: "no distro specified", + os: NoOSSpecified, + expected: "none", + }, + { + name: "only name specified", + os: &OSSpecifier{ + Name: "ubuntu", + }, + expected: "ubuntu", + }, + { + name: "name and major version specified", + os: &OSSpecifier{ + Name: "ubuntu", + MajorVersion: "20", + }, + expected: "ubuntu@20", + }, + { + name: "name, major, and minor version specified", + os: &OSSpecifier{ + Name: "ubuntu", + MajorVersion: "20", + MinorVersion: "04", + }, + expected: "ubuntu@20.04", + }, + { + name: "name, major version, and codename specified", + os: &OSSpecifier{ + Name: "ubuntu", + MajorVersion: "20", + LabelVersion: "focal", + }, + expected: "ubuntu@20 (focal)", + }, + { + name: "name and codename specified", + os: &OSSpecifier{ + Name: "ubuntu", + LabelVersion: "focal", + }, + expected: "ubuntu@focal", + }, + { + name: "name, major version, minor version, and codename specified", + os: &OSSpecifier{ + Name: "ubuntu", + MajorVersion: "20", + MinorVersion: "04", + LabelVersion: "focal", + }, + expected: "ubuntu@20.04", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := tt.os.String() + require.Equal(t, tt.expected, result) + }) + } +} + +func TestTrimZeroes(t *testing.T) { + tests := []struct { + name string + input string + expected string + }{ + { + name: "empty string", + input: "", + expected: "", + }, + { + name: "single zero", + input: "0", + expected: "0", + }, + { + name: "multiple zeros only", + input: "000", + expected: "0", + }, + { + name: "single non-zero digit", + input: "5", + expected: "5", + }, + { + name: "no leading zeros", + input: "123", + expected: "123", + }, + { + name: "single leading zero", + input: "0123", + expected: "123", + }, + { + name: "multiple leading zeros", + input: "000123", + expected: "123", + }, + { + name: "leading zeros with trailing zeros", + input: "001230", + expected: "1230", + }, + { + name: "string starting with non-zero", + input: "1000", + expected: "1000", + }, + { + name: "mixed digits with leading zeros", + input: "00042", + expected: "42", + }, + { + name: "very long leading zeros", + input: "00000000001", + expected: "1", + }, + { + name: "alphanumeric with leading zero", + input: "0abc", + expected: "abc", + }, + { + name: "special characters with leading zeros", + input: "00.123", + expected: ".123", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := trimZeroes(tt.input) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestOSSpecifier_clean(t *testing.T) { + tests := []struct { + name string + input OSSpecifier + want OSSpecifier + }{ + { + name: "trim 0s", + input: OSSpecifier{ + Name: "Ubuntu", + MajorVersion: "20", + MinorVersion: "04", + }, + want: OSSpecifier{ + Name: "Ubuntu", + MajorVersion: "20", + MinorVersion: "4", + }, + }, + { + name: "preserve 0 value", + input: OSSpecifier{ + Name: "Redhat", + MajorVersion: "9", + MinorVersion: "0", + }, + want: OSSpecifier{ + Name: "Redhat", + MajorVersion: "9", + MinorVersion: "0", // important! 
...9 != 9.0 since 9 includes multiple minor versions + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + o := tt.input + o.clean() + if d := cmp.Diff(tt.want, o); d != "" { + t.Errorf("OSSpecifier.clean() mismatch (-want +got):\n%s", d) + } + }) + } +} diff --git a/grype/db/v6/refs.go b/grype/db/v6/refs.go index 228c1479965..6a2df8975ed 100644 --- a/grype/db/v6/refs.go +++ b/grype/db/v6/refs.go @@ -2,6 +2,8 @@ package v6 import ( "slices" + + "gorm.io/gorm" ) type ref[ID, T any] struct { @@ -38,7 +40,7 @@ func fillRefs[T, R any](reader Reader, handles []*T, getRef refProvider[T, R], r // load a map with all id -> ref results var values []R - tx := reader.getDB().Where("id IN (?)", ids) + tx := reader.(lowLevelReader).GetDB().Where("id IN (?)", ids) err := tx.Find(&values).Error if err != nil { return err @@ -73,3 +75,7 @@ func ptrs[T any](values []T) []*T { } return out } + +type lowLevelReader interface { + GetDB() *gorm.DB +} diff --git a/grype/db/v6/severity.go b/grype/db/v6/severity.go index 0c7c4b36979..74b14fefe18 100644 --- a/grype/db/v6/severity.go +++ b/grype/db/v6/severity.go @@ -2,15 +2,9 @@ package v6 import ( "fmt" - "math" - "strings" - - gocvss20 "github.com/pandatix/go-cvss/20" - gocvss30 "github.com/pandatix/go-cvss/30" - gocvss31 "github.com/pandatix/go-cvss/31" - gocvss40 "github.com/pandatix/go-cvss/40" "github.com/anchore/grype/grype/vulnerability" + "github.com/anchore/grype/internal/cvss" "github.com/anchore/grype/internal/log" ) @@ -35,7 +29,7 @@ func extractSeverity(severity any) (vulnerability.Severity, error) { case string: return vulnerability.ParseSeverity(sev), nil case CVSSSeverity: - metrics, err := parseCVSS(sev.Vector) + metrics, err := cvss.ParseMetricsFromVector(sev.Vector) if err != nil { return vulnerability.UnknownSeverity, fmt.Errorf("unable to parse CVSS vector: %w", err) } @@ -48,67 +42,6 @@ func extractSeverity(severity any) (vulnerability.Severity, error) { } } -func parseCVSS(vector string) (*vulnerability.CvssMetrics, error) { - switch { - case strings.HasPrefix(vector, "CVSS:3.0"): - cvss, err := gocvss30.ParseVector(vector) - if err != nil { - return nil, fmt.Errorf("unable to parse CVSS v3 vector: %w", err) - } - ex := roundScore(cvss.Exploitability()) - im := roundScore(cvss.Impact()) - return &vulnerability.CvssMetrics{ - BaseScore: roundScore(cvss.BaseScore()), - ExploitabilityScore: &ex, - ImpactScore: &im, - }, nil - case strings.HasPrefix(vector, "CVSS:3.1"): - cvss, err := gocvss31.ParseVector(vector) - if err != nil { - return nil, fmt.Errorf("unable to parse CVSS v3.1 vector: %w", err) - } - ex := roundScore(cvss.Exploitability()) - im := roundScore(cvss.Impact()) - return &vulnerability.CvssMetrics{ - BaseScore: roundScore(cvss.BaseScore()), - ExploitabilityScore: &ex, - ImpactScore: &im, - }, nil - case strings.HasPrefix(vector, "CVSS:4.0"): - cvss, err := gocvss40.ParseVector(vector) - if err != nil { - return nil, fmt.Errorf("unable to parse CVSS v4.0 vector: %w", err) - } - // there are no exploitability and impact scores in CVSS v4.0 - return &vulnerability.CvssMetrics{ - BaseScore: roundScore(cvss.Score()), - }, nil - default: - // should be CVSS v2.0 or is invalid - cvss, err := gocvss20.ParseVector(vector) - if err != nil { - return nil, fmt.Errorf("unable to parse CVSS v2 vector: %w", err) - } - ex := roundScore(cvss.Exploitability()) - im := roundScore(cvss.Impact()) - return &vulnerability.CvssMetrics{ - BaseScore: roundScore(cvss.BaseScore()), - ExploitabilityScore: &ex, - ImpactScore: 
&im, - }, nil - } -} - -// roundScore rounds the score to the nearest tenth based on first.org rounding rules -// see https://www.first.org/cvss/v3.1/specification-document#Appendix-A---Floating-Point-Rounding -func roundScore(score float64) float64 { - intInput := int(math.Round(score * 100000)) - if intInput%10000 == 0 { - return float64(intInput) / 100000.0 - } - return (math.Floor(float64(intInput)/10000.0) + 1) / 10.0 -} - func interpretCVSS(score float64, version string) vulnerability.Severity { switch version { case "2.0": @@ -178,7 +111,7 @@ func toCvss(severities ...Severity) []vulnerability.Cvss { } var usedMetrics vulnerability.CvssMetrics // though the DB has the base score, we parse the vector for all metrics - metrics, err := parseCVSS(cvssSev.Vector) + metrics, err := cvss.ParseMetricsFromVector(cvssSev.Vector) if err != nil { log.WithFields("vector", cvssSev.Vector, "error", err).Warn("unable to parse CVSS vector") continue diff --git a/grype/db/v6/severity_test.go b/grype/db/v6/severity_test.go index e8b3b41f63a..3b943d0f448 100644 --- a/grype/db/v6/severity_test.go +++ b/grype/db/v6/severity_test.go @@ -126,107 +126,6 @@ func TestExtractSeverity(t *testing.T) { } } -func TestParseCVSS(t *testing.T) { - tests := []struct { - name string - vector string - expectedMetrics *vulnerability.CvssMetrics - wantErr require.ErrorAssertionFunc - }{ - { - name: "valid CVSS 2.0", - vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P", - expectedMetrics: &vulnerability.CvssMetrics{ - BaseScore: 7.5, - ExploitabilityScore: ptr(10.0), - ImpactScore: ptr(6.5), - }, - }, - { - name: "valid CVSS 3.0", - vector: "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", - expectedMetrics: &vulnerability.CvssMetrics{ - BaseScore: 9.8, - ExploitabilityScore: ptr(3.9), - ImpactScore: ptr(5.9), - }, - }, - { - name: "valid CVSS 3.1", - vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", - expectedMetrics: &vulnerability.CvssMetrics{ - BaseScore: 9.8, - ExploitabilityScore: ptr(3.9), - ImpactScore: ptr(5.9), - }, - }, - { - name: "valid CVSS 4.0", - vector: "CVSS:4.0/AV:N/AC:H/AT:P/PR:L/UI:N/VC:N/VI:H/VA:L/SC:L/SI:H/SA:L/MAC:L/MAT:P/MPR:N/S:N/R:A/RE:L/U:Clear", - expectedMetrics: &vulnerability.CvssMetrics{ - BaseScore: 9.1, - }, - }, - { - name: "invalid CVSS 2.0", - vector: "AV:N/AC:INVALID", - wantErr: require.Error, - }, - { - name: "invalid CVSS 3.0", - vector: "CVSS:3.0/AV:INVALID", - wantErr: require.Error, - }, - { - name: "invalid CVSS 3.1", - vector: "CVSS:3.1/AV:INVALID", - wantErr: require.Error, - }, - { - name: "invalid CVSS 4.0", - vector: "CVSS:4.0/AV:INVALID", - wantErr: require.Error, - }, - { - name: "empty vector", - vector: "", - wantErr: require.Error, - }, - { - name: "malformed vector", - vector: "INVALID:VECTOR", - wantErr: require.Error, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if tt.wantErr == nil { - tt.wantErr = require.NoError - } - result, err := parseCVSS(tt.vector) - tt.wantErr(t, err) - if err != nil { - assert.Nil(t, result) - return - } - - require.NotNil(t, result) - assert.Equal(t, tt.expectedMetrics.BaseScore, result.BaseScore, "given vector: %s", tt.vector) - - if tt.expectedMetrics.ExploitabilityScore != nil { - require.NotNil(t, result.ExploitabilityScore) - assert.Equal(t, *tt.expectedMetrics.ExploitabilityScore, *result.ExploitabilityScore, "given vector: %s", tt.vector) - } - - if tt.expectedMetrics.ImpactScore != nil { - require.NotNil(t, result.ImpactScore) - assert.Equal(t, *tt.expectedMetrics.ImpactScore, *result.ImpactScore, "given 
vector: %s", tt.vector) - } - }) - } -} - func TestExtractSeverities(t *testing.T) { tests := []struct { name string diff --git a/grype/db/v6/status.go b/grype/db/v6/status.go deleted file mode 100644 index ee90524ca65..00000000000 --- a/grype/db/v6/status.go +++ /dev/null @@ -1,39 +0,0 @@ -package v6 - -import "encoding/json" - -type Status struct { - SchemaVersion string `json:"schemaVersion"` - Built Time `json:"built"` - Path string `json:"path"` - Checksum string `json:"checksum"` - Err error `json:"error"` -} - -func (s Status) Status() string { - if s.Err != nil { - return "invalid" - } - return "valid" -} - -func (s Status) MarshalJSON() ([]byte, error) { - errStr := "" - if s.Err != nil { - errStr = s.Err.Error() - } - - return json.Marshal(&struct { - SchemaVersion string `json:"schemaVersion"` - Built Time `json:"built"` - Path string `json:"path"` - Checksum string `json:"checksum"` - Err string `json:"error"` - }{ - SchemaVersion: s.SchemaVersion, - Built: s.Built, - Path: s.Path, - Checksum: s.Checksum, - Err: errStr, - }) -} diff --git a/grype/db/v6/store.go b/grype/db/v6/store.go index dfc39def34c..b48ee4ab319 100644 --- a/grype/db/v6/store.go +++ b/grype/db/v6/store.go @@ -13,6 +13,7 @@ type store struct { *dbMetadataStore *providerStore *vulnerabilityStore + *operatingSystemStore *affectedPackageStore *affectedCPEStore *vulnerabilityDecoratorStore @@ -23,7 +24,7 @@ type store struct { writable bool } -func (s *store) getDB() *gorm.DB { +func (s *store) GetDB() *gorm.DB { return s.db } @@ -65,27 +66,30 @@ func newStore(cfg Config, empty, writable bool) (*store, error) { } meta, err := metadataStore.GetDBMetadata() - if err != nil { + if err != nil || meta == nil || meta.Model != ModelVersion { // db.Close must be called, or we will get stale reads d, _ := db.DB() if d != nil { _ = d.Close() } - return nil, fmt.Errorf("failed to get db metadata: %w", err) - } - - if meta == nil { - return nil, fmt.Errorf("no DB metadata found") + if err != nil { + return nil, fmt.Errorf("not a v%d database: %w", ModelVersion, err) + } + return nil, fmt.Errorf("not a v%d database", ModelVersion) } dbVersion := newSchemaVerFromDBMetadata(*meta) bs := newBlobStore(db) + + osStore := newOperatingSystemStore(db, bs) + return &store{ dbMetadataStore: metadataStore, providerStore: newProviderStore(db), vulnerabilityStore: newVulnerabilityStore(db, bs), - affectedPackageStore: newAffectedPackageStore(db, bs), + operatingSystemStore: osStore, + affectedPackageStore: newAffectedPackageStore(db, bs, osStore), affectedCPEStore: newAffectedCPEStore(db, bs), vulnerabilityDecoratorStore: newVulnerabilityDecoratorStore(db, bs, dbVersion), blobStore: bs, diff --git a/grype/db/v6/store_test.go b/grype/db/v6/store_test.go index b508d6155a1..16261061a6f 100644 --- a/grype/db/v6/store_test.go +++ b/grype/db/v6/store_test.go @@ -1,10 +1,12 @@ package v6 import ( + "fmt" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "gorm.io/gorm" ) func TestStoreClose(t *testing.T) { @@ -54,3 +56,23 @@ func TestStoreClose(t *testing.T) { assert.Empty(t, indexes) }) } + +func Test_oldDbV5(t *testing.T) { + s := setupTestStore(t) + require.NoError(t, s.db.Where("true").Delete(&DBMetadata{}).Error) // delete all existing records + require.NoError(t, s.Close()) + s, err := newStore(s.config, false, true) + require.Nil(t, s) + require.ErrorIs(t, err, gorm.ErrRecordNotFound) + require.ErrorContains(t, err, fmt.Sprintf("not a v%d database", ModelVersion)) +} + +func Test_oldDbWithMetadata(t 
*testing.T) {
+    s := setupTestStore(t)
+    require.NoError(t, s.db.Where("true").Model(DBMetadata{}).Update("Model", "5").Error) // old database version
+    require.NoError(t, s.Close())
+    s, err := newStore(s.config, false, true)
+    require.Nil(t, s)
+    require.NotErrorIs(t, err, gorm.ErrRecordNotFound)
+    require.ErrorContains(t, err, fmt.Sprintf("not a v%d database", ModelVersion))
+}
diff --git a/grype/db/v6/testutil/server.go b/grype/db/v6/testutil/server.go
index 1188e4c2043..69626857db8 100644
--- a/grype/db/v6/testutil/server.go
+++ b/grype/db/v6/testutil/server.go
@@ -109,10 +109,10 @@ func (s *ServerBuilder) Start() (url string) {
     case serverSubdir + s.LatestDocFile:
         latestDoc := *s.LatestDoc
         latestDoc.Built.Time = s.DBBuildTime
-        latestDoc.Archive.SchemaVersion = s.DBVersion
-        latestDoc.Archive.Built.Time = s.DBBuildTime
-        latestDoc.Archive.Path = archivePath
-        latestDoc.Archive.Checksum = sha(s.dbContents)
+        latestDoc.SchemaVersion = s.DBVersion
+        latestDoc.Built.Time = s.DBBuildTime
+        latestDoc.Path = archivePath
+        latestDoc.Checksum = sha(s.dbContents)
         w.WriteHeader(http.StatusOK)
         _ = json.NewEncoder(w).Encode(latestDoc)
     case serverSubdir + archivePath:
diff --git a/grype/db/v6/vulnerability.go b/grype/db/v6/vulnerability.go
index 2da7700fc1e..d3e684a8563 100644
--- a/grype/db/v6/vulnerability.go
+++ b/grype/db/v6/vulnerability.go
@@ -17,7 +17,10 @@ import (
     "github.com/anchore/syft/syft/pkg"
 )

-const v5NvdNamespace = "nvd:cpe"
+const (
+    nvdProvider    = "nvd"
+    v5NvdNamespace = "nvd:cpe"
+)

 func newVulnerabilityFromAffectedPackageHandle(affected AffectedPackageHandle, affectedRanges []AffectedRange) (*vulnerability.Vulnerability, error) {
     packageName := ""
@@ -101,36 +104,31 @@ func getVersionConstraint(affectedRanges []AffectedRange) (version.Constraint, e
 }

 func getRelatedVulnerabilities(vuln *VulnerabilityHandle, affected *AffectedPackageBlob) []vulnerability.Reference {
-    cveSet := strset.New()
     var relatedVulnerabilities []vulnerability.Reference
-    for _, alias := range vuln.BlobValue.Aliases {
-        if cveSet.Has(alias) {
+    idsToProcess := append([]string{vuln.Name}, vuln.BlobValue.Aliases...)
+
+    if affected != nil {
+        idsToProcess = append(idsToProcess, affected.CVEs...)
+    }
+
+    encountered := strset.New()
+    for _, id := range idsToProcess {
+        if encountered.Has(id) {
             continue
         }
-        if !strings.HasPrefix(strings.ToLower(alias), "cve-") {
+        if vuln.ProviderID == nvdProvider && strings.EqualFold(vuln.Name, id) {
+            continue
+        }
+        if !strings.HasPrefix(strings.ToLower(id), "cve-") {
             continue
         }
         relatedVulnerabilities = append(relatedVulnerabilities, vulnerability.Reference{
-            ID:        alias,
+            ID:        id,
             Namespace: v5NvdNamespace,
         })
-        cveSet.Add(alias)
-    }
-    if affected != nil {
-        for _, cve := range affected.CVEs {
-            if cveSet.Has(cve) {
-                continue
-            }
-            if !strings.HasPrefix(strings.ToLower(cve), "cve-") {
-                continue
-            }
-            relatedVulnerabilities = append(relatedVulnerabilities, vulnerability.Reference{
-                ID:        cve,
-                Namespace: v5NvdNamespace,
-            })
-            cveSet.Add(cve)
-        }
+        encountered.Add(id)
     }
+
     return relatedVulnerabilities
 }

@@ -146,39 +144,10 @@ func getPackageQualifiers(affected *AffectedPackageBlob) []qualifier.Qualifier {
 //
 //nolint:funlen
 func MimicV5Namespace(vuln *VulnerabilityHandle, affected *AffectedPackageHandle) string {
-    if affected == nil { // for CPE matches
-        return v5NvdNamespace
-    }
-    switch vuln.Provider.ID {
-    case "nvd":
-        return v5NvdNamespace
-    case "github":
-        language := affected.Package.Ecosystem
-        // normalize from purl type, github ecosystem types, and vunnel mappings
-        switch strings.ToLower(language) {
-        case "golang", string(pkg.GoModulePkg):
-            language = "go"
-        case "composer", string(pkg.PhpComposerPkg):
-            language = "php"
-        case "cargo", string(pkg.RustPkg):
-            language = "rust"
-        case "pub", string(pkg.DartPubPkg):
-            language = "dart"
-        case "nuget", string(pkg.DotnetPkg):
-            language = "dotnet"
-        case "maven", string(pkg.JavaPkg), string(pkg.JenkinsPluginPkg):
-            language = "java"
-        case "swifturl", string(pkg.SwiplPackPkg), string(pkg.SwiftPkg):
-            language = "swift"
-        case "node", string(pkg.NpmPkg):
-            language = "javascript"
-        case "pypi", "pip", string(pkg.PythonPkg):
-            language = "python"
-        case "rubygems", string(pkg.GemPkg):
-            language = "ruby"
-        }
-        return fmt.Sprintf("github:language:%s", language)
+    if affected == nil || affected.Package == nil { // for CPE matches
+        return fmt.Sprintf("%s:cpe", vuln.Provider.ID)
     }
+
     if affected.OperatingSystem != nil {
         // distro family fixes
         family := affected.OperatingSystem.Name
@@ -229,6 +198,42 @@ func MimicV5Namespace(vuln *VulnerabilityHandle, affected *AffectedPackageHandle

         return fmt.Sprintf("%s:distro:%s:%s", pr, family, ver)
     }
+
+    if affected.Package != nil {
+        language := affected.Package.Ecosystem
+        // normalize from purl type, github ecosystem types, and vunnel mappings
+        switch strings.ToLower(language) {
+        case "golang", string(pkg.GoModulePkg):
+            language = "go"
+        case "composer", string(pkg.PhpComposerPkg):
+            language = "php"
+        case "cargo", string(pkg.RustPkg):
+            language = "rust"
+        case "pub", string(pkg.DartPubPkg):
+            language = "dart"
+        case "nuget", string(pkg.DotnetPkg):
+            language = "dotnet"
+        case "maven", string(pkg.JavaPkg), string(pkg.JenkinsPluginPkg):
+            language = "java"
+        case "swifturl", string(pkg.SwiplPackPkg), string(pkg.SwiftPkg):
+            language = "swift"
+        case "node", string(pkg.NpmPkg):
+            language = "javascript"
+        case "pypi", "pip", string(pkg.PythonPkg):
+            language = "python"
+        case "rubygems", string(pkg.GemPkg):
+            language = "ruby"
+        case "msrc", string(pkg.KbPkg): // msrc packages were previously modelled as distro
+            return fmt.Sprintf("%s:distro:windows:%s", vuln.Provider.ID, affected.Package.Name)
+        case string(pkg.BitnamiPkg): // bitnami packages were previously modelled as distro
+            return "bitnami"
+        case "": // CPE
+            return fmt.Sprintf("%s:cpe", vuln.Provider.ID)
+        }
+        return fmt.Sprintf("%s:language:%s", vuln.Provider.ID, language)
+    }
+
+    // this shouldn't happen and is not a valid v5 namespace, but some information is better than none
     return vuln.Provider.ID
 }

diff --git a/grype/db/v6/vulnerability_provider.go b/grype/db/v6/vulnerability_provider.go
index aa5eff83c48..3d9eac6cdc8 100644
--- a/grype/db/v6/vulnerability_provider.go
+++ b/grype/db/v6/vulnerability_provider.go
@@ -5,6 +5,7 @@ import (
     "fmt"
     "io"
     "strings"
+    "time"

     "github.com/hashicorp/go-multierror"
     "github.com/iancoleman/strcase"
@@ -21,6 +22,11 @@ import (
     syftPkg "github.com/anchore/syft/syft/pkg"
 )

+var (
+    _ vulnerability.Provider              = (*vulnerabilityProvider)(nil)
+    _ vulnerability.StoreMetadataProvider = (*vulnerabilityProvider)(nil)
+)
+
 func NewVulnerabilityProvider(rdr Reader) vulnerability.Provider {
     return &vulnerabilityProvider{
         reader: rdr,
@@ -36,11 +42,11 @@ var _ interface {
 } = (*vulnerabilityProvider)(nil)

 // Deprecated: vulnerability.Vulnerability objects now have metadata included
-func (s vulnerabilityProvider) VulnerabilityMetadata(ref vulnerability.Reference) (*vulnerability.Metadata, error) {
+func (vp vulnerabilityProvider) VulnerabilityMetadata(ref vulnerability.Reference) (*vulnerability.Metadata, error) {
     vuln, ok := ref.Internal.(*VulnerabilityHandle)
     if !ok {
         var err error
-        vuln, err = s.fetchVulnerability(ref)
+        vuln, err = vp.fetchVulnerability(ref)
         if err != nil {
             return nil, err
         }
@@ -56,18 +62,18 @@ func (s vulnerabilityProvider) VulnerabilityMetadata(ref vulnerability.Reference
         }, nil
     }

-    return s.getVulnerabilityMetadata(vuln, ref.Namespace)
+    return vp.getVulnerabilityMetadata(vuln, ref.Namespace)
 }

-func (s vulnerabilityProvider) getVulnerabilityMetadata(vuln *VulnerabilityHandle, namespace string) (*vulnerability.Metadata, error) {
+func (vp vulnerabilityProvider) getVulnerabilityMetadata(vuln *VulnerabilityHandle, namespace string) (*vulnerability.Metadata, error) {
     cves := getCVEs(vuln)

-    kevs, err := s.fetchKnownExploited(cves)
+    kevs, err := vp.fetchKnownExploited(cves)
     if err != nil {
         log.WithFields("id", vuln.Name, "vulnerability", vuln.String(), "error", err).Debug("unable to fetch known exploited from vulnerability")
     }

-    epss, err := s.fetchEpss(cves)
+    epss, err := vp.fetchEpss(cves)
     if err != nil {
         log.WithFields("id", vuln.Name, "vulnerability", vuln.String(), "error", err).Debug("unable to fetch epss from vulnerability")
     }
@@ -83,20 +89,14 @@ func newVulnerabilityMetadata(vuln *VulnerabilityHandle, namespace string, kevs
     sev, cvss, err := extractSeverities(vuln)
     if err != nil {
         log.WithFields("id", vuln.Name, "vulnerability", vuln.String()).Debug("unable to extract severity from vulnerability")
-        return &vulnerability.Metadata{
-            ID:         vuln.Name,
-            DataSource: strings.Split(namespace, ":")[0],
-            Namespace:  namespace,
-            Severity:   toSeverityString(vulnerability.UnknownSeverity),
-        }, nil
     }

     return &vulnerability.Metadata{
         ID:             vuln.Name,
-        DataSource:     vuln.Provider.ID,
+        DataSource:     firstReferenceURL(vuln),
         Namespace:      namespace,
         Severity:       toSeverityString(sev),
-        URLs:           toURLs(vuln),
+        URLs:           lastReferenceURLs(vuln),
         Description:    vuln.BlobValue.Description,
         Cvss:           cvss,
         KnownExploited: kevs,
@@ -104,9 +104,29 @@ func newVulnerabilityMetadata(vuln *VulnerabilityHandle, namespace string, kevs
     }, nil
 }

-func (s vulnerabilityProvider) fetchVulnerability(ref vulnerability.Reference) (*VulnerabilityHandle, error) {
+func (vp 
vulnerabilityProvider) DataProvenance() (map[string]vulnerability.DataProvenance, error) { + providers, err := vp.reader.AllProviders() + if err != nil { + return nil, err + } + dps := make(map[string]vulnerability.DataProvenance) + + for _, p := range providers { + var date time.Time + if p.DateCaptured != nil { + date = *p.DateCaptured + } + dps[p.ID] = vulnerability.DataProvenance{ + DateCaptured: date, + InputDigest: p.InputDigest, + } + } + return dps, nil +} + +func (vp vulnerabilityProvider) fetchVulnerability(ref vulnerability.Reference) (*VulnerabilityHandle, error) { provider := strings.Split(ref.Namespace, ":")[0] - vulns, err := s.reader.GetVulnerabilities(&VulnerabilitySpecifier{Name: ref.ID, Providers: []string{provider}}, &GetVulnerabilityOptions{Preload: true}) + vulns, err := vp.reader.GetVulnerabilities(&VulnerabilitySpecifier{Name: ref.ID, Providers: []string{provider}}, &GetVulnerabilityOptions{Preload: true}) if err != nil { return nil, err } @@ -116,11 +136,11 @@ func (s vulnerabilityProvider) fetchVulnerability(ref vulnerability.Reference) ( return nil, nil } -func (s vulnerabilityProvider) fetchKnownExploited(cves []string) ([]vulnerability.KnownExploited, error) { +func (vp vulnerabilityProvider) fetchKnownExploited(cves []string) ([]vulnerability.KnownExploited, error) { var out []vulnerability.KnownExploited var errs error for _, cve := range cves { - kevs, err := s.reader.GetKnownExploitedVulnerabilities(cve) + kevs, err := vp.reader.GetKnownExploitedVulnerabilities(cve) if err != nil { errs = multierror.Append(errs, err) continue @@ -143,11 +163,11 @@ func (s vulnerabilityProvider) fetchKnownExploited(cves []string) ([]vulnerabili return out, errs } -func (s vulnerabilityProvider) fetchEpss(cves []string) ([]vulnerability.EPSS, error) { +func (vp vulnerabilityProvider) fetchEpss(cves []string) ([]vulnerability.EPSS, error) { var out []vulnerability.EPSS var errs error for _, cve := range cves { - entries, err := s.reader.GetEpss(cve) + entries, err := vp.reader.GetEpss(cve) if err != nil { errs = multierror.Append(errs, err) continue @@ -164,16 +184,16 @@ func (s vulnerabilityProvider) fetchEpss(cves []string) ([]vulnerability.EPSS, e return out, errs } -func (s vulnerabilityProvider) PackageSearchNames(p pkg.Package) []string { +func (vp vulnerabilityProvider) PackageSearchNames(p pkg.Package) []string { return name.PackageNames(p) } -func (s vulnerabilityProvider) Close() error { - return s.reader.(io.Closer).Close() +func (vp vulnerabilityProvider) Close() error { + return vp.reader.(io.Closer).Close() } //nolint:funlen,gocognit,gocyclo -func (s vulnerabilityProvider) FindVulnerabilities(criteria ...vulnerability.Criteria) ([]vulnerability.Vulnerability, error) { +func (vp vulnerabilityProvider) FindVulnerabilities(criteria ...vulnerability.Criteria) ([]vulnerability.Vulnerability, error) { if err := search.ValidateCriteria(criteria); err != nil { return nil, err } @@ -202,11 +222,20 @@ func (s vulnerabilityProvider) FindVulnerabilities(criteria ...vulnerability.Cri pkgSpec = &PackageSpecifier{} } // the v6 store normalizes ecosystems around the syft package type, so that field is preferred - if c.PackageType != "" { - pkgSpec.Ecosystem = string(c.PackageType) + switch { + case c.PackageType != "" && c.PackageType != syftPkg.UnknownPkg: + // prefer to match by a non-blank, known package type pkgType = c.PackageType - } else { + pkgSpec.Ecosystem = string(c.PackageType) + case c.Language != "": + // if there's no known package type, but there is a non-blank 
language + // try that. pkgSpec.Ecosystem = string(c.Language) + case c.PackageType == syftPkg.UnknownPkg: + // if language is blank, and package type is explicitly "UnknownPkg" and not + // just blank, use that. + pkgType = c.PackageType + pkgSpec.Ecosystem = string(c.PackageType) } applied = true case *search.IDCriteria: @@ -230,9 +259,11 @@ func (s vulnerabilityProvider) FindVulnerabilities(criteria ...vulnerability.Cri case *search.DistroCriteria: for _, d := range c.Distros { osSpecs = append(osSpecs, &OSSpecifier{ - Name: d.Name(), - MajorVersion: d.MajorVersion(), - MinorVersion: d.MinorVersion(), + Name: d.Name(), + MajorVersion: d.MajorVersion(), + MinorVersion: d.MinorVersion(), + RemainingVersion: d.RemainingVersion(), + LabelVersion: d.Codename, }) } applied = true @@ -263,7 +294,7 @@ func (s vulnerabilityProvider) FindVulnerabilities(criteria ...vulnerability.Cri var affectedCPEs []AffectedCPEHandle if pkgSpec != nil || len(vulnSpecs) > 0 { - affectedPackages, err = s.reader.GetAffectedPackages(pkgSpec, &GetAffectedPackageOptions{ + affectedPackages, err = vp.reader.GetAffectedPackages(pkgSpec, &GetAffectedPackageOptions{ OSs: osSpecs, Vulnerabilities: vulnSpecs, PreloadBlob: true, @@ -279,13 +310,13 @@ func (s vulnerabilityProvider) FindVulnerabilities(criteria ...vulnerability.Cri affectedPackages = filterAffectedPackageVersions(versionMatcher, affectedPackages) // after filtering, read vulnerability data - if err = fillAffectedPackageHandles(s.reader, ptrs(affectedPackages)); err != nil { + if err = fillAffectedPackageHandles(vp.reader, ptrs(affectedPackages)); err != nil { return nil, err } } if cpeSpec != nil { - affectedCPEs, err = s.reader.GetAffectedCPEs(cpeSpec, &GetAffectedCPEOptions{ + affectedCPEs, err = vp.reader.GetAffectedCPEs(cpeSpec, &GetAffectedCPEOptions{ Vulnerabilities: vulnSpecs, PreloadBlob: true, }) @@ -296,19 +327,19 @@ func (s vulnerabilityProvider) FindVulnerabilities(criteria ...vulnerability.Cri affectedCPEs = filterAffectedCPEVersions(versionMatcher, affectedCPEs, cpeSpec) // after filtering, read vulnerability data - if err = fillAffectedCPEHandles(s.reader, ptrs(affectedCPEs)); err != nil { + if err = fillAffectedCPEHandles(vp.reader, ptrs(affectedCPEs)); err != nil { return nil, err } } // fill complete vulnerabilities for this set -- these should have already had all properties lazy loaded - vulns, err := s.toVulnerabilities(affectedPackages, affectedCPEs) + vulns, err := vp.toVulnerabilities(affectedPackages, affectedCPEs) if err != nil { return nil, err } // filter vulnerabilities by any remaining criteria such as ByQualifiedPackages - vulns, err = s.filterVulnerabilities(vulns, remainingCriteria...) + vulns, err = vp.filterVulnerabilities(vulns, remainingCriteria...) 
if err != nil { return nil, err } @@ -319,7 +350,7 @@ func (s vulnerabilityProvider) FindVulnerabilities(criteria ...vulnerability.Cri return out, nil } -func (s vulnerabilityProvider) filterVulnerabilities(vulns []vulnerability.Vulnerability, criteria ...vulnerability.Criteria) ([]vulnerability.Vulnerability, error) { +func (vp vulnerabilityProvider) filterVulnerabilities(vulns []vulnerability.Vulnerability, criteria ...vulnerability.Criteria) ([]vulnerability.Vulnerability, error) { isMatch := func(v vulnerability.Vulnerability) (bool, error) { for _, c := range criteria { if _, ok := c.(search.VersionConstraintMatcher); ok { @@ -354,7 +385,7 @@ func (s vulnerabilityProvider) filterVulnerabilities(vulns []vulnerability.Vulne } // toVulnerabilities takes fully-filled handles and returns all vulnerabilities from them -func (s vulnerabilityProvider) toVulnerabilities(packageHandles []AffectedPackageHandle, cpeHandles []AffectedCPEHandle) ([]vulnerability.Vulnerability, error) { //nolint:funlen,gocognit +func (vp vulnerabilityProvider) toVulnerabilities(packageHandles []AffectedPackageHandle, cpeHandles []AffectedCPEHandle) ([]vulnerability.Vulnerability, error) { //nolint:funlen,gocognit var out []vulnerability.Vulnerability metadataByCVE := make(map[string]*vulnerability.Metadata) @@ -368,7 +399,7 @@ func (s vulnerabilityProvider) toVulnerabilities(packageHandles []AffectedPackag return metadata, nil } - metadata, err := s.getVulnerabilityMetadata(vuln, namespace) + metadata, err := vp.getVulnerabilityMetadata(vuln, namespace) if err != nil { return nil, err } @@ -390,7 +421,7 @@ func (s vulnerabilityProvider) toVulnerabilities(packageHandles []AffectedPackag continue } - meta, err := getMetadata(packageHandle.Vulnerability, v.Reference.Namespace) + meta, err := getMetadata(packageHandle.Vulnerability, v.Namespace) if err != nil { log.WithFields("error", err, "vulnerability", v.String()).Debug("unable to fetch metadata for vulnerability") } else { @@ -413,7 +444,7 @@ func (s vulnerabilityProvider) toVulnerabilities(packageHandles []AffectedPackag continue } - meta, err := getMetadata(c.Vulnerability, v.Reference.Namespace) + meta, err := getMetadata(c.Vulnerability, v.Namespace) if err != nil { log.WithFields("error", err, "vulnerability", v.String()).Debug("unable to fetch metadata for vulnerability") } else { @@ -497,6 +528,10 @@ func filterAffectedCPEVersions(constraintMatcher search.VersionConstraintMatcher // filterAffectedPackageRanges returns true if all ranges removed func filterAffectedPackageRanges(matcher search.VersionConstraintMatcher, b *AffectedPackageBlob) (bool, []string) { + if len(b.Ranges) == 0 { + // no ranges means that we're implicitly vulnerable to all versions + return false, nil + } var unmatchedConstraints []string for _, r := range b.Ranges { v := r.Version @@ -522,9 +557,21 @@ func toSeverityString(sev vulnerability.Severity) string { return strcase.ToCamel(sev.String()) } -func toURLs(vuln *VulnerabilityHandle) []string { - var out []string +// returns the first reference url to populate the DataSource +func firstReferenceURL(vuln *VulnerabilityHandle) string { for _, v := range vuln.BlobValue.References { + return v.URL + } + return "" +} + +// skip the first reference URL and return the remainder to populate the URLs +func lastReferenceURLs(vuln *VulnerabilityHandle) []string { + var out []string + for i, v := range vuln.BlobValue.References { + if i == 0 { + continue + } out = append(out, v.URL) } return out diff --git 
a/grype/db/v6/vulnerability_provider_mocks_test.go b/grype/db/v6/vulnerability_provider_mocks_test.go index d2a76838be1..339fe650fc5 100644 --- a/grype/db/v6/vulnerability_provider_mocks_test.go +++ b/grype/db/v6/vulnerability_provider_mocks_test.go @@ -29,7 +29,7 @@ func testVulnerabilityProvider(t *testing.T) vulnerability.Provider { aWeekAgo := time.Now().Add(-7 * 24 * time.Hour) twoWeeksAgo := time.Now().Add(-14 * 24 * time.Hour) - prov := &Provider{ + debianProvider := &Provider{ ID: "debian", Version: "1", Processor: "debian-processor", @@ -37,6 +37,14 @@ func testVulnerabilityProvider(t *testing.T) vulnerability.Provider { InputDigest: hex.EncodeToString([]byte("debian")), } + nvdProvider := &Provider{ + ID: "nvd", + Version: "1", + Processor: "nvd-processor", + DateCaptured: &aDayAgo, + InputDigest: hex.EncodeToString([]byte("nvd")), + } + v5vulns := []v5.Vulnerability{ // neutron { @@ -103,6 +111,13 @@ func testVulnerabilityProvider(t *testing.T) vulnerability.Provider { "cpe:2.3:*:awesome:awesome:*:*:*:*:*:*:*:*", // shouldn't match on this }, }, + { + PackageName: "Newtonsoft.Json", + Namespace: "github:language:dotnet", + ID: "GHSA-5crp-9r3c-p9vr", + VersionFormat: "unknown", + VersionConstraint: "<13.0.1", + }, // poison the well! this is not a valid entry, but we want the matching process to survive and find other good results... { PackageName: "activerecord", @@ -118,10 +133,12 @@ func testVulnerabilityProvider(t *testing.T) vulnerability.Provider { for _, v := range v5vulns { var os *OperatingSystem + prov := nvdProvider switch v.Namespace { case "nvd:cpe": case "debian:distro:debian:8": + prov = debianProvider os = &OperatingSystem{ Name: "debian", MajorVersion: "8", diff --git a/grype/db/v6/vulnerability_provider_test.go b/grype/db/v6/vulnerability_provider_test.go index 010ec973215..b4c04418f0d 100644 --- a/grype/db/v6/vulnerability_provider_test.go +++ b/grype/db/v6/vulnerability_provider_test.go @@ -17,20 +17,22 @@ import ( "github.com/anchore/grype/grype/version" "github.com/anchore/grype/grype/vulnerability" "github.com/anchore/syft/syft/cpe" + syftPkg "github.com/anchore/syft/syft/pkg" ) func Test_FindVulnerabilitiesByDistro(t *testing.T) { provider := testVulnerabilityProvider(t) - d, err := distro.New(distro.Debian, "8", "") - require.NoError(t, err) + d := distro.New(distro.Debian, "8", "") p := pkg.Package{ - ID: pkg.ID(uuid.NewString()), - Name: "neutron", + ID: pkg.ID(uuid.NewString()), + Name: "neutron", + Version: "1.0.0", + Type: syftPkg.DebPkg, } - actual, err := provider.FindVulnerabilities(search.ByDistro(*d), search.ByPackageName(p.Name)) + actual, err := provider.FindVulnerabilities(search.ByDistro(*d), search.ByPackageName(p.Name), search.ByVersion(*version.NewVersionFromPkg(p))) require.NoError(t, err) expected := []vulnerability.Vulnerability{ @@ -46,12 +48,13 @@ func Test_FindVulnerabilitiesByDistro(t *testing.T) { Advisories: []vulnerability.Advisory{}, Metadata: &vulnerability.Metadata{ ID: "CVE-2014-fake-1", - DataSource: "debian", + DataSource: "http://somewhere/CVE-2014-fake-1", Namespace: "debian:distro:debian:8", Severity: "High", - URLs: []string{"http://somewhere/CVE-2014-fake-1"}, + URLs: nil, Description: "CVE-2014-fake-1-description", }, + RelatedVulnerabilities: []vulnerability.Reference{{ID: "CVE-2014-fake-1", Namespace: "nvd:cpe"}}, }, { PackageName: "neutron", @@ -65,12 +68,13 @@ func Test_FindVulnerabilitiesByDistro(t *testing.T) { Advisories: []vulnerability.Advisory{}, Metadata: &vulnerability.Metadata{ ID: "CVE-2013-fake-2", - 
DataSource: "debian", + DataSource: "http://somewhere/CVE-2013-fake-2", Namespace: "debian:distro:debian:8", Severity: "High", - URLs: []string{"http://somewhere/CVE-2013-fake-2"}, + URLs: nil, Description: "CVE-2013-fake-2-description", }, + RelatedVulnerabilities: []vulnerability.Reference{{ID: "CVE-2013-fake-2", Namespace: "nvd:cpe"}}, }, } @@ -123,10 +127,10 @@ func Test_FindVulnerabilitiesByCPE(t *testing.T) { Advisories: []vulnerability.Advisory{}, Metadata: &vulnerability.Metadata{ ID: "CVE-2014-fake-4", - DataSource: "debian", + DataSource: "http://somewhere/CVE-2014-fake-4", Namespace: "nvd:cpe", Severity: "High", - URLs: []string{"http://somewhere/CVE-2014-fake-4"}, + URLs: nil, Description: "CVE-2014-fake-4-description", }, }, @@ -150,10 +154,10 @@ func Test_FindVulnerabilitiesByCPE(t *testing.T) { Advisories: []vulnerability.Advisory{}, Metadata: &vulnerability.Metadata{ ID: "CVE-2014-fake-4", - DataSource: "debian", + DataSource: "http://somewhere/CVE-2014-fake-4", Namespace: "nvd:cpe", Severity: "High", - URLs: []string{"http://somewhere/CVE-2014-fake-4"}, + URLs: nil, Description: "CVE-2014-fake-4-description", }, }, @@ -177,10 +181,10 @@ func Test_FindVulnerabilitiesByCPE(t *testing.T) { Advisories: []vulnerability.Advisory{}, Metadata: &vulnerability.Metadata{ ID: "CVE-2014-fake-3", - DataSource: "debian", + DataSource: "http://somewhere/CVE-2014-fake-3", Namespace: "nvd:cpe", Severity: "High", - URLs: []string{"http://somewhere/CVE-2014-fake-3"}, + URLs: nil, Description: "CVE-2014-fake-3-description", }, }, @@ -198,13 +202,34 @@ func Test_FindVulnerabilitiesByCPE(t *testing.T) { Advisories: []vulnerability.Advisory{}, Metadata: &vulnerability.Metadata{ ID: "CVE-2014-fake-4", - DataSource: "debian", + DataSource: "http://somewhere/CVE-2014-fake-4", Namespace: "nvd:cpe", Severity: "High", - URLs: []string{"http://somewhere/CVE-2014-fake-4"}, + URLs: nil, Description: "CVE-2014-fake-4-description", }, }, + { + PackageName: "activerecord", + Constraint: version.MustGetConstraint("< 70.3.0-rc0", version.ApkFormat), + Reference: vulnerability.Reference{ + ID: "CVE-2014-fake-7", + Namespace: "nvd:cpe", + }, + CPEs: []cpe.CPE{ + cpe.Must("cpe:2.3:*:activerecord:activerecord:*:*:*:*:*:rails:*:*", ""), + }, + PackageQualifiers: []qualifier.Qualifier{}, + Advisories: []vulnerability.Advisory{}, + Metadata: &vulnerability.Metadata{ + ID: "CVE-2014-fake-7", + DataSource: "http://somewhere/CVE-2014-fake-7", + Namespace: "nvd:cpe", + Severity: "High", + URLs: nil, + Description: "CVE-2014-fake-7-description", + }, + }, }, }, { @@ -247,11 +272,10 @@ func Test_FindVulnerabilitiesByCPE(t *testing.T) { } -func Test_FindVulnerabilitiesByByID(t *testing.T) { +func Test_FindVulnerabilitiesByID(t *testing.T) { provider := testVulnerabilityProvider(t) - d, err := distro.New(distro.Debian, "8", "") - require.NoError(t, err) + d := distro.New(distro.Debian, "8", "") // with distro actual, err := provider.FindVulnerabilities(search.ByDistro(*d), search.ByID("CVE-2014-fake-1")) @@ -270,12 +294,13 @@ func Test_FindVulnerabilitiesByByID(t *testing.T) { Advisories: []vulnerability.Advisory{}, Metadata: &vulnerability.Metadata{ ID: "CVE-2014-fake-1", - DataSource: "debian", + DataSource: "http://somewhere/CVE-2014-fake-1", Namespace: "debian:distro:debian:8", Severity: "High", - URLs: []string{"http://somewhere/CVE-2014-fake-1"}, + URLs: nil, Description: "CVE-2014-fake-1-description", }, + RelatedVulnerabilities: []vulnerability.Reference{{ID: "CVE-2014-fake-1", Namespace: "nvd:cpe"}}, }, } @@ -296,11 
+321,240 @@ func Test_FindVulnerabilitiesByByID(t *testing.T) { t.Errorf("diff: %+v", d) } } +} - // prove we survive a bad request - actual, err = provider.FindVulnerabilities(search.ByDistro(*d), search.ByID("CVE-2014-fake-3")) - require.NoError(t, err) - require.Empty(t, actual) +func Test_FindVulnerabilitiesByEcosystem_UnknownPackageType(t *testing.T) { + tests := []struct { + name string + packageName string + packageType syftPkg.Type + language syftPkg.Language + expectedIDs []string + }{ + { + name: "known package type", + packageName: "Newtonsoft.Json", + packageType: syftPkg.DotnetPkg, + language: syftPkg.Java, // deliberately wrong to prove we're using package type + expectedIDs: []string{"GHSA-5crp-9r3c-p9vr"}, + }, + { + name: "unknown package type, known language", + packageName: "Newtonsoft.Json", + packageType: syftPkg.UnknownPkg, + language: syftPkg.Dotnet, + expectedIDs: []string{"GHSA-5crp-9r3c-p9vr"}, + }, + { + name: "unknown package type, unknown language", + packageName: "Newtonsoft.Json", + packageType: syftPkg.UnknownPkg, + language: syftPkg.UnknownLanguage, + // The vuln GHSA-5crp-9r3c-p9vr is specifically associated + // with the dotnet ecosystem, so it should not be returned here. + // In a real search for UnknownPkg + UnknownLanguage, there should + // be a separate search.ByCPE run that _does_ return it. + expectedIDs: []string{}, + }, + } + provider := testVulnerabilityProvider(t) + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + actual, err := provider.FindVulnerabilities( + search.ByEcosystem(test.language, test.packageType), + search.ByPackageName(test.packageName), + ) + require.NoError(t, err) + actualIDs := make([]string, len(actual)) + for idx, vuln := range actual { + actualIDs[idx] = vuln.ID + } + if d := cmp.Diff(test.expectedIDs, actualIDs); d != "" { + t.Errorf("diff: %+v", d) + } + }) + } +} + +func Test_DataSource(t *testing.T) { + tests := []struct { + name string + vuln VulnerabilityHandle + expected vulnerability.Metadata + }{ + { + name: "no reference urls", + vuln: VulnerabilityHandle{ + BlobValue: &VulnerabilityBlob{ + References: nil, + }, + }, + expected: vulnerability.Metadata{ + DataSource: "", + URLs: nil, + }, + }, + { + name: "one reference url", + vuln: VulnerabilityHandle{ + BlobValue: &VulnerabilityBlob{ + References: []Reference{ + { + URL: "url1", + }, + }, + }, + }, + expected: vulnerability.Metadata{ + DataSource: "url1", + URLs: nil, + }, + }, + { + name: "two reference urls", + vuln: VulnerabilityHandle{ + BlobValue: &VulnerabilityBlob{ + References: []Reference{ + { + URL: "url1", + }, + { + URL: "url2", + }, + }, + }, + }, + expected: vulnerability.Metadata{ + DataSource: "url1", + URLs: []string{"url2"}, + }, + }, + { + name: "many reference urls", + vuln: VulnerabilityHandle{ + BlobValue: &VulnerabilityBlob{ + References: []Reference{ + { + URL: "url4", + }, + { + URL: "url3", + }, + { + URL: "url2", + }, + { + URL: "url1", + }, + }, + }, + }, + expected: vulnerability.Metadata{ + DataSource: "url4", + URLs: []string{"url3", "url2", "url1"}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := newVulnerabilityMetadata(&tt.vuln, "", nil, nil) + got.Severity = "" + require.NoError(t, err) + if diff := cmp.Diff(&tt.expected, got, cmpOpts()...); diff != "" { + t.Fatal(diff) + } + }) + } +} + +func Test_filterAffectedPackageRanges(t *testing.T) { + tests := []struct { + name string + ranges []AffectedRange + matchesConstraint func(constraint 
version.Constraint) (bool, error) + expectedAllRangesRemoved bool + expectedUnmatchedStrings []string + }{ + { + name: "no ranges", + ranges: nil, + expectedAllRangesRemoved: false, // important! we assume that a vulnerability with no ranges is always vulnerable + expectedUnmatchedStrings: nil, + }, + { + name: "has ranges within constraint", + ranges: []AffectedRange{ + { + Version: AffectedVersion{ + Type: "rpm", + Constraint: "< 1.0.0", + }, + }, + { + Version: AffectedVersion{ + Type: "rpm", + Constraint: "< 2.0.0", + }, + }, + }, + matchesConstraint: func(constraint version.Constraint) (bool, error) { + return true, nil + }, + expectedAllRangesRemoved: false, + expectedUnmatchedStrings: nil, + }, + { + name: "has ranges outside constraint", + ranges: []AffectedRange{ + { + Version: AffectedVersion{ + Type: "rpm", + Constraint: "< 1.0.0", + }, + }, + { + Version: AffectedVersion{ + Type: "rpm", + Constraint: "< 2.0.0", + }, + }, + }, + matchesConstraint: func(constraint version.Constraint) (bool, error) { + return false, nil + }, + expectedAllRangesRemoved: true, + expectedUnmatchedStrings: []string{"< 1.0.0", "< 2.0.0"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockMatcher := &mockVersionConstraintMatcher{ + matchesConstraintFunc: tt.matchesConstraint, + } + + blob := &AffectedPackageBlob{ + Ranges: tt.ranges, + } + + allRangesRemoved, unmatchedConstraints := filterAffectedPackageRanges(mockMatcher, blob) + + require.Equal(t, tt.expectedAllRangesRemoved, allRangesRemoved) + require.Equal(t, tt.expectedUnmatchedStrings, unmatchedConstraints) + }) + } +} + +type mockVersionConstraintMatcher struct { + matchesConstraintFunc func(constraint version.Constraint) (bool, error) +} + +func (m *mockVersionConstraintMatcher) MatchesConstraint(constraint version.Constraint) (bool, error) { + if m.matchesConstraintFunc != nil { + return m.matchesConstraintFunc(constraint) + } + return false, nil } func cmpOpts() []cmp.Option { diff --git a/grype/db/v6/vulnerability_test.go b/grype/db/v6/vulnerability_test.go index 934ab2f7ed8..fa5fbebe3c4 100644 --- a/grype/db/v6/vulnerability_test.go +++ b/grype/db/v6/vulnerability_test.go @@ -6,6 +6,9 @@ import ( "unicode" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/anchore/grype/grype/vulnerability" ) func TestV5Namespace(t *testing.T) { @@ -96,6 +99,7 @@ func TestV5Namespace(t *testing.T) { // | sles:distro:sles:12.5 | // | chainguard:distro:chainguard:rolling | // | wolfi:distro:wolfi:rolling | + // | minimos:distro:minimos:rolling | // | github:language:go | // | alpine:distro:alpine:3.20 | // | alpine:distro:alpine:3.21 | @@ -135,12 +139,13 @@ func TestV5Namespace(t *testing.T) { // +--------------------------------------+ type testCase struct { - name string - provider string // from Providers.id - ecosystem string // only used when provider is "github" - osName string // only used for OS-based providers - osVersion string // only used for OS-based providers - expected string + name string + provider string // from Providers.id + ecosystem string // only used when provider non-os provider + packageName string // only used for msrc + osName string // only used for OS-based providers + osVersion string // only used for OS-based providers + expected string // } tests := []testCase{ @@ -324,6 +329,13 @@ func TestV5Namespace(t *testing.T) { osVersion: "rolling", expected: "wolfi:distro:wolfi:rolling", }, + { + name: "minimos distribution", + provider: "minimos", + osName: 
"minimos", + osVersion: "rolling", + expected: "minimos:distro:minimos:rolling", + }, { name: "amazon linux distribution", provider: "amazon", @@ -401,6 +413,20 @@ func TestV5Namespace(t *testing.T) { osVersion: "8", expected: "oracle:distro:oraclelinux:8", }, + { + name: "echo distribution", + provider: "echo", + osName: "echo", + osVersion: "rolling", + expected: "echo:distro:echo:rolling", + }, + { + name: "minimos distribution", + provider: "minimos", + osName: "minimos", + osVersion: "rolling", + expected: "minimos:distro:minimos:rolling", + }, // Version truncation tests { @@ -431,6 +457,39 @@ func TestV5Namespace(t *testing.T) { osVersion: "9.3.1", expected: "oracle:distro:oraclelinux:9", }, + // msrc is modeled as a distro for v5 but is just a package in v6 + { + name: "microsoft msrc-kb", + provider: "msrc", + ecosystem: "msrc-kb", + packageName: "10012", + expected: "msrc:distro:windows:10012", + }, + + // new provider existing ecosystem + { + name: "grizzly go-module", + provider: "grizzly", + ecosystem: "go-module", + expected: "grizzly:language:go", + }, + + // new provider new ecosystem + { + name: "armadillo pizza", + provider: "armadillo", + ecosystem: "pizza", + expected: "armadillo:language:pizza", + }, + + // new OS + { + name: "gothmog", + provider: "gothmog", + osName: "gothmoglinux", + osVersion: "zzzzzz11123", + expected: "gothmog:distro:gothmoglinux:zzzzzz11123", + }, } for _, tt := range tests { @@ -440,6 +499,7 @@ func TestV5Namespace(t *testing.T) { ID: tt.provider, }, } + pkg := &AffectedPackageHandle{} if tt.osName != "" { @@ -454,11 +514,14 @@ func TestV5Namespace(t *testing.T) { MinorVersion: minor, LabelVersion: label, } - } - - if tt.provider == "github" { + pkg.Package = &Package{ + Name: "os-package", + Ecosystem: "os-ecosystem", + } + } else if tt.ecosystem != "" { pkg.Package = &Package{ Ecosystem: tt.ecosystem, + Name: tt.packageName, } } @@ -468,6 +531,96 @@ func TestV5Namespace(t *testing.T) { } } +func Test_getRelatedVulnerabilities(t *testing.T) { + tests := []struct { + name string + vuln VulnerabilityHandle + affected AffectedPackageBlob + expected []string + }{ + { + name: "GHSA with related CVEs", + vuln: VulnerabilityHandle{ + Name: "GHSA-1234", + BlobValue: &VulnerabilityBlob{ + Aliases: []string{"CVE-2024-1"}, + }, + }, + affected: AffectedPackageBlob{ + CVEs: []string{"CVE-2024-2", "CVE-2024-3"}, + }, + expected: []string{"CVE-2024-1", "CVE-2024-2", "CVE-2024-3"}, + }, + { + name: "CVE with related CVEs", + vuln: VulnerabilityHandle{ + Name: "CVE-2024-1234", + BlobValue: &VulnerabilityBlob{ + Aliases: []string{"CVE-2024-1"}, + }, + }, + affected: AffectedPackageBlob{ + CVEs: []string{"CVE-2024-2", "CVE-2024-3"}, + }, + expected: []string{"CVE-2024-1234", "CVE-2024-1", "CVE-2024-2", "CVE-2024-3"}, + }, + { + name: "nvd CVE skips related CVEs to self", + vuln: VulnerabilityHandle{ + Name: "CVE-2024-1234", + ProviderID: nvdProvider, + BlobValue: &VulnerabilityBlob{ + Aliases: []string{"CVE-2024-1", "CVE-2024-1234"}, + }, + }, + affected: AffectedPackageBlob{ + CVEs: []string{"CVE-2024-2", "CVE-2024-1234"}, + }, + expected: []string{"CVE-2024-1", "CVE-2024-2"}, // does not include "CVE-2024-1234" + }, + { + name: "non-nvd CVE with related nvd CVEs to self", + vuln: VulnerabilityHandle{ + Name: "CVE-2024-1234", + BlobValue: &VulnerabilityBlob{ + Aliases: []string{"CVE-2024-1", "CVE-2024-1234"}, + }, + }, + affected: AffectedPackageBlob{ + CVEs: []string{"CVE-2024-2", "CVE-2024-1234"}, + }, + expected: []string{"CVE-2024-1", "CVE-2024-2", 
"CVE-2024-1234"}, // does include "CVE-2024-1234" + }, + { + name: "non-nvd CVE always relates back to NVD", + vuln: VulnerabilityHandle{ + Name: "CVE-2024-1234", + ProviderID: "1234", + BlobValue: &VulnerabilityBlob{ + Aliases: []string{}, + }, + }, + affected: AffectedPackageBlob{ + CVEs: []string{}, + }, + expected: []string{"CVE-2024-1234"}, // does include "CVE-2024-1234" + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := getRelatedVulnerabilities(&tt.vuln, &tt.affected) + var expected []vulnerability.Reference + for _, name := range tt.expected { + expected = append(expected, vulnerability.Reference{ + ID: name, + Namespace: v5NvdNamespace, + }) + } + require.ElementsMatch(t, expected, got) + }) + } +} + func majorMinorPatch(ver string) (string, string, string) { if !unicode.IsDigit(rune(ver[0])) { return "", "", "" diff --git a/grype/deprecated.go b/grype/deprecated.go index fc8ec72b29f..050974fef36 100644 --- a/grype/deprecated.go +++ b/grype/deprecated.go @@ -1,6 +1,7 @@ package grype import ( + "github.com/anchore/grype/grype/distro" "github.com/anchore/grype/grype/match" "github.com/anchore/grype/grype/matcher" "github.com/anchore/grype/grype/pkg" @@ -8,7 +9,6 @@ import ( "github.com/anchore/grype/internal/log" "github.com/anchore/stereoscope/pkg/image" "github.com/anchore/syft/syft" - "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/source" ) @@ -33,7 +33,7 @@ func FindVulnerabilities(store vulnerability.Provider, userImageStr string, scop } // TODO: deprecated, will remove before v1.0.0 -func FindVulnerabilitiesForPackage(store vulnerability.Provider, d *linux.Release, matchers []match.Matcher, packages []pkg.Package) match.Matches { +func FindVulnerabilitiesForPackage(store vulnerability.Provider, d *distro.Distro, matchers []match.Matcher, packages []pkg.Package) match.Matches { exclusionProvider, _ := store.(match.ExclusionProvider) // TODO v5 is an exclusion provider, but v6 is not runner := VulnerabilityMatcher{ VulnerabilityProvider: store, diff --git a/grype/distro/distro.go b/grype/distro/distro.go index 17ac8dbb054..cd5f4aa10c0 100644 --- a/grype/distro/distro.go +++ b/grype/distro/distro.go @@ -6,35 +6,89 @@ import ( hashiVer "github.com/hashicorp/go-version" + "github.com/anchore/grype/internal/log" "github.com/anchore/syft/syft/linux" ) // Distro represents a Linux Distribution. type Distro struct { - Type Type - Version *hashiVer.Version - RawVersion string - IDLike []string + Type Type + Version string + Codename string + IDLike []string + + // fields populated in the constructor + + major string + minor string + remaining string } // New creates a new Distro object populated with the given values. 
-func New(t Type, version string, idLikes ...string) (*Distro, error) { - var verObj *hashiVer.Version - var err error - +func New(t Type, version, label string, idLikes ...string) *Distro { + var major, minor, remaining string if version != "" { - verObj, err = hashiVer.NewVersion(version) - if err != nil { - return nil, fmt.Errorf("unable to parse version: %w", err) + // if starts with a digit, then assume it's a version and extract the major, minor, and remaining versions + if version[0] >= '0' && version[0] <= '9' { + // extract the major, minor, and remaining versions + parts := strings.Split(version, ".") + if len(parts) > 0 { + major = parts[0] + if len(parts) > 1 { + minor = parts[1] + } + if len(parts) > 2 { + remaining = strings.Join(parts[2:], ".") + } + } + } + } + + for i := range idLikes { + typ, ok := IDMapping[strings.TrimSpace(idLikes[i])] + if ok { + idLikes[i] = typ.String() } } return &Distro{ - Type: t, - Version: verObj, - RawVersion: version, - IDLike: idLikes, - }, nil + Type: t, + major: major, + minor: minor, + remaining: remaining, + Version: version, + Codename: label, + IDLike: idLikes, + } +} + +// NewFromNameVersion creates a new Distro object derived from the provided name and version +func NewFromNameVersion(name, version string) *Distro { + var codename string + + // if there are no digits in the version, it is likely a codename + if !strings.ContainsAny(version, "0123456789") { + codename = version + version = "" + } + + typ := IDMapping[name] + if typ == "" { + typ = Type(name) + } + return New(typ, version, codename, string(typ)) +} + +// FromRelease attempts to get a distro from the linux release, only logging any errors +func FromRelease(linuxRelease *linux.Release) *Distro { + if linuxRelease == nil { + return nil + } + d, err := NewFromRelease(*linuxRelease) + if err != nil { + log.WithFields("error", err).Warn("unable to create distro from linux distribution") + } + return d } // NewFromRelease creates a new Distro object derived from a syft linux.Release object. @@ -51,21 +105,18 @@ func NewFromRelease(release linux.Release) (*Distro, error) { continue } - if _, err := hashiVer.NewVersion(version); err == nil { + _, err := hashiVer.NewVersion(version) + if err == nil { selectedVersion = version break } } - if t == Debian && release.VersionID == "" && release.Version == "" && strings.Contains(release.PrettyName, "sid") { - return &Distro{ - Type: t, - RawVersion: "unstable", - IDLike: release.IDLike, - }, nil + if selectedVersion == "" { + selectedVersion = release.VersionID } - return New(t, selectedVersion, release.IDLike...) + return New(t, selectedVersion, release.VersionCodename, release.IDLike...), nil } func (d Distro) Name() string { @@ -74,50 +125,33 @@ func (d Distro) Name() string { // MajorVersion returns the major version value from the pseudo-semantically versioned distro version value. func (d Distro) MajorVersion() string { - if d.Version == nil { - return strings.Split(d.RawVersion, ".")[0] - } - return fmt.Sprintf("%d", d.Version.Segments()[0]) + return d.major } // MinorVersion returns the minor version value from the pseudo-semantically versioned distro version value. func (d Distro) MinorVersion() string { - if d.Version == nil { - parts := strings.Split(d.RawVersion, ".") - if len(parts) > 1 { - return parts[1] - } - return "" - } - parts := d.Version.Segments() - if len(parts) > 1 { - return fmt.Sprintf("%d", parts[1]) - } - return "" + return d.minor } -// FullVersion returns the original user version value. 
-func (d Distro) FullVersion() string { - return d.RawVersion +func (d Distro) RemainingVersion() string { + return d.remaining } // String returns a human-friendly representation of the Linux distribution. func (d Distro) String() string { versionStr := "(version unknown)" - if d.RawVersion != "" { - versionStr = d.RawVersion + if d.Version != "" { + versionStr = d.Version + } else if d.Codename != "" { + versionStr = d.Codename } return fmt.Sprintf("%s %s", d.Type, versionStr) } -func (d Distro) IsRolling() bool { - return d.Type == Wolfi || d.Type == Chainguard || d.Type == ArchLinux || d.Type == Gentoo -} - // Unsupported Linux distributions func (d Distro) Disabled() bool { - switch { - case d.Type == ArchLinux: + switch d.Type { + case ArchLinux: return true default: return false diff --git a/grype/distro/distro_test.go b/grype/distro/distro_test.go index c18be758fa1..6cad6917768 100644 --- a/grype/distro/distro_test.go +++ b/grype/distro/distro_test.go @@ -3,6 +3,8 @@ package distro import ( "testing" + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -14,12 +16,12 @@ import ( func Test_NewDistroFromRelease(t *testing.T) { tests := []struct { - name string - release linux.Release - expectedVersion string - expectedRawVersion string - expectedType Type - expectErr bool + name string + release linux.Release + expected *Distro + minor string + major string + expectErr require.ErrorAssertionFunc }{ { name: "go case: derive version from version-id", @@ -27,10 +29,15 @@ func Test_NewDistroFromRelease(t *testing.T) { ID: "centos", VersionID: "8", Version: "7", + IDLike: []string{"rhel"}, }, - expectedType: CentOS, - expectedRawVersion: "8", - expectedVersion: "8.0.0", + expected: &Distro{ + Type: CentOS, + Version: "8", + IDLike: []string{"redhat"}, + }, + major: "8", + minor: "", }, { name: "fallback to release name when release id is missing", @@ -38,9 +45,12 @@ func Test_NewDistroFromRelease(t *testing.T) { Name: "windows", VersionID: "8", }, - expectedType: Windows, - expectedRawVersion: "8", - expectedVersion: "8.0.0", + expected: &Distro{ + Type: Windows, + Version: "8", + }, + major: "8", + minor: "", }, { name: "fallback to version when version-id missing", @@ -48,16 +58,22 @@ func Test_NewDistroFromRelease(t *testing.T) { ID: "centos", Version: "8", }, - expectedType: CentOS, - expectedRawVersion: "8", - expectedVersion: "8.0.0", + expected: &Distro{ + Type: CentOS, + Version: "8", + }, + major: "8", + minor: "", }, { - name: "missing version results in error", + // this enables matching on multiple OS versions at once + name: "missing version or label version is allowed", release: linux.Release{ ID: "centos", }, - expectedType: CentOS, + expected: &Distro{ + Type: CentOS, + }, }, { name: "bogus distro type results in error", @@ -65,7 +81,7 @@ func Test_NewDistroFromRelease(t *testing.T) { ID: "bogosity", VersionID: "8", }, - expectErr: true, + expectErr: require.Error, }, { // syft -o json debian:testing | jq .distro @@ -78,9 +94,12 @@ func Test_NewDistroFromRelease(t *testing.T) { VersionCodename: "trixie", Name: "Debian GNU/Linux", }, - expectedType: Debian, - expectedRawVersion: "unstable", - expectedVersion: "", + expected: &Distro{ + Type: Debian, + Codename: "trixie", + }, + major: "", + minor: "", }, { name: "azure linux 3", @@ -89,176 +108,204 @@ func Test_NewDistroFromRelease(t *testing.T) { Version: "3.0.20240417", VersionID: "3.0", }, - expectedType: Azure, - 
expectedRawVersion: "3.0", + expected: &Distro{ + Type: Azure, + Version: "3.0", + }, + major: "3", + minor: "0", }, } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - d, err := NewFromRelease(test.release) - if test.expectErr { - require.Error(t, err) - return - } else { - require.NoError(t, err) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.expectErr == nil { + tt.expectErr = require.NoError } - assert.Equal(t, test.expectedType, d.Type) - if test.expectedVersion != "" { - assert.Equal(t, test.expectedVersion, d.Version.String()) + distro, err := NewFromRelease(tt.release) + tt.expectErr(t, err) + if err != nil { + return } - if test.expectedRawVersion != "" { - assert.Equal(t, test.expectedRawVersion, d.FullVersion()) + + if d := cmp.Diff(tt.expected, distro, cmpopts.IgnoreUnexported(Distro{})); d != "" { + t.Errorf("unexpected result: %s", d) } + assert.Equal(t, tt.major, distro.MajorVersion(), "unexpected major version") + assert.Equal(t, tt.minor, distro.MinorVersion(), "unexpected minor version") }) } } func Test_NewDistroFromRelease_Coverage(t *testing.T) { + observedDistros := stringutil.NewStringSet() + definedDistros := stringutil.NewStringSet() + + for _, distroType := range All { + definedDistros.Add(string(distroType)) + } + + // Somewhat cheating with Windows. There is no support for detecting/parsing a Windows OS, so it is not + // possible to comply with this test unless it is added manually to the "observed distros" + definedDistros.Remove(string(Windows)) + tests := []struct { - fixture string - Type Type - Version string + Name string + Type Type + Version string + LabelVersion string }{ { - fixture: "test-fixtures/os/alpine", + Name: "test-fixtures/os/alpine", Type: Alpine, Version: "3.11.6", }, { - fixture: "test-fixtures/os/amazon", + Name: "test-fixtures/os/alpine-edge", + Type: Alpine, + Version: "3.22.0_alpha20250108", + }, + { + Name: "test-fixtures/os/amazon", Type: AmazonLinux, - Version: "2.0.0", + Version: "2", }, { - fixture: "test-fixtures/os/busybox", + Name: "test-fixtures/os/busybox", Type: Busybox, Version: "1.31.1", }, { - fixture: "test-fixtures/os/centos", + Name: "test-fixtures/os/centos", Type: CentOS, - Version: "8.0.0", + Version: "8", }, { - fixture: "test-fixtures/os/debian", + Name: "test-fixtures/os/debian", Type: Debian, - Version: "8.0.0", + Version: "8", + }, + { + Name: "test-fixtures/os/debian-sid", + Type: Debian, + LabelVersion: "trixie", }, { - fixture: "test-fixtures/os/fedora", + Name: "test-fixtures/os/fedora", Type: Fedora, - Version: "31.0.0", + Version: "31", }, { - fixture: "test-fixtures/os/redhat", + Name: "test-fixtures/os/redhat", Type: RedHat, - Version: "7.3.0", + Version: "7.3", }, { - fixture: "test-fixtures/os/ubuntu", - Type: Ubuntu, - Version: "20.4.0", + Name: "test-fixtures/os/ubuntu", + Type: Ubuntu, + Version: "20.04", + LabelVersion: "focal", }, { - fixture: "test-fixtures/os/oraclelinux", + Name: "test-fixtures/os/oraclelinux", Type: OracleLinux, - Version: "8.3.0", + Version: "8.3", }, { - fixture: "test-fixtures/os/custom", + Name: "test-fixtures/os/custom", Type: RedHat, - Version: "8.0.0", + Version: "8", }, { - fixture: "test-fixtures/os/opensuse-leap", + Name: "test-fixtures/os/opensuse-leap", Type: OpenSuseLeap, - Version: "15.2.0", + Version: "15.2", }, { - fixture: "test-fixtures/os/sles", + Name: "test-fixtures/os/sles", Type: SLES, - Version: "15.2.0", + Version: "15.2", }, { - fixture: "test-fixtures/os/photon", + Name: "test-fixtures/os/photon", 
Type: Photon, - Version: "2.0.0", + Version: "2.0", }, { - fixture: "test-fixtures/os/arch", - Type: ArchLinux, + Name: "test-fixtures/os/arch", + Type: ArchLinux, }, { - fixture: "test-fixtures/partial-fields/missing-id", + Name: "test-fixtures/partial-fields/missing-id", Type: Debian, - Version: "8.0.0", + Version: "8", }, { - fixture: "test-fixtures/partial-fields/unknown-id", + Name: "test-fixtures/partial-fields/unknown-id", Type: Debian, - Version: "8.0.0", + Version: "8", }, { - fixture: "test-fixtures/os/centos6", + Name: "test-fixtures/os/centos6", Type: CentOS, - Version: "6.0.0", + Version: "6", }, { - fixture: "test-fixtures/os/centos5", + Name: "test-fixtures/os/centos5", Type: CentOS, - Version: "5.7.0", + Version: "5.7", }, { - fixture: "test-fixtures/os/mariner", + Name: "test-fixtures/os/mariner", Type: Mariner, - Version: "1.0.0", + Version: "1.0", }, { - fixture: "test-fixtures/os/azurelinux", + Name: "test-fixtures/os/azurelinux", Type: Azure, - Version: "3.0.0", + Version: "3.0", }, { - fixture: "test-fixtures/os/rockylinux", + Name: "test-fixtures/os/rockylinux", Type: RockyLinux, - Version: "8.4.0", + Version: "8.4", }, { - fixture: "test-fixtures/os/almalinux", + Name: "test-fixtures/os/almalinux", Type: AlmaLinux, - Version: "8.4.0", + Version: "8.4", }, { - fixture: "test-fixtures/os/gentoo", - Type: Gentoo, + Name: "test-fixtures/os/echo", + Type: Echo, + Version: "1", }, { - fixture: "test-fixtures/os/wolfi", + Name: "test-fixtures/os/gentoo", + Type: Gentoo, + }, + { + Name: "test-fixtures/os/wolfi", Type: Wolfi, + Version: "20220914", }, { - fixture: "test-fixtures/os/chainguard", + Name: "test-fixtures/os/chainguard", Type: Chainguard, + Version: "20230214", + }, + { + Name: "test-fixtures/os/minimos", + Type: MinimOS, + Version: "20241031", }, } - observedDistros := stringutil.NewStringSet() - definedDistros := stringutil.NewStringSet() - - for _, distroType := range All { - definedDistros.Add(string(distroType)) - } - - // Somewhat cheating with Windows. 
There is no support for detecting/parsing a Windows OS, so it is not - // possible to comply with this test unless it is added manually to the "observed distros" - definedDistros.Remove(string(Windows)) - - for _, test := range tests { - t.Run(test.fixture, func(t *testing.T) { - s, err := directorysource.NewFromPath(test.fixture) + for _, tt := range tests { + t.Run(tt.Name, func(t *testing.T) { + s, err := directorysource.NewFromPath(tt.Name) require.NoError(t, err) resolver, err := s.FileResolver(source.SquashedScope) @@ -274,10 +321,9 @@ func Test_NewDistroFromRelease_Coverage(t *testing.T) { observedDistros.Add(d.Type.String()) - assert.Equal(t, test.Type, d.Type) - if test.Version != "" { - assert.Equal(t, d.Version.String(), test.Version) - } + assert.Equal(t, tt.Type, d.Type, "unexpected distro type") + assert.Equal(t, tt.LabelVersion, d.Codename, "unexpected label version") + assert.Equal(t, tt.Version, d.Version, "unexpected version") }) } @@ -324,7 +370,7 @@ func TestDistro_FullVersion(t *testing.T) { Version: test.version, }) require.NoError(t, err) - assert.Equal(t, test.expected, d.FullVersion()) + assert.Equal(t, test.expected, d.Version) }) } diff --git a/grype/distro/test-fixtures/os/alpine-edge/etc/os-release b/grype/distro/test-fixtures/os/alpine-edge/etc/os-release new file mode 100644 index 00000000000..c7133dc2390 --- /dev/null +++ b/grype/distro/test-fixtures/os/alpine-edge/etc/os-release @@ -0,0 +1,6 @@ +NAME="Alpine Linux" +ID=alpine +VERSION_ID=3.22.0_alpha20250108 +PRETTY_NAME="Alpine Linux edge" +HOME_URL="https://alpinelinux.org/" +BUG_REPORT_URL="https://gitlab.alpinelinux.org/alpine/aports/-/issues" \ No newline at end of file diff --git a/grype/distro/test-fixtures/os/debian-sid/usr/lib/os-release b/grype/distro/test-fixtures/os/debian-sid/usr/lib/os-release new file mode 100644 index 00000000000..c32b48d1edd --- /dev/null +++ b/grype/distro/test-fixtures/os/debian-sid/usr/lib/os-release @@ -0,0 +1,7 @@ +PRETTY_NAME="Debian GNU/Linux trixie/sid" +NAME="Debian GNU/Linux" +VERSION_CODENAME=trixie +ID=debian +HOME_URL="https://www.debian.org/" +SUPPORT_URL="https://www.debian.org/support" +BUG_REPORT_URL="https://bugs.debian.org/" diff --git a/grype/distro/test-fixtures/os/echo/etc/os-release b/grype/distro/test-fixtures/os/echo/etc/os-release new file mode 100644 index 00000000000..72a42c4e29a --- /dev/null +++ b/grype/distro/test-fixtures/os/echo/etc/os-release @@ -0,0 +1,6 @@ +NAME="Echo Linux" +PRETTY_NAME="Echo Linux" +ID="echo" +ID_LIKE="debian" +VERSION_ID="1" +HOME_URL="https://echohq.com/" \ No newline at end of file diff --git a/grype/distro/test-fixtures/os/minimos/etc/os-release b/grype/distro/test-fixtures/os/minimos/etc/os-release new file mode 100644 index 00000000000..1b104038d9b --- /dev/null +++ b/grype/distro/test-fixtures/os/minimos/etc/os-release @@ -0,0 +1,5 @@ +ID=minimos +NAME="MinimOS" +PRETTY_NAME="MinimOS" +VERSION_ID="20241031" +HOME_URL="https://minimus.io" diff --git a/grype/distro/type.go b/grype/distro/type.go index 69c73c98322..4d990a62cb5 100644 --- a/grype/distro/type.go +++ b/grype/distro/type.go @@ -23,6 +23,7 @@ const ( OpenSuseLeap Type = "opensuseleap" SLES Type = "sles" Photon Type = "photon" + Echo Type = "echo" Windows Type = "windows" Mariner Type = "mariner" Azure Type = "azurelinux" @@ -31,6 +32,7 @@ const ( Gentoo Type = "gentoo" Wolfi Type = "wolfi" Chainguard Type = "chainguard" + MinimOS Type = "minimos" ) // All contains all Linux distribution options @@ -48,6 +50,7 @@ var All = []Type{ OpenSuseLeap, SLES, 
Photon, + Echo, Windows, Mariner, Azure, @@ -56,6 +59,7 @@ var All = []Type{ Gentoo, Wolfi, Chainguard, + MinimOS, } // IDMapping connects a distro ID like "ubuntu" to a Distro type @@ -74,6 +78,7 @@ var IDMapping = map[string]Type{ "opensuse-leap": OpenSuseLeap, "sles": SLES, "photon": Photon, + "echo": Echo, "windows": Windows, "mariner": Mariner, "azurelinux": Azure, @@ -82,6 +87,7 @@ var IDMapping = map[string]Type{ "gentoo": Gentoo, "wolfi": Wolfi, "chainguard": Chainguard, + "minimos": MinimOS, } func TypeFromRelease(release linux.Release) Type { diff --git a/grype/grypeerr/errors.go b/grype/grypeerr/errors.go index a7a8a246366..f1baf28059a 100644 --- a/grype/grypeerr/errors.go +++ b/grype/grypeerr/errors.go @@ -1,6 +1,10 @@ package grypeerr var ( - // ErrAboveSeverityThreshold indicates when a vulnerability severity is discovered that is above the given --fail-on severity value + // ErrAboveSeverityThreshold indicates when a vulnerability severity is discovered that is equal + // or above the given --fail-on severity value. ErrAboveSeverityThreshold = NewExpectedErr("discovered vulnerabilities at or above the severity threshold") + + // ErrDBUpgradeAvailable indicates that a DB upgrade is available. + ErrDBUpgradeAvailable = NewExpectedErr("db upgrade available") ) diff --git a/grype/internal/packagemetadata/discover_type_names.go b/grype/internal/packagemetadata/discover_type_names.go index 617e18d8585..a59748758c3 100644 --- a/grype/internal/packagemetadata/discover_type_names.go +++ b/grype/internal/packagemetadata/discover_type_names.go @@ -16,7 +16,7 @@ import ( var metadataExceptions = strset.New( "FileMetadata", - "PURLFileMetadata", + "SBOMFileMetadata", "PURLLiteralMetadata", "CPELiteralMetadata", ) diff --git a/grype/load_vulnerability_db.go b/grype/load_vulnerability_db.go index ba303435b16..286fcdb1e4b 100644 --- a/grype/load_vulnerability_db.go +++ b/grype/load_vulnerability_db.go @@ -10,7 +10,7 @@ import ( "github.com/anchore/grype/internal/log" ) -func LoadVulnerabilityDB(distCfg v6dist.Config, installCfg v6inst.Config, update bool) (vulnerability.Provider, *v6.Status, error) { +func LoadVulnerabilityDB(distCfg v6dist.Config, installCfg v6inst.Config, update bool) (vulnerability.Provider, *vulnerability.ProviderStatus, error) { client, err := v6dist.NewClient(distCfg) if err != nil { return nil, nil, fmt.Errorf("unable to create distribution client: %w", err) @@ -36,8 +36,8 @@ func LoadVulnerabilityDB(distCfg v6dist.Config, installCfg v6inst.Config, update } s := c.Status() - if s.Err != nil { - return nil, nil, s.Err + if s.Error != nil { + return nil, nil, s.Error } rdr, err := c.Reader() diff --git a/grype/match/details.go b/grype/match/details.go index aeeabad98b2..5c7ac53fbe3 100644 --- a/grype/match/details.go +++ b/grype/match/details.go @@ -4,7 +4,7 @@ import ( "fmt" "strings" - "github.com/mitchellh/hashstructure/v2" + "github.com/gohugoio/hashstructure" ) type Details []Detail @@ -43,7 +43,7 @@ func (m Details) Types() (tys []Type) { } func (m Detail) ID() string { - f, err := hashstructure.Hash(&m, hashstructure.FormatV2, &hashstructure.HashOptions{ + f, err := hashstructure.Hash(&m, &hashstructure.HashOptions{ ZeroNil: true, SlicesAsSets: true, }) diff --git a/grype/match/fingerprint.go b/grype/match/fingerprint.go index d4950ee65c3..cec830fcd09 100644 --- a/grype/match/fingerprint.go +++ b/grype/match/fingerprint.go @@ -3,7 +3,7 @@ package match import ( "fmt" - "github.com/mitchellh/hashstructure/v2" + "github.com/gohugoio/hashstructure" 
"github.com/anchore/grype/grype/pkg" ) @@ -24,7 +24,7 @@ func (m Fingerprint) String() string { } func (m Fingerprint) ID() string { - f, err := hashstructure.Hash(&m, hashstructure.FormatV2, &hashstructure.HashOptions{ + f, err := hashstructure.Hash(&m, &hashstructure.HashOptions{ ZeroNil: true, SlicesAsSets: true, }) diff --git a/grype/match/ignore.go b/grype/match/ignore.go index 83bb38f81fd..f8bc6cc29d3 100644 --- a/grype/match/ignore.go +++ b/grype/match/ignore.go @@ -29,6 +29,7 @@ type IgnoredMatch struct { // rule to apply. type IgnoreRule struct { Vulnerability string `yaml:"vulnerability" json:"vulnerability" mapstructure:"vulnerability"` + IncludeAliases bool `yaml:"include-aliases" json:"include-aliases" mapstructure:"include-aliases"` Reason string `yaml:"reason" json:"reason" mapstructure:"reason"` Namespace string `yaml:"namespace" json:"namespace" mapstructure:"namespace"` FixState string `yaml:"fix-state" json:"fix-state" mapstructure:"fix-state"` @@ -139,7 +140,7 @@ func getIgnoreConditionsForRule(rule IgnoreRule) []ignoreCondition { var ignoreConditions []ignoreCondition if v := rule.Vulnerability; v != "" { - ignoreConditions = append(ignoreConditions, ifVulnerabilityApplies(v)) + ignoreConditions = append(ignoreConditions, ifVulnerabilityApplies(v, rule.IncludeAliases)) } if ns := rule.Namespace; ns != "" { @@ -190,9 +191,19 @@ func ifFixStateApplies(fs string) ignoreCondition { } } -func ifVulnerabilityApplies(vulnerability string) ignoreCondition { +func ifVulnerabilityApplies(vulnerability string, includeAliases bool) ignoreCondition { return func(match Match) bool { - return vulnerability == match.Vulnerability.ID + if vulnerability == match.Vulnerability.ID { + return true + } + if includeAliases { + for _, related := range match.Vulnerability.RelatedVulnerabilities { + if vulnerability == related.ID { + return true + } + } + } + return false } } diff --git a/grype/match/ignore_test.go b/grype/match/ignore_test.go index c4e92114b72..115a5d9ecb4 100644 --- a/grype/match/ignore_test.go +++ b/grype/match/ignore_test.go @@ -41,6 +41,11 @@ var ( Fix: vulnerability.Fix{ State: vulnerability.FixStateNotFixed, }, + RelatedVulnerabilities: []vulnerability.Reference{ + { + ID: "CVE-123", + }, + }, }, Package: pkg.Package{ ID: pkg.ID(uuid.NewString()), @@ -369,6 +374,40 @@ func TestApplyIgnoreRules(t *testing.T) { }, }, }, + { + name: "ignore related matches", + allMatches: allMatches, + ignoreRules: []IgnoreRule{ + { + Vulnerability: "CVE-123", + IncludeAliases: true, + }, + }, + expectedRemainingMatches: []Match{ + allMatches[2], + allMatches[3], + }, + expectedIgnoredMatches: []IgnoredMatch{ + { + Match: allMatches[0], + AppliedIgnoreRules: []IgnoreRule{ + { + Vulnerability: "CVE-123", + IncludeAliases: true, + }, + }, + }, + { + Match: allMatches[1], + AppliedIgnoreRules: []IgnoreRule{ + { + Vulnerability: "CVE-123", + IncludeAliases: true, + }, + }, + }, + }, + }, { name: "ignore subset of matches", allMatches: allMatches, diff --git a/grype/match/matcher.go b/grype/match/matcher.go index 1e8e387faf6..b354e9d43d1 100644 --- a/grype/match/matcher.go +++ b/grype/match/matcher.go @@ -1,6 +1,9 @@ package match import ( + "errors" + "fmt" + "github.com/anchore/grype/grype/pkg" "github.com/anchore/grype/grype/vulnerability" syftPkg "github.com/anchore/syft/syft/pkg" @@ -14,5 +17,28 @@ type Matcher interface { // Match is called for every package found, returning any matches and an optional Ignorer which will be applied // after all matches are found - Match(vp 
vulnerability.Provider, p pkg.Package) ([]Match, []IgnoredMatch, error) + Match(vp vulnerability.Provider, p pkg.Package) ([]Match, []IgnoreFilter, error) +} + +// fatalError can be returned from a Matcher to indicate the matching process should stop. +// When fatalError(s) are encountered by the top-level matching process, these will be returned as errors to the caller. +type fatalError struct { + matcher MatcherType + inner error +} + +// NewFatalError creates a new fatalError wrapping the given error +func NewFatalError(matcher MatcherType, e error) error { + return fatalError{matcher: matcher, inner: e} +} + +// Error implements the error interface for fatalError. +func (f fatalError) Error() string { + return fmt.Sprintf("%s encountered a fatal error: %v", f.matcher, f.inner) +} + +// IsFatalError returns true if err includes a fatalError +func IsFatalError(err error) bool { + var fe fatalError + return err != nil && errors.As(err, &fe) } diff --git a/grype/match/matcher_type.go b/grype/match/matcher_type.go index cc0aa412102..8e4658c21a8 100644 --- a/grype/match/matcher_type.go +++ b/grype/match/matcher_type.go @@ -16,6 +16,7 @@ const ( GoModuleMatcher MatcherType = "go-module-matcher" OpenVexMatcher MatcherType = "openvex-matcher" RustMatcher MatcherType = "rust-matcher" + BitnamiMatcher MatcherType = "bitnami-matcher" ) var AllMatcherTypes = []MatcherType{ @@ -32,6 +33,7 @@ var AllMatcherTypes = []MatcherType{ GoModuleMatcher, OpenVexMatcher, RustMatcher, + BitnamiMatcher, } type MatcherType string diff --git a/grype/match/matches.go b/grype/match/matches.go index 264703920e8..7ce27f6b852 100644 --- a/grype/match/matches.go +++ b/grype/match/matches.go @@ -100,7 +100,8 @@ func (r *Matches) addOrMerge(newMatch Match, newFp Fingerprint) { // case A if err := existingMatch.Merge(newMatch); err != nil { log.WithFields("original", existingMatch.String(), "new", newMatch.String(), "error", err).Warn("unable to merge matches") - // TODO: dropped match in this case, we should figure a way to handle this + // at least capture the additional details + existingMatch.Details = append(existingMatch.Details, newMatch.Details...) } r.byFingerprint[newFp] = existingMatch @@ -125,6 +126,8 @@ func (r *Matches) mergeCoreMatches(newMatch Match, newFp Fingerprint, existingFi // case B1 if replaced := r.replace(newMatch, existingFp, newFp, existingMatch.Details...); !replaced { log.WithFields("original", existingMatch.String(), "new", newMatch.String()).Trace("unable to replace match") + // at least capture the new details + existingMatch.Details = append(existingMatch.Details, newMatch.Details...) } else { return true } @@ -132,7 +135,9 @@ func (r *Matches) mergeCoreMatches(newMatch Match, newFp Fingerprint, existingFi // case B2 if err := existingMatch.Merge(newMatch); err != nil { - log.WithFields("original", existingMatch.String(), "new", newMatch.String(), "error", err).Warn("unable to merge matches") + log.WithFields("original", existingMatch.String(), "new", newMatch.String(), "error", err).Trace("unable to merge matches") + // at least capture the new details + existingMatch.Details = append(existingMatch.Details, newMatch.Details...) 
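For illustration, a minimal sketch of how a caller might consume the fatal-error wrapper defined in grype/match/matcher.go above; the runMatcher helper and its signature are hypothetical, and only match.NewFatalError and match.IsFatalError come from this change:

	package example

	import (
		"fmt"

		"github.com/anchore/grype/grype/match"
		"github.com/anchore/grype/grype/pkg"
		"github.com/anchore/grype/grype/vulnerability"
	)

	// runMatcher runs a single matcher: an error wrapped via match.NewFatalError aborts
	// the run, while any other error keeps whatever matches were produced.
	func runMatcher(m match.Matcher, store vulnerability.Provider, p pkg.Package) ([]match.Match, error) {
		matches, _, err := m.Match(store, p)
		if err != nil {
			if match.IsFatalError(err) {
				return nil, fmt.Errorf("matching aborted: %w", err)
			}
			// non-fatal: log-and-continue semantics are left to the real caller
		}
		return matches, nil
	}
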
} else { return true } diff --git a/grype/match/results.go b/grype/match/results.go index f98dbee5757..e7ff3bbb589 100644 --- a/grype/match/results.go +++ b/grype/match/results.go @@ -7,15 +7,15 @@ import ( "github.com/scylladb/go-set/strset" ) -type CPEPackageParameter struct { - Name string `json:"name"` - Version string `json:"version"` +type CPEParameters struct { + Namespace string `json:"namespace"` + CPEs []string `json:"cpes"` + Package PackageParameter `json:"package"` } -type CPEParameters struct { - Namespace string `json:"namespace"` - CPEs []string `json:"cpes"` - Package CPEPackageParameter `json:"package"` +type PackageParameter struct { + Name string `json:"name"` + Version string `json:"version"` } func (i *CPEParameters) Merge(other CPEParameters) error { @@ -54,3 +54,70 @@ func (h CPEResult) Equals(other CPEResult) bool { return true } + +type DistroParameters struct { + Distro DistroIdentification `json:"distro"` + Package PackageParameter `json:"package"` + Namespace string `json:"namespace"` +} + +type DistroIdentification struct { + Type string `json:"type"` + Version string `json:"version"` +} + +func (d *DistroParameters) Merge(other DistroParameters) error { + if d.Namespace != other.Namespace { + return fmt.Errorf("namespaces do not match") + } + if d.Distro.Type != other.Distro.Type { + return fmt.Errorf("distro types do not match") + } + if d.Distro.Version != other.Distro.Version { + return fmt.Errorf("distro versions do not match") + } + if d.Package.Name != other.Package.Name { + return fmt.Errorf("package names do not match") + } + if d.Package.Version != other.Package.Version { + return fmt.Errorf("package versions do not match") + } + return nil +} + +type DistroResult struct { + VulnerabilityID string `json:"vulnerabilityID"` + VersionConstraint string `json:"versionConstraint"` +} + +func (d DistroResult) Equals(other DistroResult) bool { + return d.VulnerabilityID == other.VulnerabilityID && + d.VersionConstraint == other.VersionConstraint +} + +type EcosystemParameters struct { + Language string `json:"language"` + Namespace string `json:"namespace"` + Package PackageParameter `json:"package"` +} + +func (e *EcosystemParameters) Merge(other EcosystemParameters) error { + if e.Namespace != other.Namespace { + return fmt.Errorf("namespaces do not match") + } + if e.Language != other.Language { + return fmt.Errorf("languages do not match") + } + if e.Package.Name != other.Package.Name { + return fmt.Errorf("package names do not match") + } + if e.Package.Version != other.Package.Version { + return fmt.Errorf("package versions do not match") + } + return nil +} + +type EcosystemResult struct { + VulnerabilityID string `json:"vulnerabilityID"` + VersionConstraint string `json:"versionConstraint"` +} diff --git a/grype/matcher/apk/matcher.go b/grype/matcher/apk/matcher.go index 7aafeaa65c5..dc7de907f0b 100644 --- a/grype/matcher/apk/matcher.go +++ b/grype/matcher/apk/matcher.go @@ -4,7 +4,6 @@ import ( "errors" "fmt" - "github.com/anchore/grype/grype/distro" "github.com/anchore/grype/grype/match" "github.com/anchore/grype/grype/matcher/internal" "github.com/anchore/grype/grype/pkg" @@ -15,8 +14,16 @@ import ( syftPkg "github.com/anchore/syft/syft/pkg" ) -type Matcher struct { -} +var ( + nakVersionString = version.MustGetConstraint("< 0", version.ApkFormat).String() + + // nakConstraint checks the exact version string for being an APK version with "< 0" + nakConstraint = search.ByConstraintFunc(func(c version.Constraint) (bool, error) { + return c.String() 
== nakVersionString, nil + }) +) + +type Matcher struct{} func (m *Matcher) PackageTypes() []syftPkg.Type { return []syftPkg.Type{syftPkg.ApkPkg} @@ -26,11 +33,11 @@ func (m *Matcher) Type() match.MatcherType { return match.ApkMatcher } -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { var matches []match.Match // direct matches with package itself - directMatches, err := m.findMatchesForPackage(store, p) + directMatches, err := m.findMatchesForPackage(store, p, nil) if err != nil { return nil, nil, err } @@ -84,14 +91,7 @@ func (m *Matcher) cpeMatchesWithoutSecDBFixes(provider vulnerability.Provider, p secDBVulnerabilitiesByID := vulnerabilitiesByID(secDBVulnerabilities) - verObj, err := version.NewVersionFromPkg(p) - if err != nil { - if errors.Is(err, version.ErrUnsupportedVersion) { - log.WithFields("error", err).Tracef("skipping package '%s@%s'", p.Name, p.Version) - return nil, nil - } - return nil, fmt.Errorf("matcher failed to parse version pkg='%s' ver='%s': %w", p.Name, p.Version, err) - } + verObj := version.NewVersionFromPkg(p) var finalCpeMatches []match.Match @@ -169,9 +169,9 @@ func vulnerabilitiesByID(vulns []vulnerability.Vulnerability) map[string][]vulne return results } -func (m *Matcher) findMatchesForPackage(store vulnerability.Provider, p pkg.Package) ([]match.Match, error) { +func (m *Matcher) findMatchesForPackage(store vulnerability.Provider, p pkg.Package, catalogPkg *pkg.Package) ([]match.Match, error) { // find SecDB matches for the given package name and version - secDBMatches, _, err := internal.MatchPackageByDistro(store, p, m.Type()) + secDBMatches, _, err := internal.MatchPackageByDistro(store, p, catalogPkg, m.Type()) if err != nil { return nil, err } @@ -193,11 +193,11 @@ func (m *Matcher) findMatchesForPackage(store vulnerability.Provider, p pkg.Pack return matches, nil } -func (m *Matcher) findMatchesForOriginPackage(store vulnerability.Provider, p pkg.Package) ([]match.Match, error) { +func (m *Matcher) findMatchesForOriginPackage(store vulnerability.Provider, catalogPkg pkg.Package) ([]match.Match, error) { var matches []match.Match - for _, indirectPackage := range pkg.UpstreamPackages(p) { - indirectMatches, err := m.findMatchesForPackage(store, indirectPackage) + for _, indirectPackage := range pkg.UpstreamPackages(catalogPkg) { + indirectMatches, err := m.findMatchesForPackage(store, indirectPackage, &catalogPkg) if err != nil { return nil, fmt.Errorf("failed to find vulnerabilities for apk upstream source package: %w", err) } @@ -206,7 +206,7 @@ func (m *Matcher) findMatchesForOriginPackage(store vulnerability.Provider, p pk // we want to make certain that we are tracking the match based on the package from the SBOM (not the indirect package) // however, we also want to keep the indirect package around for future reference - match.ConvertToIndirectMatches(matches, p) + match.ConvertToIndirectMatches(matches, catalogPkg) return matches, nil } @@ -218,9 +218,8 @@ func (m *Matcher) findMatchesForOriginPackage(store vulnerability.Provider, p pk // we want to report these NAK entries as match.IgnoredMatch, to allow for later processing to create ignore rules // based on packages which overlap by location, such as a python binary found in addition to the python APK entry -- // we want to NAK this vulnerability for BOTH packages -func (m *Matcher) 
findNaksForPackage(provider vulnerability.Provider, p pkg.Package) ([]match.IgnoredMatch, error) { - // TODO: this was only applying to specific distros as originally implemented; this should probably be removed: - if d := p.Distro; d == nil || d.Type != distro.Wolfi && d.Type != distro.Chainguard && d.Type != distro.Alpine { +func (m *Matcher) findNaksForPackage(provider vulnerability.Provider, p pkg.Package) ([]match.IgnoreFilter, error) { + if p.Distro == nil { return nil, nil } @@ -248,30 +247,25 @@ func (m *Matcher) findNaksForPackage(provider vulnerability.Provider, p pkg.Pack naks = append(naks, upstreamNaks...) } - var ignores []match.IgnoredMatch + meta, ok := p.Metadata.(pkg.ApkMetadata) + if !ok { + return nil, nil + } + + var ignores []match.IgnoreFilter for _, nak := range naks { - ignores = append(ignores, match.IgnoredMatch{ - Match: match.Match{ - Vulnerability: nak, - Package: p, - Details: nil, // Probably don't need details here - }, - AppliedIgnoreRules: []match.IgnoreRule{ - { - Vulnerability: nak.ID, - Reason: "NAK", - }, - }, - }) + for _, f := range meta.Files { + ignores = append(ignores, + match.IgnoreRule{ + Vulnerability: nak.ID, + IncludeAliases: true, + Reason: "Explicit APK NAK", + Package: match.IgnoreRulePackage{ + Location: f.Path, + }, + }) + } } return ignores, nil } - -var ( - nakVersionString = version.MustGetConstraint("< 0", version.ApkFormat).String() - // nakConstraint checks the exact version string for being an APK version with "< 0" - nakConstraint = search.ByConstraintFunc(func(c version.Constraint) (bool, error) { - return c.String() == nakVersionString, nil - }) -) diff --git a/grype/matcher/apk/matcher_test.go b/grype/matcher/apk/matcher_test.go index 353c973ddb8..b73ddede33d 100644 --- a/grype/matcher/apk/matcher_test.go +++ b/grype/matcher/apk/matcher_test.go @@ -33,10 +33,7 @@ func TestSecDBOnlyMatch(t *testing.T) { vp := mock.VulnerabilityProvider(secDbVuln) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Alpine, "3.12.0", "") p := pkg.Package{ ID: pkg.ID(uuid.NewString()), @@ -58,20 +55,20 @@ func TestSecDBOnlyMatch(t *testing.T) { { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]interface{}{ - "distro": map[string]string{ - "type": d.Type.String(), - "version": d.RawVersion, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: d.Type.String(), + Version: d.Version, }, - "package": map[string]string{ - "name": "libvncserver", - "version": "0.9.9", + Package: match.PackageParameter{ + Name: "libvncserver", + Version: "0.9.9", }, - "namespace": "secdb:distro:alpine:3.12", + Namespace: "secdb:distro:alpine:3.12", }, - Found: map[string]interface{}{ - "versionConstraint": secDbVuln.Constraint.String(), - "vulnerabilityID": "CVE-2020-2", + Found: match.DistroResult{ + VulnerabilityID: "CVE-2020-2", + VersionConstraint: secDbVuln.Constraint.String(), }, Matcher: match.ApkMatcher, }, @@ -112,10 +109,7 @@ func TestBothSecdbAndNvdMatches(t *testing.T) { vp := mock.VulnerabilityProvider(nvdVuln, secDbVuln) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Alpine, "3.12.0", "") p := pkg.Package{ ID: pkg.ID(uuid.NewString()), @@ -137,20 +131,20 @@ func TestBothSecdbAndNvdMatches(t *testing.T) { { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: 
map[string]interface{}{ - "distro": map[string]string{ - "type": d.Type.String(), - "version": d.RawVersion, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: d.Type.String(), + Version: d.Version, }, - "package": map[string]string{ - "name": "libvncserver", - "version": "0.9.9", + Package: match.PackageParameter{ + Name: "libvncserver", + Version: "0.9.9", }, - "namespace": "secdb:distro:alpine:3.12", + Namespace: "secdb:distro:alpine:3.12", }, - Found: map[string]interface{}{ - "versionConstraint": secDbVuln.Constraint.String(), - "vulnerabilityID": "CVE-2020-1", + Found: match.DistroResult{ + VulnerabilityID: "CVE-2020-1", + VersionConstraint: secDbVuln.Constraint.String(), }, Matcher: match.ApkMatcher, }, @@ -198,10 +192,7 @@ func TestBothSecdbAndNvdMatches_DifferentFixInfo(t *testing.T) { } vp := mock.VulnerabilityProvider(nvdVuln, secDbVuln) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Alpine, "3.12.0", "") p := pkg.Package{ ID: pkg.ID(uuid.NewString()), @@ -223,20 +214,20 @@ func TestBothSecdbAndNvdMatches_DifferentFixInfo(t *testing.T) { { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]interface{}{ - "distro": map[string]string{ - "type": d.Type.String(), - "version": d.RawVersion, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: d.Type.String(), + Version: d.Version, }, - "package": map[string]string{ - "name": "libvncserver", - "version": "0.9.9", + Package: match.PackageParameter{ + Name: "libvncserver", + Version: "0.9.9", }, - "namespace": "secdb:distro:alpine:3.12", + Namespace: "secdb:distro:alpine:3.12", }, - Found: map[string]interface{}{ - "versionConstraint": secDbVuln.Constraint.String(), - "vulnerabilityID": "CVE-2020-1", + Found: match.DistroResult{ + VulnerabilityID: "CVE-2020-1", + VersionConstraint: secDbVuln.Constraint.String(), }, Matcher: match.ApkMatcher, }, @@ -278,10 +269,8 @@ func TestBothSecdbAndNvdMatches_DifferentPackageName(t *testing.T) { vp := mock.VulnerabilityProvider(nvdVuln, secDbVuln) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Alpine, "3.12.0", "") + p := pkg.Package{ ID: pkg.ID(uuid.NewString()), Name: "libvncserver", @@ -303,20 +292,20 @@ func TestBothSecdbAndNvdMatches_DifferentPackageName(t *testing.T) { { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]interface{}{ - "distro": map[string]string{ - "type": d.Type.String(), - "version": d.RawVersion, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: d.Type.String(), + Version: d.Version, }, - "package": map[string]string{ - "name": "libvncserver", - "version": "0.9.9", + Package: match.PackageParameter{ + Name: "libvncserver", + Version: "0.9.9", }, - "namespace": "secdb:distro:alpine:3.12", + Namespace: "secdb:distro:alpine:3.12", }, - Found: map[string]interface{}{ - "versionConstraint": secDbVuln.Constraint.String(), - "vulnerabilityID": "CVE-2020-1", + Found: match.DistroResult{ + VulnerabilityID: "CVE-2020-1", + VersionConstraint: secDbVuln.Constraint.String(), }, Matcher: match.ApkMatcher, }, @@ -339,16 +328,14 @@ func TestNvdOnlyMatches(t *testing.T) { PackageName: "libvncserver", Constraint: version.MustGetConstraint("<= 0.9.11", version.UnknownFormat), CPEs: []cpe.CPE{ - 
cpe.Must(`cpe:2.3:a:lib_vnc_project-\(server\):libvncserver:*:*:*:*:*:*:*:*`, ""), + cpe.Must(`cpe:2.3:a:lib_vnc_project-\(server\):lib/vncserver:*:*:*:*:*:*:*:*`, ""), }, } vp := mock.VulnerabilityProvider(nvdVuln) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Alpine, "3.12.0", "") + p := pkg.Package{ ID: pkg.ID(uuid.NewString()), Name: "libvncserver", @@ -356,7 +343,7 @@ func TestNvdOnlyMatches(t *testing.T) { Type: syftPkg.ApkPkg, Distro: d, CPEs: []cpe.CPE{ - cpe.Must("cpe:2.3:a:*:libvncserver:0.9.9:*:*:*:*:*:*:*", ""), + cpe.Must("cpe:2.3:a:*:lib/vncserver:0.9.9:*:*:*:*:*:*:*", ""), }, } @@ -370,15 +357,16 @@ func TestNvdOnlyMatches(t *testing.T) { Type: match.CPEMatch, Confidence: 0.9, SearchedBy: match.CPEParameters{ - CPEs: []string{"cpe:2.3:a:*:libvncserver:0.9.9:*:*:*:*:*:*:*"}, + CPEs: []string{"cpe:2.3:a:*:lib\\/vncserver:0.9.9:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "libvncserver", Version: "0.9.9", }, }, Found: match.CPEResult{ - CPEs: []string{nvdVuln.CPEs[0].Attributes.BindToFmtString()}, + // use .String() for proper escaping + CPEs: []string{nvdVuln.CPEs[0].Attributes.String()}, VersionConstraint: nvdVuln.Constraint.String(), VulnerabilityID: "CVE-2020-1", }, @@ -413,10 +401,8 @@ func TestNvdOnlyMatches_FixInNvd(t *testing.T) { vp := mock.VulnerabilityProvider(nvdVuln) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Alpine, "3.12.0", "") + p := pkg.Package{ ID: pkg.ID(uuid.NewString()), Name: "libvncserver", @@ -444,13 +430,13 @@ func TestNvdOnlyMatches_FixInNvd(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:a:*:libvncserver:0.9.9:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "libvncserver", Version: "0.9.9", }, }, Found: match.CPEResult{ - CPEs: []string{vulnFound.CPEs[0].Attributes.BindToFmtString()}, + CPEs: []string{vulnFound.CPEs[0].Attributes.String()}, VersionConstraint: vulnFound.Constraint.String(), VulnerabilityID: "CVE-2020-1", }, @@ -492,10 +478,8 @@ func TestNvdMatchesProperVersionFiltering(t *testing.T) { vp := mock.VulnerabilityProvider(nvdVulnMatch, nvdVulnNoMatch) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Alpine, "3.12.0", "") + p := pkg.Package{ ID: pkg.ID(uuid.NewString()), Name: "libvncserver", @@ -518,13 +502,13 @@ func TestNvdMatchesProperVersionFiltering(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:a:*:libvncserver:0.9.11:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "libvncserver", Version: "0.9.11-r10", }, }, Found: match.CPEResult{ - CPEs: []string{nvdVulnMatch.CPEs[0].Attributes.BindToFmtString()}, + CPEs: []string{nvdVulnMatch.CPEs[0].Attributes.String()}, VersionConstraint: nvdVulnMatch.Constraint.String(), VulnerabilityID: "CVE-2020-1", }, @@ -565,10 +549,8 @@ func TestNvdMatchesWithSecDBFix(t *testing.T) { vp := mock.VulnerabilityProvider(nvdVuln, secDbVuln) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := 
distro.New(distro.Alpine, "3.12.0", "") + p := pkg.Package{ ID: pkg.ID(uuid.NewString()), Name: "libvncserver", @@ -613,10 +595,8 @@ func TestNvdMatchesNoConstraintWithSecDBFix(t *testing.T) { vp := mock.VulnerabilityProvider(nvdVuln, secDbVuln) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Alpine, "3.12.0", "") + p := pkg.Package{ ID: pkg.ID(uuid.NewString()), Name: "libvncserver", @@ -659,10 +639,8 @@ func TestNVDMatchCanceledByOriginPackageInSecDB(t *testing.T) { vp := mock.VulnerabilityProvider(nvdVuln, secDBVuln) m := Matcher{} - d, err := distro.New(distro.Wolfi, "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Wolfi, "", "") + p := pkg.Package{ ID: pkg.ID(uuid.NewString()), Name: "php-8.3-fpm", // the package will not match anything @@ -702,10 +680,8 @@ func TestDistroMatchBySourceIndirection(t *testing.T) { vp := mock.VulnerabilityProvider(secDbVuln) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Alpine, "3.12.0", "") + p := pkg.Package{ ID: pkg.ID(uuid.NewString()), Name: "musl-utils", @@ -731,20 +707,20 @@ func TestDistroMatchBySourceIndirection(t *testing.T) { { Type: match.ExactIndirectMatch, Confidence: 1.0, - SearchedBy: map[string]interface{}{ - "distro": map[string]string{ - "type": d.Type.String(), - "version": d.RawVersion, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: d.Type.String(), + Version: d.Version, }, - "package": map[string]string{ - "name": "musl", - "version": p.Version, + Package: match.PackageParameter{ + Name: "musl", + Version: p.Version, }, - "namespace": "secdb:distro:alpine:3.12", + Namespace: "secdb:distro:alpine:3.12", }, - Found: map[string]interface{}{ - "versionConstraint": secDbVuln.Constraint.String(), - "vulnerabilityID": "CVE-2020-2", + Found: match.DistroResult{ + VulnerabilityID: "CVE-2020-2", + VersionConstraint: secDbVuln.Constraint.String(), }, Matcher: match.ApkMatcher, }, @@ -774,10 +750,7 @@ func TestSecDBMatchesStillCountedWithCpeErrors(t *testing.T) { vp := mock.VulnerabilityProvider(secDbVuln) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Alpine, "3.12.0", "") p := pkg.Package{ ID: pkg.ID(uuid.NewString()), @@ -802,20 +775,20 @@ func TestSecDBMatchesStillCountedWithCpeErrors(t *testing.T) { { Type: match.ExactIndirectMatch, Confidence: 1.0, - SearchedBy: map[string]interface{}{ - "distro": map[string]string{ - "type": d.Type.String(), - "version": d.RawVersion, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: d.Type.String(), + Version: d.Version, }, - "package": map[string]string{ - "name": "musl", - "version": p.Version, + Package: match.PackageParameter{ + Name: "musl", + Version: p.Version, }, - "namespace": "secdb:distro:alpine:3.12", + Namespace: "secdb:distro:alpine:3.12", }, - Found: map[string]interface{}{ - "versionConstraint": secDbVuln.Constraint.String(), - "vulnerabilityID": "CVE-2020-2", + Found: match.DistroResult{ + VulnerabilityID: "CVE-2020-2", + VersionConstraint: secDbVuln.Constraint.String(), }, Matcher: match.ApkMatcher, }, @@ -844,10 +817,8 @@ func TestNVDMatchBySourceIndirection(t *testing.T) { vp := 
mock.VulnerabilityProvider(nvdVuln) m := Matcher{} - d, err := distro.New(distro.Alpine, "3.12.0", "") - if err != nil { - t.Fatalf("failed to create a new distro: %+v", err) - } + d := distro.New(distro.Alpine, "3.12.0", "") + p := pkg.Package{ ID: pkg.ID(uuid.NewString()), Name: "musl-utils", @@ -876,13 +847,13 @@ func TestNVDMatchBySourceIndirection(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:a:musl:musl:1.3.2-r0:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "musl", Version: "1.3.2-r0", }, }, Found: match.CPEResult{ - CPEs: []string{nvdVuln.CPEs[0].Attributes.BindToFmtString()}, + CPEs: []string{nvdVuln.CPEs[0].Attributes.String()}, VersionConstraint: nvdVuln.Constraint.String(), VulnerabilityID: "CVE-2020-1", }, @@ -903,6 +874,7 @@ func assertMatches(t *testing.T, expected, actual []match.Match) { var opts = []cmp.Option{ cmpopts.IgnoreFields(vulnerability.Vulnerability{}, "Constraint"), cmpopts.IgnoreFields(pkg.Package{}, "Locations"), + cmpopts.IgnoreUnexported(distro.Distro{}), } if diff := cmp.Diff(expected, actual, opts...); diff != "" { @@ -942,12 +914,12 @@ func Test_nakConstraint(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - matches, _, err := nakConstraint.MatchesVulnerability(tt.input) - wantErr := require.NoError - if tt.wantErr != nil { - wantErr = tt.wantErr + if tt.wantErr == nil { + tt.wantErr = require.NoError } - wantErr(t, err) + + matches, _, err := nakConstraint.MatchesVulnerability(tt.input) + tt.wantErr(t, err) require.Equal(t, tt.matches, matches) }) } diff --git a/grype/matcher/bitnami/matcher.go b/grype/matcher/bitnami/matcher.go new file mode 100644 index 00000000000..4c8999cda0c --- /dev/null +++ b/grype/matcher/bitnami/matcher.go @@ -0,0 +1,27 @@ +package bitnami + +import ( + "github.com/anchore/grype/grype/match" + "github.com/anchore/grype/grype/matcher/internal" + "github.com/anchore/grype/grype/pkg" + "github.com/anchore/grype/grype/vulnerability" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +type Matcher struct{} + +func (m *Matcher) PackageTypes() []syftPkg.Type { + return []syftPkg.Type{syftPkg.BitnamiPkg} +} + +func (m *Matcher) Type() match.MatcherType { + return match.BitnamiMatcher +} + +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { + // Bitnami packages' metadata are built from the package URL which contains + // info such as the package name, version, revision, distro or architecture. 
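// For illustration (a hypothetical example, not taken from this change): a Bitnami purl
// such as pkg:bitnami/postgresql@16.1.0-1?arch=arm64&distro=debian-12 would carry the
// name, version/revision, distro, and architecture described above, which is why this
// matcher can delegate directly to ecosystem matching on the package name below.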
+ // ref: https://github.com/anchore/syft/blob/main/syft/pkg/bitnami.go#L3-L13 + // ref: https://github.com/anchore/syft/blob/main/syft/pkg/cataloger/bitnami/package.go#L18-L45 + return internal.MatchPackageByEcosystemPackageName(store, p, p.Name, m.Type()) +} diff --git a/grype/matcher/dotnet/matcher.go b/grype/matcher/dotnet/matcher.go index a78c1d11ccb..461dd2e56ef 100644 --- a/grype/matcher/dotnet/matcher.go +++ b/grype/matcher/dotnet/matcher.go @@ -30,6 +30,6 @@ func (m *Matcher) Type() match.MatcherType { return match.DotnetMatcher } -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { return internal.MatchPackageByEcosystemAndCPEs(store, p, m.Type(), m.cfg.UseCPEs) } diff --git a/grype/matcher/dpkg/matcher.go b/grype/matcher/dpkg/matcher.go index 3470661a827..4d49581e419 100644 --- a/grype/matcher/dpkg/matcher.go +++ b/grype/matcher/dpkg/matcher.go @@ -21,7 +21,7 @@ func (m *Matcher) Type() match.MatcherType { return match.DpkgMatcher } -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { matches := make([]match.Match, 0) sourceMatches, err := m.matchUpstreamPackages(store, p) @@ -30,7 +30,7 @@ func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Ma } matches = append(matches, sourceMatches...) - exactMatches, _, err := internal.MatchPackageByDistro(store, p, m.Type()) + exactMatches, _, err := internal.MatchPackageByDistro(store, p, nil, m.Type()) if err != nil { return nil, nil, fmt.Errorf("failed to match by exact package name: %w", err) } @@ -43,7 +43,7 @@ func (m *Matcher) matchUpstreamPackages(store vulnerability.Provider, p pkg.Pack var matches []match.Match for _, indirectPackage := range pkg.UpstreamPackages(p) { - indirectMatches, _, err := internal.MatchPackageByDistro(store, indirectPackage, m.Type()) + indirectMatches, _, err := internal.MatchPackageByDistro(store, indirectPackage, &p, m.Type()) if err != nil { return nil, fmt.Errorf("failed to find vulnerabilities for dpkg upstream source package: %w", err) } diff --git a/grype/matcher/dpkg/matcher_test.go b/grype/matcher/dpkg/matcher_test.go index fbbd80ff009..42651060420 100644 --- a/grype/matcher/dpkg/matcher_test.go +++ b/grype/matcher/dpkg/matcher_test.go @@ -17,10 +17,7 @@ import ( func TestMatcherDpkg_matchBySourceIndirection(t *testing.T) { matcher := Matcher{} - d, err := distro.New(distro.Debian, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.Debian, "8", "") p := pkg.Package{ ID: pkg.ID(uuid.NewString()), diff --git a/grype/matcher/golang/matcher.go b/grype/matcher/golang/matcher.go index bdb014ca9b8..7a0c4a177bb 100644 --- a/grype/matcher/golang/matcher.go +++ b/grype/matcher/golang/matcher.go @@ -34,7 +34,7 @@ func (m *Matcher) Type() match.MatcherType { return match.GoModuleMatcher } -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { matches := make([]match.Match, 0) mainModule := "" diff --git a/grype/matcher/internal/common.go b/grype/matcher/internal/common.go index b6b5e2e223b..23f5d14ca18 
100644 --- a/grype/matcher/internal/common.go +++ b/grype/matcher/internal/common.go @@ -9,9 +9,9 @@ import ( "github.com/anchore/grype/internal/log" ) -func MatchPackageByEcosystemAndCPEs(store vulnerability.Provider, p pkg.Package, matcher match.MatcherType, includeCPEs bool) ([]match.Match, []match.IgnoredMatch, error) { +func MatchPackageByEcosystemAndCPEs(store vulnerability.Provider, p pkg.Package, matcher match.MatcherType, includeCPEs bool) ([]match.Match, []match.IgnoreFilter, error) { var matches []match.Match - var ignored []match.IgnoredMatch + var ignored []match.IgnoreFilter for _, name := range store.PackageSearchNames(p) { nameMatches, nameIgnores, err := MatchPackageByEcosystemPackageNameAndCPEs(store, p, name, matcher, includeCPEs) @@ -25,7 +25,7 @@ func MatchPackageByEcosystemAndCPEs(store vulnerability.Provider, p pkg.Package, return matches, ignored, nil } -func MatchPackageByEcosystemPackageNameAndCPEs(store vulnerability.Provider, p pkg.Package, packageName string, matcher match.MatcherType, includeCPEs bool) ([]match.Match, []match.IgnoredMatch, error) { +func MatchPackageByEcosystemPackageNameAndCPEs(store vulnerability.Provider, p pkg.Package, packageName string, matcher match.MatcherType, includeCPEs bool) ([]match.Match, []match.IgnoreFilter, error) { matches, ignored, err := MatchPackageByEcosystemPackageName(store, p, packageName, matcher) if err != nil { log.Debugf("could not match by package ecosystem (package=%+v): %v", p, err) diff --git a/grype/matcher/internal/cpe.go b/grype/matcher/internal/cpe.go index 00de88f8758..fe501102582 100644 --- a/grype/matcher/internal/cpe.go +++ b/grype/matcher/internal/cpe.go @@ -74,9 +74,10 @@ func MatchPackageByCPEs(provider vulnerability.Provider, p pkg.Package, upstream searchVersion = transformJvmVersion(searchVersion, c.Attributes.Update) } - verObj, err := version.NewVersion(searchVersion, format) - if err != nil { - return nil, fmt.Errorf("matcher failed to parse version pkg=%q ver=%q: %w", p.Name, p.Version, err) + var verObj *version.Version + var err error + if searchVersion != "" { + verObj = version.NewVersion(searchVersion, format) } // find all vulnerability records in the DB for the given CPE (not including version comparisons) @@ -95,7 +96,7 @@ func MatchPackageByCPEs(provider vulnerability.Provider, p pkg.Package, upstream // relative to the current version information from the CPE (or the package) then the given package // is vulnerable. 
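// For illustration: with the nil-version handling above, a package that has no version is
// no longer skipped at parse time; its nil *version.Version flows into filterCPEsByVersion
// further below, which returns every candidate CPE unchanged, so all potentially relevant
// vulnerabilities are reported (see the reworked "return all possible matches when missing
// version" test case later in this diff).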
for _, vuln := range vulns { - addNewMatch(matchesByFingerprint, vuln, p, *verObj, upstreamMatcher, c) + addNewMatch(matchesByFingerprint, vuln, p, verObj, upstreamMatcher, c) } } @@ -110,7 +111,7 @@ func transformJvmVersion(searchVersion, updateCpeField string) string { return searchVersion } -func addNewMatch(matchesByFingerprint map[match.Fingerprint]match.Match, vuln vulnerability.Vulnerability, p pkg.Package, searchVersion version.Version, upstreamMatcher match.MatcherType, searchedByCPE cpe.CPE) { +func addNewMatch(matchesByFingerprint map[match.Fingerprint]match.Match, vuln vulnerability.Vulnerability, p pkg.Package, searchVersion *version.Version, upstreamMatcher match.MatcherType, searchedByCPE cpe.CPE) { candidateMatch := match.Match{ Vulnerability: vuln, @@ -122,31 +123,36 @@ func addNewMatch(matchesByFingerprint map[match.Fingerprint]match.Match, vuln vu } candidateMatch.Details = addMatchDetails(candidateMatch.Details, - match.Detail{ - Type: match.CPEMatch, - Confidence: 0.9, // TODO: this is hard coded for now - Matcher: upstreamMatcher, - SearchedBy: match.CPEParameters{ - Namespace: vuln.Namespace, - CPEs: []string{ - searchedByCPE.Attributes.BindToFmtString(), - }, - Package: match.CPEPackageParameter{ - Name: p.Name, - Version: p.Version, - }, - }, - Found: match.CPEResult{ - VulnerabilityID: vuln.ID, - VersionConstraint: vuln.Constraint.String(), - CPEs: cpesToString(filterCPEsByVersion(searchVersion, vuln.CPEs)), - }, - }, + CPEMatchDetails(upstreamMatcher, vuln, searchedByCPE, p, searchVersion), ) matchesByFingerprint[candidateMatch.Fingerprint()] = candidateMatch } +func CPEMatchDetails(matcherType match.MatcherType, vuln vulnerability.Vulnerability, searchedByCPE cpe.CPE, p pkg.Package, searchVersion *version.Version) match.Detail { + return match.Detail{ + Type: match.CPEMatch, + Confidence: 0.9, // TODO: this is hard coded for now + Matcher: matcherType, + SearchedBy: match.CPEParameters{ + Namespace: vuln.Namespace, + CPEs: []string{ + // use .String() for proper escaping + searchedByCPE.Attributes.String(), + }, + Package: match.PackageParameter{ + Name: p.Name, + Version: p.Version, + }, + }, + Found: match.CPEResult{ + VulnerabilityID: vuln.ID, + VersionConstraint: vuln.Constraint.String(), + CPEs: cpesToString(filterCPEsByVersion(searchVersion, vuln.CPEs)), + }, + } +} + func addMatchDetails(existingDetails []match.Detail, newDetails match.Detail) []match.Detail { newFound, ok := newDetails.Found.(match.CPEResult) if !ok { @@ -186,7 +192,11 @@ func addMatchDetails(existingDetails []match.Detail, newDetails match.Detail) [] return existingDetails } -func filterCPEsByVersion(pkgVersion version.Version, allCPEs []cpe.CPE) (matchedCPEs []cpe.CPE) { +func filterCPEsByVersion(pkgVersion *version.Version, allCPEs []cpe.CPE) (matchedCPEs []cpe.CPE) { + if pkgVersion == nil { + // all CPEs are valid in the case when a version is not specified + return allCPEs + } for _, c := range allCPEs { if c.Attributes.Version == wfn.Any || c.Attributes.Version == wfn.NA { matchedCPEs = append(matchedCPEs, c) @@ -208,7 +218,7 @@ func filterCPEsByVersion(pkgVersion version.Version, allCPEs []cpe.CPE) (matched continue } - satisfied, err := constraint.Satisfied(&pkgVersion) + satisfied, err := constraint.Satisfied(pkgVersion) if err != nil || satisfied { // if we can't check for version satisfaction, don't filter out the CPE matchedCPEs = append(matchedCPEs, c) @@ -230,7 +240,8 @@ func toMatches(matchesByFingerprint map[match.Fingerprint]match.Match) (matches func cpesToString(cpes 
[]cpe.CPE) []string { var strs = make([]string, len(cpes)) for idx, c := range cpes { - strs[idx] = c.Attributes.BindToFmtString() + // use .String() for proper escaping + strs[idx] = c.Attributes.String() } sort.Strings(strs) return strs diff --git a/grype/matcher/internal/cpe_test.go b/grype/matcher/internal/cpe_test.go index 0a2a2155c9d..c1335d5aa97 100644 --- a/grype/matcher/internal/cpe_test.go +++ b/grype/matcher/internal/cpe_test.go @@ -26,7 +26,7 @@ func newCPETestStore() vulnerability.Provider { Namespace: "nvd:cpe", }, PackageName: "activerecord", - Constraint: version.MustGetConstraint("< 3.7.6", version.SemanticFormat), + Constraint: version.MustGetConstraint("< 3.7.6", version.GemFormat), CPEs: []cpe.CPE{cpe.Must("cpe:2.3:*:activerecord:activerecord:*:*:*:*:*:rails:*:*", "")}, }, { @@ -35,7 +35,7 @@ func newCPETestStore() vulnerability.Provider { Namespace: "nvd:cpe", }, PackageName: "activerecord", - Constraint: version.MustGetConstraint("< 3.7.4", version.SemanticFormat), + Constraint: version.MustGetConstraint("< 3.7.4", version.GemFormat), CPEs: []cpe.CPE{cpe.Must("cpe:2.3:*:activerecord:activerecord:*:*:*:*:*:ruby:*:*", "")}, }, { @@ -148,14 +148,14 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ Namespace: "nvd:cpe", CPEs: []string{"cpe:2.3:*:activerecord:activerecord:3.7.5:rando4:*:re:*:rails:*:*"}, - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "activerecord", Version: "3.7.5", }, }, Found: match.CPEResult{ CPEs: []string{"cpe:2.3:*:activerecord:activerecord:*:*:*:*:*:rails:*:*"}, - VersionConstraint: "< 3.7.6 (semver)", + VersionConstraint: "< 3.7.6 (gem)", VulnerabilityID: "CVE-2017-fake-1", }, Matcher: matcher, @@ -199,14 +199,14 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ Namespace: "nvd:cpe", CPEs: []string{"cpe:2.3:*:activerecord:activerecord:3.7.5:rando4:*:re:*:rails:*:*"}, - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "activerecord", Version: "3.7.5", }, }, Found: match.CPEResult{ CPEs: []string{"cpe:2.3:*:activerecord:activerecord:*:*:*:*:*:rails:*:*"}, - VersionConstraint: "< 3.7.6 (semver)", + VersionConstraint: "< 3.7.6 (gem)", VulnerabilityID: "CVE-2017-fake-1", }, Matcher: matcher, @@ -216,17 +216,147 @@ func TestFindMatchesByPackageCPE(t *testing.T) { }, }, { - name: "suppress matching when missing version", + name: "return all possible matches when missing version", p: pkg.Package{ CPEs: []cpe.CPE{ - cpe.Must("cpe:2.3:*:activerecord:activerecord:unknown:rando1:*:ra:*:ruby:*:*", ""), - cpe.Must("cpe:2.3:*:activerecord:activerecord:unknown:rando4:*:re:*:rails:*:*", ""), + cpe.Must("cpe:2.3:*:activerecord:activerecord:*:rando1:*:ra:*:ruby:*:*", ""), + cpe.Must("cpe:2.3:*:activerecord:activerecord:*:rando4:*:re:*:rails:*:*", ""), }, Name: "activerecord", Version: "", Language: syftPkg.Ruby, Type: syftPkg.GemPkg, }, + expected: []match.Match{ + { + + Vulnerability: vulnerability.Vulnerability{ + Reference: vulnerability.Reference{ID: "CVE-2017-fake-1"}, + }, + Package: pkg.Package{ + CPEs: []cpe.CPE{ + cpe.Must("cpe:2.3:*:activerecord:activerecord:*:rando1:*:ra:*:ruby:*:*", ""), + cpe.Must("cpe:2.3:*:activerecord:activerecord:*:rando4:*:re:*:rails:*:*", ""), + }, + Name: "activerecord", + Version: "", // important! 
+ Language: syftPkg.Ruby, + Type: syftPkg.GemPkg, + }, + + Details: []match.Detail{ + { + Type: match.CPEMatch, + Confidence: 0.9, + SearchedBy: match.CPEParameters{ + CPEs: []string{ + "cpe:2.3:*:activerecord:activerecord:*:rando4:*:re:*:rails:*:*", //important! + }, + Namespace: "nvd:cpe", + Package: match.PackageParameter{ + Name: "activerecord", + Version: "", // important! + }, + }, + Found: match.CPEResult{ + CPEs: []string{"cpe:2.3:*:activerecord:activerecord:*:*:*:*:*:rails:*:*"}, + VersionConstraint: "< 3.7.6 (gem)", + VulnerabilityID: "CVE-2017-fake-1", + }, + Matcher: matcher, + }, + }, + }, + { + + Vulnerability: vulnerability.Vulnerability{ + Reference: vulnerability.Reference{ID: "CVE-2017-fake-2"}, + }, + Package: pkg.Package{ + CPEs: []cpe.CPE{ + cpe.Must("cpe:2.3:*:activerecord:activerecord:*:rando1:*:ra:*:ruby:*:*", ""), + cpe.Must("cpe:2.3:*:activerecord:activerecord:*:rando4:*:re:*:rails:*:*", ""), + }, + Name: "activerecord", + Version: "", // important! + Language: syftPkg.Ruby, + Type: syftPkg.GemPkg, + }, + + Details: []match.Detail{ + { + Type: match.CPEMatch, + Confidence: 0.9, + SearchedBy: match.CPEParameters{ + CPEs: []string{"cpe:2.3:*:activerecord:activerecord:*:rando1:*:ra:*:ruby:*:*"}, //important! + Namespace: "nvd:cpe", + Package: match.PackageParameter{ + Name: "activerecord", + Version: "", // important! + }, + }, + Found: match.CPEResult{ + CPEs: []string{"cpe:2.3:*:activerecord:activerecord:*:*:*:*:*:ruby:*:*"}, + VersionConstraint: "< 3.7.4 (gem)", + VulnerabilityID: "CVE-2017-fake-2", + }, + Matcher: matcher, + }, + }, + }, + { + + Vulnerability: vulnerability.Vulnerability{ + Reference: vulnerability.Reference{ID: "CVE-2017-fake-3"}, + }, + Package: pkg.Package{ + CPEs: []cpe.CPE{ + cpe.Must("cpe:2.3:*:activerecord:activerecord:*:rando1:*:ra:*:ruby:*:*", ""), + cpe.Must("cpe:2.3:*:activerecord:activerecord:*:rando4:*:re:*:rails:*:*", ""), + }, + Name: "activerecord", + Version: "", // important! + Language: syftPkg.Ruby, + Type: syftPkg.GemPkg, + }, + Details: []match.Detail{ + { + Type: match.CPEMatch, + Confidence: 0.9, + SearchedBy: match.CPEParameters{ + CPEs: []string{ + "cpe:2.3:*:activerecord:activerecord:*:rando1:*:ra:*:ruby:*:*", //important! + "cpe:2.3:*:activerecord:activerecord:*:rando4:*:re:*:rails:*:*", //important! + }, + Namespace: "nvd:cpe", + Package: match.PackageParameter{ + Name: "activerecord", + Version: "", // important! 
+ }, + }, + Found: match.CPEResult{ + CPEs: []string{"cpe:2.3:*:activerecord:activerecord:4.0.1:*:*:*:*:*:*:*"}, + VersionConstraint: "= 4.0.1 (gem)", + VulnerabilityID: "CVE-2017-fake-3", + }, + Matcher: matcher, + }, + }, + }, + }, + }, + { + name: "suppress matching when version is unknown", + p: pkg.Package{ + CPEs: []cpe.CPE{ + cpe.Must("cpe:2.3:*:activerecord:activerecord:*:rando1:*:ra:*:ruby:*:*", ""), + cpe.Must("cpe:2.3:*:activerecord:activerecord:*:rando4:*:re:*:rails:*:*", ""), + }, + Name: "activerecord", + Version: "unknown", + Language: syftPkg.Ruby, + Type: syftPkg.GemPkg, + }, expected: []match.Match{}, }, { @@ -267,14 +397,14 @@ func TestFindMatchesByPackageCPE(t *testing.T) { "cpe:2.3:*:activerecord:activerecord:3.7.3:rando4:*:re:*:rails:*:*", }, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "activerecord", Version: "3.7.3", }, }, Found: match.CPEResult{ CPEs: []string{"cpe:2.3:*:activerecord:activerecord:*:*:*:*:*:rails:*:*"}, - VersionConstraint: "< 3.7.6 (semver)", + VersionConstraint: "< 3.7.6 (gem)", VulnerabilityID: "CVE-2017-fake-1", }, Matcher: matcher, @@ -304,14 +434,14 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:*:activerecord:activerecord:3.7.3:rando1:*:ra:*:ruby:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "activerecord", Version: "3.7.3", }, }, Found: match.CPEResult{ CPEs: []string{"cpe:2.3:*:activerecord:activerecord:*:*:*:*:*:ruby:*:*"}, - VersionConstraint: "< 3.7.4 (semver)", + VersionConstraint: "< 3.7.4 (gem)", VulnerabilityID: "CVE-2017-fake-2", }, Matcher: matcher, @@ -353,14 +483,14 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:*:*:activerecord:4.0.1:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "activerecord", Version: "4.0.1", }, }, Found: match.CPEResult{ CPEs: []string{"cpe:2.3:*:activerecord:activerecord:4.0.1:*:*:*:*:*:*:*"}, - VersionConstraint: "= 4.0.1 (semver)", + VersionConstraint: "= 4.0.1 (gem)", VulnerabilityID: "CVE-2017-fake-3", }, Matcher: matcher, @@ -413,7 +543,7 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:*:awesome:awesome:98SE1:rando1:*:ra:*:dunno:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "awesome", Version: "98SE1", }, @@ -463,7 +593,7 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:*:multiple:multiple:1.0:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "multiple", Version: "1.0", }, @@ -527,7 +657,7 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:*:sw:sw:0.1:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "sw", Version: "0.1", }, @@ -583,7 +713,7 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:*:funfun:funfun:5.2.1:*:*:*:*:python:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "funfun", Version: "5.2.1", }, @@ -634,7 +764,7 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ 
CPEs: []string{"cpe:2.3:a:handlebarsjs:handlebars:0.1:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "handlebars", Version: "0.1", }, @@ -684,7 +814,7 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:a:handlebarsjs:handlebars:0.1:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "handlebars", Version: "0.1", }, @@ -734,7 +864,7 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:a:handlebarsjs:handlebars:0.1:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "handlebars", Version: "0.1", }, @@ -784,7 +914,7 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:a:handlebarsjs:handlebars:0.1:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "handlebars", Version: "0.1", }, @@ -847,7 +977,7 @@ func TestFindMatchesByPackageCPE(t *testing.T) { SearchedBy: match.CPEParameters{ CPEs: []string{"cpe:2.3:a:handlebarsjs:handlebars:0.1:*:*:*:*:*:*:*"}, Namespace: "nvd:cpe", - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "handlebars", Version: "0.1", }, @@ -908,6 +1038,20 @@ func TestFilterCPEsByVersion(t *testing.T) { "cpe:2.3:*:multiple:multiple:1.0:*:*:*:*:*:*:*", }, }, + { + name: "do not filter on empty version", + version: "", // important! + vulnerabilityCPEs: []string{ + "cpe:2.3:*:multiple:multiple:*:*:*:*:*:*:*:*", + "cpe:2.3:*:multiple:multiple:1.0:*:*:*:*:*:*:*", + "cpe:2.3:*:multiple:multiple:2.0:*:*:*:*:*:*:*", + }, + expected: []string{ + "cpe:2.3:*:multiple:multiple:*:*:*:*:*:*:*:*", + "cpe:2.3:*:multiple:multiple:1.0:*:*:*:*:*:*:*", + "cpe:2.3:*:multiple:multiple:2.0:*:*:*:*:*:*:*", + }, + }, } for _, test := range tests { @@ -918,18 +1062,19 @@ func TestFilterCPEsByVersion(t *testing.T) { vulnerabilityCPEs[idx] = cpe.Must(c, "") } - versionObj, err := version.NewVersion(test.version, version.UnknownFormat) - if err != nil { - t.Fatalf("unable to get version: %+v", err) + var versionObj *version.Version + if test.version != "" { + versionObj = version.NewVersion(test.version, version.UnknownFormat) } // run the test subject... - actual := filterCPEsByVersion(*versionObj, vulnerabilityCPEs) + actual := filterCPEsByVersion(versionObj, vulnerabilityCPEs) // format CPE objects to string... 
actualStrs := make([]string, len(actual)) for idx, a := range actual { - actualStrs[idx] = a.Attributes.BindToFmtString() + // use .String() for proper escaping + actualStrs[idx] = a.Attributes.String() } assert.ElementsMatch(t, test.expected, actualStrs) diff --git a/grype/matcher/internal/distro.go b/grype/matcher/internal/distro.go index 1608522f9e8..e757f249e54 100644 --- a/grype/matcher/internal/distro.go +++ b/grype/matcher/internal/distro.go @@ -1,7 +1,6 @@ package internal import ( - "errors" "fmt" "strings" @@ -13,70 +12,74 @@ import ( "github.com/anchore/grype/internal/log" ) -func MatchPackageByDistro(provider vulnerability.Provider, p pkg.Package, upstreamMatcher match.MatcherType) ([]match.Match, []match.IgnoredMatch, error) { - if p.Distro == nil { +func MatchPackageByDistro(provider vulnerability.Provider, searchPkg pkg.Package, catalogPkg *pkg.Package, upstreamMatcher match.MatcherType) ([]match.Match, []match.IgnoreFilter, error) { + if searchPkg.Distro == nil { return nil, nil, nil } - if isUnknownVersion(p.Version) { - log.WithFields("package", p.Name).Trace("skipping package with unknown version") + if isUnknownVersion(searchPkg.Version) { + log.WithFields("package", searchPkg.Name).Trace("skipping package with unknown version") return nil, nil, nil } - verObj, err := version.NewVersionFromPkg(p) - if err != nil { - if errors.Is(err, version.ErrUnsupportedVersion) { - log.WithFields("error", err).Tracef("skipping package '%s@%s'", p.Name, p.Version) - return nil, nil, nil - } - return nil, nil, fmt.Errorf("matcher failed to parse version pkg=%q ver=%q: %w", p.Name, p.Version, err) - } - var matches []match.Match vulns, err := provider.FindVulnerabilities( - search.ByPackageName(p.Name), - search.ByDistro(*p.Distro), - onlyQualifiedPackages(p), - onlyVulnerableVersions(verObj), + search.ByPackageName(searchPkg.Name), + search.ByDistro(*searchPkg.Distro), + onlyQualifiedPackages(searchPkg), + onlyVulnerableVersions(version.NewVersionFromPkg(searchPkg)), ) if err != nil { - return nil, nil, fmt.Errorf("matcher failed to fetch distro=%q pkg=%q: %w", p.Distro, p.Name, err) + return nil, nil, fmt.Errorf("matcher failed to fetch distro=%q pkg=%q: %w", searchPkg.Distro, searchPkg.Name, err) } for _, vuln := range vulns { matches = append(matches, match.Match{ Vulnerability: vuln, - Package: p, - Details: []match.Detail{ - { - Type: match.ExactDirectMatch, - Matcher: upstreamMatcher, - SearchedBy: map[string]interface{}{ - "distro": map[string]string{ - "type": p.Distro.Type.String(), - "version": p.Distro.RawVersion, - }, - // why include the package information? The given package searched with may be a source package - // for another package that is installed on the system. This makes it apparent exactly what - // was used in the search. 
- "package": map[string]string{ - "name": p.Name, - "version": p.Version, - }, - "namespace": vuln.Namespace, - }, - Found: map[string]interface{}{ - "vulnerabilityID": vuln.ID, - "versionConstraint": vuln.Constraint.String(), - }, - Confidence: 1.0, // TODO: this is hard coded for now - }, - }, + Package: matchPackage(searchPkg, catalogPkg), + Details: distroMatchDetails(upstreamMatcher, searchPkg, catalogPkg, vuln), }) } return matches, nil, err } +func matchPackage(searchPkg pkg.Package, catalogPkg *pkg.Package) pkg.Package { + if catalogPkg != nil { + return *catalogPkg + } + return searchPkg +} + +func distroMatchDetails(upstreamMatcher match.MatcherType, searchPkg pkg.Package, catalogPkg *pkg.Package, vuln vulnerability.Vulnerability) []match.Detail { + ty := match.ExactIndirectMatch + if catalogPkg == nil { + ty = match.ExactDirectMatch + } + + return []match.Detail{ + { + Type: ty, + Matcher: upstreamMatcher, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: searchPkg.Distro.Type.String(), + Version: searchPkg.Distro.Version, + }, + Package: match.PackageParameter{ + Name: searchPkg.Name, + Version: searchPkg.Version, + }, + Namespace: vuln.Namespace, + }, + Found: match.DistroResult{ + VulnerabilityID: vuln.ID, + VersionConstraint: vuln.Constraint.String(), + }, + Confidence: 1.0, // TODO: this is hard coded for now + }, + } +} + func isUnknownVersion(v string) bool { - return v == "" || strings.ToLower(v) == "unknown" + return strings.ToLower(v) == "unknown" } diff --git a/grype/matcher/internal/distro_test.go b/grype/matcher/internal/distro_test.go index cca7f73f006..156a325ea88 100644 --- a/grype/matcher/internal/distro_test.go +++ b/grype/matcher/internal/distro_test.go @@ -51,10 +51,7 @@ func TestFindMatchesByPackageDistro(t *testing.T) { }, } - d, err := distro.New(distro.Debian, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.Debian, "8", "") p.Distro = d expected := []match.Match{ @@ -70,20 +67,20 @@ func TestFindMatchesByPackageDistro(t *testing.T) { { Type: match.ExactDirectMatch, Confidence: 1, - SearchedBy: map[string]interface{}{ - "distro": map[string]string{ - "type": "debian", - "version": "8", + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: "debian", + Version: "8", }, - "package": map[string]string{ - "name": "neutron", - "version": "2014.1.3-6", + Package: match.PackageParameter{ + Name: "neutron", + Version: "2014.1.3-6", }, - "namespace": "secdb:distro:debian:8", + Namespace: "secdb:distro:debian:8", }, - Found: map[string]interface{}{ - "versionConstraint": "< 2014.1.5-6 (deb)", - "vulnerabilityID": "CVE-2014-fake-1", + Found: match.DistroResult{ + VersionConstraint: "< 2014.1.5-6 (deb)", + VulnerabilityID: "CVE-2014-fake-1", }, Matcher: match.PythonMatcher, }, @@ -92,14 +89,14 @@ func TestFindMatchesByPackageDistro(t *testing.T) { } store := newMockProviderByDistro() - actual, ignored, err := MatchPackageByDistro(store, p, match.PythonMatcher) + actual, ignored, err := MatchPackageByDistro(store, p, nil, match.PythonMatcher) require.NoError(t, err) require.Empty(t, ignored) assertMatchesUsingIDsForVulnerabilities(t, expected, actual) // prove we do not search for unknown versions p.Version = "unknown" - actual, ignored, err = MatchPackageByDistro(store, p, match.PythonMatcher) + actual, ignored, err = MatchPackageByDistro(store, p, nil, match.PythonMatcher) require.NoError(t, err) require.Empty(t, ignored) assert.Empty(t, actual) @@ 
-118,10 +115,7 @@ func TestFindMatchesByPackageDistroSles(t *testing.T) { }, } - d, err := distro.New(distro.SLES, "12.5", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.SLES, "12.5", "") p.Distro = d expected := []match.Match{ @@ -137,20 +131,20 @@ func TestFindMatchesByPackageDistroSles(t *testing.T) { { Type: match.ExactDirectMatch, Confidence: 1, - SearchedBy: map[string]interface{}{ - "distro": map[string]string{ - "type": "sles", - "version": "12.5", + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: "sles", + Version: "12.5", }, - "package": map[string]string{ - "name": "sles_test_package", - "version": "2014.1.3-6", + Package: match.PackageParameter{ + Name: "sles_test_package", + Version: "2014.1.3-6", }, - "namespace": "secdb:distro:sles:12.5", + Namespace: "secdb:distro:sles:12.5", }, - Found: map[string]interface{}{ - "versionConstraint": "< 2014.1.5-6 (rpm)", - "vulnerabilityID": "CVE-2014-fake-4", + Found: match.DistroResult{ + VersionConstraint: "< 2014.1.5-6 (rpm)", + VulnerabilityID: "CVE-2014-fake-4", }, Matcher: match.PythonMatcher, }, @@ -159,7 +153,7 @@ func TestFindMatchesByPackageDistroSles(t *testing.T) { } store := newMockProviderByDistro() - actual, ignored, err := MatchPackageByDistro(store, p, match.PythonMatcher) + actual, ignored, err := MatchPackageByDistro(store, p, nil, match.PythonMatcher) assert.NoError(t, err) require.Empty(t, ignored) assertMatchesUsingIDsForVulnerabilities(t, expected, actual) diff --git a/grype/matcher/internal/language.go b/grype/matcher/internal/language.go index 645d87d4660..2580ba5972c 100644 --- a/grype/matcher/internal/language.go +++ b/grype/matcher/internal/language.go @@ -1,7 +1,6 @@ package internal import ( - "errors" "fmt" "github.com/anchore/grype/grype/match" @@ -12,9 +11,9 @@ import ( "github.com/anchore/grype/internal/log" ) -func MatchPackageByLanguage(store vulnerability.Provider, p pkg.Package, matcherType match.MatcherType) ([]match.Match, []match.IgnoredMatch, error) { +func MatchPackageByLanguage(store vulnerability.Provider, p pkg.Package, matcherType match.MatcherType) ([]match.Match, []match.IgnoreFilter, error) { var matches []match.Match - var ignored []match.IgnoredMatch + var ignored []match.IgnoreFilter for _, name := range store.PackageSearchNames(p) { nameMatches, nameIgnores, err := MatchPackageByEcosystemPackageName(store, p, name, matcherType) @@ -28,27 +27,18 @@ func MatchPackageByLanguage(store vulnerability.Provider, p pkg.Package, matcher return matches, ignored, nil } -func MatchPackageByEcosystemPackageName(provider vulnerability.Provider, p pkg.Package, packageName string, matcherType match.MatcherType) ([]match.Match, []match.IgnoredMatch, error) { +func MatchPackageByEcosystemPackageName(provider vulnerability.Provider, p pkg.Package, packageName string, matcherType match.MatcherType) ([]match.Match, []match.IgnoreFilter, error) { if isUnknownVersion(p.Version) { log.WithFields("package", p.Name).Trace("skipping package with unknown version") return nil, nil, nil } - verObj, err := version.NewVersionFromPkg(p) - if err != nil { - if errors.Is(err, version.ErrUnsupportedVersion) { - log.WithFields("error", err).Tracef("skipping package '%s@%s'", p.Name, p.Version) - return nil, nil, nil - } - return nil, nil, fmt.Errorf("matcher failed to parse version pkg=%q ver=%q: %w", p.Name, p.Version, err) - } - var matches []match.Match vulns, err := provider.FindVulnerabilities( search.ByEcosystem(p.Language, p.Type), 
search.ByPackageName(packageName), onlyQualifiedPackages(p), - onlyVulnerableVersions(verObj), + onlyVulnerableVersions(version.NewVersionFromPkg(p)), onlyNonWithdrawnVulnerabilities(), ) if err != nil { @@ -64,17 +54,17 @@ func MatchPackageByEcosystemPackageName(provider vulnerability.Provider, p pkg.P Type: match.ExactDirectMatch, Confidence: 1.0, // TODO: this is hard coded for now Matcher: matcherType, - SearchedBy: map[string]interface{}{ - "language": string(p.Language), - "namespace": vuln.Namespace, - "package": map[string]string{ - "name": p.Name, - "version": p.Version, + SearchedBy: match.EcosystemParameters{ + Language: string(p.Language), + Namespace: vuln.Namespace, + Package: match.PackageParameter{ + Name: p.Name, + Version: p.Version, }, }, - Found: map[string]interface{}{ - "vulnerabilityID": vuln.ID, - "versionConstraint": vuln.Constraint.String(), + Found: match.EcosystemResult{ + VulnerabilityID: vuln.ID, + VersionConstraint: vuln.Constraint.String(), }, }, }, diff --git a/grype/matcher/internal/language_test.go b/grype/matcher/internal/language_test.go index 307ea73db1d..52a239ea137 100644 --- a/grype/matcher/internal/language_test.go +++ b/grype/matcher/internal/language_test.go @@ -24,7 +24,7 @@ func newMockProviderByLanguage() vulnerability.Provider { }, PackageName: "activerecord", // make sure we find it with semVer constraint - Constraint: version.MustGetConstraint("< 3.7.6", version.SemanticFormat), + Constraint: version.MustGetConstraint("< 3.7.6", version.GemFormat), }, { Reference: vulnerability.Reference{ @@ -49,7 +49,7 @@ func newMockProviderByLanguage() vulnerability.Provider { Namespace: "github:language:ruby", }, PackageName: "nokogiri", - Constraint: version.MustGetConstraint("< 1.7.4", version.SemanticFormat), + Constraint: version.MustGetConstraint("< 1.7.4", version.GemFormat), }, }...) 
} @@ -67,14 +67,14 @@ func expectedMatch(p pkg.Package, constraint string) []match.Match { { Type: match.ExactDirectMatch, Confidence: 1, - SearchedBy: map[string]interface{}{ - "language": "ruby", - "namespace": "github:language:ruby", - "package": map[string]string{"name": p.Name, "version": p.Version}, + SearchedBy: match.EcosystemParameters{ + Language: "ruby", + Namespace: "github:language:ruby", + Package: match.PackageParameter{Name: p.Name, Version: p.Version}, }, - Found: map[string]interface{}{ - "versionConstraint": constraint, - "vulnerabilityID": "CVE-2017-fake-1", + Found: match.EcosystemResult{ + VulnerabilityID: "CVE-2017-fake-1", + VersionConstraint: constraint, }, Matcher: match.RubyGemMatcher, }, @@ -90,7 +90,7 @@ func TestFindMatchesByPackageLanguage(t *testing.T) { assertEmpty bool }{ { - constraint: "< 3.7.6 (semver)", + constraint: "< 3.7.6 (gem)", p: pkg.Package{ ID: pkg.ID(uuid.NewString()), Name: "activerecord", @@ -100,7 +100,7 @@ func TestFindMatchesByPackageLanguage(t *testing.T) { }, }, { - constraint: "< 1.7.6 (semver)", + constraint: "< 1.7.6 (gem)", p: pkg.Package{ ID: pkg.ID(uuid.NewString()), Name: "nokogiri", diff --git a/grype/matcher/internal/only_vulnerable_targets.go b/grype/matcher/internal/only_vulnerable_targets.go index e1e9a65d38a..18279730422 100644 --- a/grype/matcher/internal/only_vulnerable_targets.go +++ b/grype/matcher/internal/only_vulnerable_targets.go @@ -154,21 +154,27 @@ func matchesAttribute(a1, a2 string) bool { } func hasIntersectingTargetSoftware(set1, set2 *strset.Set) bool { - set1Pkg := pkgTypesFromTargetSoftware(set1.List()) - set2Pkg := pkgTypesFromTargetSoftware(set2.List()) + set1Pkg := normalizeTargetSoftwares(set1.List()) + set2Pkg := normalizeTargetSoftwares(set2.List()) intersection := strset.Intersection(set1Pkg, set2Pkg) return !intersection.IsEmpty() } -func pkgTypesFromTargetSoftware(ts []string) *strset.Set { - pkgTypes := strset.New() +func normalizeTargetSoftwares(ts []string) *strset.Set { + normalizedTargetSWs := strset.New() for _, ts := range ts { - pt := internal.CPETargetSoftwareToPackageType(ts) + // Attempt to normalize target sw to package type, e.g. node and nodejs should match + pt := string(internal.CPETargetSoftwareToPackageType(ts)) + if pt == "" && ts != "*" && ts != "?" 
&& ts != "-" { + // normalizing failed; preserve raw cpe target sw string as the type + // unless it is wildcard + pt = strings.ToLower(ts) + } if pt != "" { - pkgTypes.Add(string(pt)) + normalizedTargetSWs.Add(pt) } } - return pkgTypes + return normalizedTargetSWs } func packageElements(p pkg.Package, ts []string) string { diff --git a/grype/matcher/internal/only_vulnerable_targets_test.go b/grype/matcher/internal/only_vulnerable_targets_test.go index f33efb2b774..19fe4957bf5 100644 --- a/grype/matcher/internal/only_vulnerable_targets_test.go +++ b/grype/matcher/internal/only_vulnerable_targets_test.go @@ -280,85 +280,80 @@ func TestPkgTypesFromTargetSoftware(t *testing.T) { tests := []struct { name string input []string - expected []syftPkg.Type + expected []string }{ { name: "empty input", input: []string{}, - expected: []syftPkg.Type{}, + expected: []string{}, }, { name: "single input with known mapping", input: []string{"node.js"}, - expected: []syftPkg.Type{syftPkg.NpmPkg}, + expected: []string{string(syftPkg.NpmPkg)}, }, { name: "multiple inputs with known mappings", input: []string{"python", "ruby", "java"}, - expected: []syftPkg.Type{syftPkg.PythonPkg, syftPkg.GemPkg, syftPkg.JavaPkg}, + expected: []string{string(syftPkg.PythonPkg), string(syftPkg.GemPkg), string(syftPkg.JavaPkg)}, }, { name: "case insensitive input", input: []string{"Python", "RUBY", "Java"}, - expected: []syftPkg.Type{syftPkg.PythonPkg, syftPkg.GemPkg, syftPkg.JavaPkg}, + expected: []string{string(syftPkg.PythonPkg), string(syftPkg.GemPkg), string(syftPkg.JavaPkg)}, }, { name: "mixed known and unknown inputs", input: []string{"python", "unknown", "ruby"}, - expected: []syftPkg.Type{syftPkg.PythonPkg, syftPkg.GemPkg}, + expected: []string{string(syftPkg.PythonPkg), "unknown", string(syftPkg.GemPkg)}, }, { name: "all unknown inputs", input: []string{"unknown1", "unknown2", "unknown3"}, - expected: []syftPkg.Type{}, + expected: []string{"unknown1", "unknown2", "unknown3"}, }, { name: "inputs with spaces and hyphens", input: []string{"redhat-enterprise-linux", "jenkins ci"}, - expected: []syftPkg.Type{syftPkg.RpmPkg, syftPkg.JavaPkg}, + expected: []string{string(syftPkg.RpmPkg), string(syftPkg.JavaPkg)}, }, { name: "aliases for the same package type", input: []string{"nodejs", "npm", "javascript"}, - expected: []syftPkg.Type{syftPkg.NpmPkg}, + expected: []string{string(syftPkg.NpmPkg)}, }, { name: "wildcards and special characters should be ignored", - input: []string{"*", "?", ""}, - expected: []syftPkg.Type{}, + input: []string{"*", "?", "-", ""}, + expected: []string{}, }, { name: "Linux distributions", input: []string{"alpine", "debian", "redhat", "gentoo"}, - expected: []syftPkg.Type{syftPkg.ApkPkg, syftPkg.DebPkg, syftPkg.RpmPkg, syftPkg.PortagePkg}, + expected: []string{string(syftPkg.ApkPkg), string(syftPkg.DebPkg), string(syftPkg.RpmPkg), string(syftPkg.PortagePkg)}, }, { name: ".NET ecosystem", input: []string{".net", "asp.net", "c#"}, - expected: []syftPkg.Type{syftPkg.DotnetPkg}, + expected: []string{string(syftPkg.DotnetPkg)}, }, { name: "JavaScript ecosystem", input: []string{"javascript", "node.js", "jquery"}, - expected: []syftPkg.Type{syftPkg.NpmPkg}, + expected: []string{string(syftPkg.NpmPkg)}, }, { name: "Java ecosystem", input: []string{"java", "maven", "kafka", "log4j"}, - expected: []syftPkg.Type{syftPkg.JavaPkg}, + expected: []string{string(syftPkg.JavaPkg)}, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - actual := pkgTypesFromTargetSoftware(test.input) + actual := 
normalizeTargetSoftwares(test.input)
-            var actualTypes []syftPkg.Type
-            for _, typeStr := range actual.List() {
-                actualTypes = append(actualTypes, syftPkg.Type(typeStr))
-            }
-
-            assert.ElementsMatch(t, test.expected, actualTypes, "package types should match")
+            assert.ElementsMatch(t, test.expected, actual.List(), "package types should match")
         })
     }
 }
diff --git a/grype/matcher/internal/only_vulnerable_versions.go b/grype/matcher/internal/only_vulnerable_versions.go
index 81b2315cdfd..bd480411aaa 100644
--- a/grype/matcher/internal/only_vulnerable_versions.go
+++ b/grype/matcher/internal/only_vulnerable_versions.go
@@ -8,7 +8,7 @@ import (
 
 // onlyVulnerableVersion returns a criteria object that tests affected vulnerability ranges against the provided version
 func onlyVulnerableVersions(v *version.Version) vulnerability.Criteria {
-    if v == nil {
+    if v == nil || v.Raw == "" {
         // if no version is provided, match everything
         return search.ByFunc(func(_ vulnerability.Vulnerability) (bool, string, error) {
             return true, "", nil
diff --git a/grype/matcher/java/matcher.go b/grype/matcher/java/matcher.go
index 23b5a1a5d81..11567ad6767 100644
--- a/grype/matcher/java/matcher.go
+++ b/grype/matcher/java/matcher.go
@@ -50,7 +50,7 @@ func (m *Matcher) Type() match.MatcherType {
     return match.JavaMatcher
 }
 
-func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) {
+func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) {
     var matches []match.Match
 
     if m.cfg.SearchMavenUpstream {
@@ -59,8 +59,7 @@ func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Ma
             if strings.Contains(err.Error(), "no artifact found") {
                 log.Debugf("no upstream maven artifact found for %s", p.Name)
             } else {
-                log.WithFields("package", p.Name, "error", err).Error("failed to resolve package details with maven")
-                return nil, nil, fmt.Errorf("resolving package details with maven: %w", err)
+                return nil, nil, match.NewFatalError(match.JavaMatcher, fmt.Errorf("resolving details for package %q with maven: %w", p.Name, err))
             }
         } else {
             matches = append(matches, upstreamMatches...)
@@ -82,23 +81,53 @@ func (m *Matcher) matchUpstreamMavenPackages(store vulnerability.Provider, p pkg
     ctx := context.Background()
 
-    if metadata, ok := p.Metadata.(pkg.JavaMetadata); ok {
-        for _, digest := range metadata.ArchiveDigests {
-            if digest.Algorithm == "sha1" {
-                indirectPackage, err := m.GetMavenPackageBySha(ctx, digest.Value)
-                if err != nil {
-                    return nil, err
-                }
-                indirectMatches, _, err := internal.MatchPackageByLanguage(store, *indirectPackage, m.Type())
-                if err != nil {
-                    return nil, err
-                }
-                matches = append(matches, indirectMatches...)
+    // Check if we need to search Maven by SHA
+    searchMaven, digests := m.shouldSearchMavenBySha(p)
+    if searchMaven {
+        // If the POM artifact ID or group ID is missing, attempt a Maven lookup using the SHA-1 digest
+        for _, digest := range digests {
+            log.Debugf("searching maven, POM data missing for %s", p.Name)
+            indirectPackage, err := m.GetMavenPackageBySha(ctx, digest)
+            if err != nil {
+                return nil, err
+            }
+            indirectMatches, _, err := internal.MatchPackageByLanguage(store, *indirectPackage, m.Type())
+            if err != nil {
+                return nil, err
             }
+            matches = append(matches, indirectMatches...)
         }
+    } else {
+        log.Debugf("skipping maven search, POM data present for %s", p.Name)
+        indirectMatches, _, err := internal.MatchPackageByLanguage(store, p, m.Type())
+        if err != nil {
+            return nil, err
+        }
+        matches = append(matches, indirectMatches...)
     }
 
     match.ConvertToIndirectMatches(matches, p)
     return matches, nil
 }
+
+func (m *Matcher) shouldSearchMavenBySha(p pkg.Package) (bool, []string) {
+    digests := []string{}
+
+    if metadata, ok := p.Metadata.(pkg.JavaMetadata); ok {
+        // if either the PomArtifactID or PomGroupID is missing, we need to search Maven
+        if metadata.PomArtifactID == "" || metadata.PomGroupID == "" {
+            for _, digest := range metadata.ArchiveDigests {
+                if digest.Algorithm == "sha1" && digest.Value != "" {
+                    digests = append(digests, digest.Value)
+                }
+            }
+            // if we need to search Maven but no valid SHA-1 digests exist, skip search
+            if len(digests) == 0 {
+                return false, digests
+            }
+        }
+    }
+
+    return len(digests) > 0, digests
+}
diff --git a/grype/matcher/java/matcher_mocks_test.go b/grype/matcher/java/matcher_mocks_test.go
index f9b703e0a10..c781c7618ae 100644
--- a/grype/matcher/java/matcher_mocks_test.go
+++ b/grype/matcher/java/matcher_mocks_test.go
@@ -23,6 +23,18 @@ func newMockProvider() vulnerability.Provider {
             Constraint: version.MustGetConstraint(">=5.0.1,<5.1.7", version.UnknownFormat),
             Reference:  vulnerability.Reference{ID: "CVE-2013-fake-3", Namespace: "github:language:" + syftPkg.Java.String()},
         },
+        // Package name is expected to resolve to groupID:artifactID if the pom groupID and artifactID are present
+        // See JavaResolver.Names: https://github.com/anchore/grype/blob/402067e958a4fa9d20384752351d6c54b0436ba1/grype/db/v6/name/java.go#L19
+        {
+            PackageName: "org.springframework:spring-webmvc",
+            Constraint:  version.MustGetConstraint(">=5.0.0,<5.1.7", version.UnknownFormat),
+            Reference:   vulnerability.Reference{ID: "CVE-2014-fake-2", Namespace: "github:language:" + syftPkg.Java.String()},
+        },
+        {
+            PackageName: "org.springframework:spring-webmvc",
+            Constraint:  version.MustGetConstraint(">=5.0.1,<5.1.7", version.UnknownFormat),
+            Reference:   vulnerability.Reference{ID: "CVE-2013-fake-3", Namespace: "github:language:" + syftPkg.Java.String()},
+        },
         // unexpected...
{ PackageName: "org.springframework.spring-webmvc", diff --git a/grype/matcher/java/matcher_test.go b/grype/matcher/java/matcher_test.go index ad9531de45c..83e2a4b4eb3 100644 --- a/grype/matcher/java/matcher_test.go +++ b/grype/matcher/java/matcher_test.go @@ -26,57 +26,266 @@ func TestMatcherJava_matchUpstreamMavenPackage(t *testing.T) { } store := newMockProvider() - p := pkg.Package{ - ID: pkg.ID(uuid.NewString()), - Name: "org.springframework.spring-webmvc", - Version: "5.1.5.RELEASE", - Language: syftPkg.Java, - Type: syftPkg.JavaPkg, - Metadata: pkg.JavaMetadata{ - ArchiveDigests: []pkg.Digest{ + // Define test cases + testCases := []struct { + testname string + testExpectRateLimit bool + packages []pkg.Package + }{ + { + testname: "do not search maven - metadata present", + testExpectRateLimit: false, + packages: []pkg.Package{ { - Algorithm: "sha1", - Value: "236e3bfdbdc6c86629237a74f0f11414adb4e211", + ID: pkg.ID(uuid.NewString()), + Name: "org.springframework.spring-webmvc", + Version: "5.1.5.RELEASE", + Language: syftPkg.Java, + Type: syftPkg.JavaPkg, + Metadata: pkg.JavaMetadata{ + PomArtifactID: "spring-webmvc", + PomGroupID: "org.springframework", + ArchiveDigests: []pkg.Digest{ + { + Algorithm: "sha1", + Value: "236e3bfdbdc6c86629237a74f0f11414adb4e211", + }, + }, + }, + }, + }, + }, + { + testname: "search maven - missing metadata", + testExpectRateLimit: false, + packages: []pkg.Package{ + { + ID: pkg.ID(uuid.NewString()), + Name: "org.springframework.spring-webmvc", + Version: "5.1.5.RELEASE", + Language: syftPkg.Java, + Type: syftPkg.JavaPkg, + Metadata: pkg.JavaMetadata{ + PomArtifactID: "", + PomGroupID: "", + ArchiveDigests: []pkg.Digest{ + { + Algorithm: "sha1", + Value: "236e3bfdbdc6c86629237a74f0f11414adb4e211", + }, + }, + }, + }, + }, + }, + { + testname: "search maven - missing sha1 error", + testExpectRateLimit: false, + packages: []pkg.Package{ + { + ID: pkg.ID(uuid.NewString()), + Name: "org.springframework.spring-webmvc", + Version: "5.1.5.RELEASE", + Language: syftPkg.Java, + Type: syftPkg.JavaPkg, + Metadata: pkg.JavaMetadata{ + PomArtifactID: "", + PomGroupID: "", + ArchiveDigests: []pkg.Digest{ + { + Algorithm: "sha1", + Value: "", + }, + }, + }, }, }, }, } t.Run("matching from maven search results", func(t *testing.T) { - matcher := newMatcher(mockMavenSearcher{ - pkg: p, - }) - actual, _ := matcher.matchUpstreamMavenPackages(store, p) - - assert.Len(t, actual, 2, "unexpected matches count") - - foundCVEs := stringutil.NewStringSet() - for _, v := range actual { - foundCVEs.Add(v.Vulnerability.ID) - - require.NotEmpty(t, v.Details) - for _, d := range v.Details { - assert.Equal(t, match.ExactIndirectMatch, d.Type, "indirect match not indicated") - assert.Equal(t, matcher.Type(), d.Matcher, "failed to capture matcher type") - } - assert.Equal(t, p.Name, v.Package.Name, "failed to capture original package name") - } + for _, p := range testCases { + // Adding test isolation + t.Run(p.testname, func(t *testing.T) { + matcher := newMatcher(mockMavenSearcher{ + pkg: p.packages[0], + }) + actual, _ := matcher.matchUpstreamMavenPackages(store, p.packages[0]) - for _, id := range []string{"CVE-2014-fake-2", "CVE-2013-fake-3"} { - if !foundCVEs.Contains(id) { - t.Errorf("missing discovered CVE: %s", id) - } - } - if t.Failed() { - t.Logf("discovered CVES: %+v", foundCVEs) + assert.Len(t, actual, 2, "unexpected matches count") + + foundCVEs := stringutil.NewStringSet() + for _, v := range actual { + foundCVEs.Add(v.Vulnerability.ID) + + require.NotEmpty(t, 
v.Details) + for _, d := range v.Details { + assert.Equal(t, match.ExactIndirectMatch, d.Type, "indirect match not indicated") + assert.Equal(t, matcher.Type(), d.Matcher, "failed to capture matcher type") + } + assert.Equal(t, p.packages[0].Name, v.Package.Name, "failed to capture original package name") + } + + for _, id := range []string{"CVE-2014-fake-2", "CVE-2013-fake-3"} { + if !foundCVEs.Contains(id) { + t.Errorf("missing discovered CVE: %s", id) + } + } + if t.Failed() { + t.Logf("discovered CVES: %+v", foundCVEs) + } + + }) } }) t.Run("handles maven rate limiting", func(t *testing.T) { - matcher := newMatcher(mockMavenSearcher{simulateRateLimiting: true}) + for _, p := range testCases { + // Adding test isolation + t.Run(p.testname, func(t *testing.T) { + matcher := newMatcher(mockMavenSearcher{simulateRateLimiting: true}) - _, err := matcher.matchUpstreamMavenPackages(store, p) + _, err := matcher.matchUpstreamMavenPackages(store, p.packages[0]) - assert.Errorf(t, err, "should have gotten an error from the rate limiting") + if p.testExpectRateLimit { + assert.Errorf(t, err, "should have gotten an error from the rate limiting") + } + }) + } + }) +} + +func TestMatcherJava_shouldSearchMavenBySha(t *testing.T) { + newMatcher := func(searcher MavenSearcher) *Matcher { + return &Matcher{ + cfg: MatcherConfig{ + ExternalSearchConfig: ExternalSearchConfig{ + SearchMavenUpstream: true, + }, + }, + MavenSearcher: searcher, + } + } + + // Define test cases + testCases := []struct { + testname string + expectedShouldSearchMaven bool + testExpectedError bool + packages []pkg.Package + }{ + { + testname: "do not search maven - metadata present", + expectedShouldSearchMaven: false, + testExpectedError: false, + packages: []pkg.Package{ + { + ID: pkg.ID(uuid.NewString()), + Name: "org.springframework.spring-webmvc", + Version: "5.1.5.RELEASE", + Language: syftPkg.Java, + Type: syftPkg.JavaPkg, + Metadata: pkg.JavaMetadata{ + PomArtifactID: "spring-webmvc", + PomGroupID: "org.springframework", + ArchiveDigests: []pkg.Digest{ + { + Algorithm: "sha1", + Value: "236e3bfdbdc6c86629237a74f0f11414adb4e211", + }, + }, + }, + }, + }, + }, + { + testname: "search maven - missing metadata", + expectedShouldSearchMaven: true, + testExpectedError: false, + packages: []pkg.Package{ + { + ID: pkg.ID(uuid.NewString()), + Name: "org.springframework.spring-webmvc", + Version: "5.1.5.RELEASE", + Language: syftPkg.Java, + Type: syftPkg.JavaPkg, + Metadata: pkg.JavaMetadata{ + PomArtifactID: "", + PomGroupID: "", + ArchiveDigests: []pkg.Digest{ + { + Algorithm: "sha1", + Value: "236e3bfdbdc6c86629237a74f0f11414adb4e211", + }, + }, + }, + }, + }, + }, + { + testname: "search maven - missing artifactId", + expectedShouldSearchMaven: true, + packages: []pkg.Package{ + { + ID: pkg.ID(uuid.NewString()), + Name: "org.springframework.spring-webmvc", + Version: "5.1.5.RELEASE", + Language: syftPkg.Java, + Type: syftPkg.JavaPkg, + Metadata: pkg.JavaMetadata{ + PomArtifactID: "", + PomGroupID: "org.springframework", + ArchiveDigests: []pkg.Digest{ + { + Algorithm: "sha1", + Value: "236e3bfdbdc6c86629237a74f0f11414adb4e211", + }, + }, + }, + }, + }, + }, + { + testname: "do not search maven - missing sha1", + expectedShouldSearchMaven: false, + packages: []pkg.Package{ + { + ID: pkg.ID(uuid.NewString()), + Name: "org.springframework.spring-webmvc", + Version: "5.1.5.RELEASE", + Language: syftPkg.Java, + Type: syftPkg.JavaPkg, + Metadata: pkg.JavaMetadata{ + PomArtifactID: "", + PomGroupID: "", + ArchiveDigests: []pkg.Digest{ 
+ { + Algorithm: "sha1", + Value: "", + }, + }, + }, + }, + }, + }, + } + + t.Run("matching from Maven search results", func(t *testing.T) { + for _, p := range testCases { + // Adding test isolation + t.Run(p.testname, func(t *testing.T) { + matcher := newMatcher(mockMavenSearcher{ + pkg: p.packages[0], + }) + actual, digests := matcher.shouldSearchMavenBySha(p.packages[0]) + + assert.Equal(t, p.expectedShouldSearchMaven, actual, "unexpected decision to search Maven") + + if actual { + assert.NotEmpty(t, digests, "sha digests should not be empty when search is expected") + } + + }) + } }) } diff --git a/grype/matcher/javascript/matcher.go b/grype/matcher/javascript/matcher.go index c0057ccbdc9..cb0d03e46a3 100644 --- a/grype/matcher/javascript/matcher.go +++ b/grype/matcher/javascript/matcher.go @@ -30,6 +30,6 @@ func (m *Matcher) Type() match.MatcherType { return match.JavascriptMatcher } -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { return internal.MatchPackageByEcosystemAndCPEs(store, p, m.Type(), m.cfg.UseCPEs) } diff --git a/grype/matcher/matchers.go b/grype/matcher/matchers.go index 978f31ac5e4..d95f56ff409 100644 --- a/grype/matcher/matchers.go +++ b/grype/matcher/matchers.go @@ -3,6 +3,7 @@ package matcher import ( "github.com/anchore/grype/grype/match" "github.com/anchore/grype/grype/matcher/apk" + "github.com/anchore/grype/grype/matcher/bitnami" "github.com/anchore/grype/grype/matcher/dotnet" "github.com/anchore/grype/grype/matcher/dpkg" "github.com/anchore/grype/grype/matcher/golang" @@ -44,5 +45,6 @@ func NewDefaultMatchers(mc Config) []match.Matcher { &portage.Matcher{}, rust.NewRustMatcher(mc.Rust), stock.NewStockMatcher(mc.Stock), + &bitnami.Matcher{}, } } diff --git a/grype/matcher/mock/matcher.go b/grype/matcher/mock/matcher.go new file mode 100644 index 00000000000..235fa47eb43 --- /dev/null +++ b/grype/matcher/mock/matcher.go @@ -0,0 +1,45 @@ +package mock + +import ( + "errors" + + "github.com/anchore/grype/grype/match" + "github.com/anchore/grype/grype/pkg" + "github.com/anchore/grype/grype/vulnerability" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +// MatchFunc is a function that takes a vulnerability provider and a package, +// and returns matches, ignored matches, and an error. +type MatchFunc func(vp vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) + +// Matcher is a mock implementation of the match.Matcher interface. This is +// intended for testing purposes only. +type Matcher struct { + typ syftPkg.Type + matchFunc MatchFunc +} + +// New creates a new mock Matcher with the given type and match function. 
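+//
+// Editor's note (illustrative sketch only, not part of this change): a test could stub out
+// matching for a single package type with a fixed result, assuming the usual imports for
+// match, pkg, vulnerability, and syftPkg are in scope:
+//
+//	m := mock.New(syftPkg.JavaPkg, func(_ vulnerability.Provider, _ pkg.Package) ([]match.Match, []match.IgnoreFilter, error) {
+//		return nil, nil, nil // report no matches and no ignored matches
+//	})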
+func New(typ syftPkg.Type, matchFunc MatchFunc) *Matcher { + return &Matcher{ + typ: typ, + matchFunc: matchFunc, + } +} + +func (m Matcher) PackageTypes() []syftPkg.Type { + return []syftPkg.Type{m.typ} +} + +func (m Matcher) Type() match.MatcherType { + return "MOCK" +} + +func (m Matcher) Match(vp vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { + if m.matchFunc != nil { + return m.matchFunc(vp, p) + } + + return nil, nil, errors.New("no match function provided") +} diff --git a/grype/matcher/msrc/matcher.go b/grype/matcher/msrc/matcher.go index edb038b2619..32fe9aa08dd 100644 --- a/grype/matcher/msrc/matcher.go +++ b/grype/matcher/msrc/matcher.go @@ -22,9 +22,9 @@ func (m *Matcher) Type() match.MatcherType { return match.MsrcMatcher } -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { // find KB matches for the MSFT version given in the package and version. - // The "distro" holds the information about the Windows version, and its + // The "package" holds the information about the Windows version, and its // patch (KB) - return internal.MatchPackageByDistro(store, p, m.Type()) + return internal.MatchPackageByEcosystemPackageName(store, p, p.Name, m.Type()) } diff --git a/grype/matcher/msrc/matcher_test.go b/grype/matcher/msrc/matcher_test.go deleted file mode 100644 index 0696dc807b2..00000000000 --- a/grype/matcher/msrc/matcher_test.go +++ /dev/null @@ -1,117 +0,0 @@ -package msrc - -import ( - "fmt" - "testing" - - "github.com/google/uuid" - "github.com/stretchr/testify/require" - - "github.com/anchore/grype/grype/distro" - "github.com/anchore/grype/grype/pkg" - "github.com/anchore/grype/grype/version" - "github.com/anchore/grype/grype/vulnerability" - "github.com/anchore/grype/grype/vulnerability/mock" - syftPkg "github.com/anchore/syft/syft/pkg" -) - -func TestMatches(t *testing.T) { - d, err := distro.New(distro.Windows, "10816", "Windows Server 2016") - require.NoError(t, err) - - // TODO: it would be ideal to test against something that constructs the namespace based on grype-db - // and not break the adaption of grype-db - msrcNamespace := fmt.Sprintf("msrc:distro:windows:%s", d.RawVersion) - - vp := mock.VulnerabilityProvider([]vulnerability.Vulnerability{ - { - Reference: vulnerability.Reference{ - ID: "CVE-2016-3333", - Namespace: msrcNamespace, - }, - PackageName: d.RawVersion, - Constraint: version.MustGetConstraint("3200970 || 878787 || base", version.KBFormat), - }, - { - Reference: vulnerability.Reference{ - // Does not match, version constraints do not apply - ID: "CVE-2020-made-up", - Namespace: msrcNamespace, - }, - PackageName: d.RawVersion, - Constraint: version.MustGetConstraint("778786 || 878787 || base", version.KBFormat), - }, - // Does not match the product ID - { - Reference: vulnerability.Reference{ - ID: "CVE-2020-also-made-up", - Namespace: msrcNamespace, - }, - PackageName: "something-else", - Constraint: version.MustGetConstraint("3200970 || 878787 || base", version.KBFormat), - }, - }...) 
- - tests := []struct { - name string - pkg pkg.Package - expectedVulnIDs []string - }{ - { - name: "direct KB match", - pkg: pkg.Package{ - ID: pkg.ID(uuid.NewString()), - Name: d.RawVersion, - Version: "3200970", - Type: syftPkg.KbPkg, - Distro: d, - }, - expectedVulnIDs: []string{ - "CVE-2016-3333", - }, - }, - { - name: "multiple direct KB match", - pkg: pkg.Package{ - ID: pkg.ID(uuid.NewString()), - Name: d.RawVersion, - Version: "878787", - Type: syftPkg.KbPkg, - Distro: d, - }, - expectedVulnIDs: []string{ - "CVE-2016-3333", - "CVE-2020-made-up", - }, - }, - { - name: "no KBs found", - pkg: pkg.Package{ - ID: pkg.ID(uuid.NewString()), - Name: d.RawVersion, - // this is the assumed version if no KBs are found - Version: "base", - Type: syftPkg.KbPkg, - Distro: d, - }, - expectedVulnIDs: []string{ - "CVE-2016-3333", - "CVE-2020-made-up", - }, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - m := Matcher{} - matches, _, err := m.Match(vp, test.pkg) - require.NoError(t, err) - var actualVulnIDs []string - for _, a := range matches { - actualVulnIDs = append(actualVulnIDs, a.Vulnerability.ID) - } - require.ElementsMatch(t, test.expectedVulnIDs, actualVulnIDs) - }) - } - -} diff --git a/grype/matcher/portage/matcher.go b/grype/matcher/portage/matcher.go index 2126cd337c7..d5b1ba0b3b0 100644 --- a/grype/matcher/portage/matcher.go +++ b/grype/matcher/portage/matcher.go @@ -19,6 +19,6 @@ func (m *Matcher) Type() match.MatcherType { return match.PortageMatcher } -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { - return internal.MatchPackageByDistro(store, p, m.Type()) +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { + return internal.MatchPackageByDistro(store, p, nil, m.Type()) } diff --git a/grype/matcher/portage/matcher_test.go b/grype/matcher/portage/matcher_test.go index 9814d9a526b..b291c954464 100644 --- a/grype/matcher/portage/matcher_test.go +++ b/grype/matcher/portage/matcher_test.go @@ -16,10 +16,7 @@ import ( func TestMatcherPortage_Match(t *testing.T) { matcher := Matcher{} - d, err := distro.New(distro.Gentoo, "", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.Gentoo, "", "") p := pkg.Package{ ID: pkg.ID(uuid.NewString()), diff --git a/grype/matcher/python/matcher.go b/grype/matcher/python/matcher.go index 78916a52e3b..56111525318 100644 --- a/grype/matcher/python/matcher.go +++ b/grype/matcher/python/matcher.go @@ -30,6 +30,6 @@ func (m *Matcher) Type() match.MatcherType { return match.PythonMatcher } -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { return internal.MatchPackageByEcosystemAndCPEs(store, p, m.Type(), m.cfg.UseCPEs) } diff --git a/grype/matcher/rpm/matcher.go b/grype/matcher/rpm/matcher.go index 59971faaa9e..6698428cb43 100644 --- a/grype/matcher/rpm/matcher.go +++ b/grype/matcher/rpm/matcher.go @@ -23,7 +23,7 @@ func (m *Matcher) Type() match.MatcherType { } //nolint:funlen -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { matches := make([]match.Match, 0) // let's match with a 
synthetic package that doesn't exist. We will create a new @@ -107,7 +107,7 @@ func (m *Matcher) matchUpstreamPackages(store vulnerability.Provider, p pkg.Pack var matches []match.Match for _, indirectPackage := range pkg.UpstreamPackages(p) { - indirectMatches, _, err := internal.MatchPackageByDistro(store, indirectPackage, m.Type()) + indirectMatches, _, err := internal.MatchPackageByDistro(store, indirectPackage, &p, m.Type()) if err != nil { return nil, fmt.Errorf("failed to find vulnerabilities for rpm upstream source package: %w", err) } @@ -127,7 +127,7 @@ func (m *Matcher) matchPackage(store vulnerability.Provider, p pkg.Package) ([]m addEpochIfApplicable(&p) - matches, _, err := internal.MatchPackageByDistro(store, p, m.Type()) + matches, _, err := internal.MatchPackageByDistro(store, p, nil, m.Type()) if err != nil { return nil, fmt.Errorf("failed to find vulnerabilities by dpkg source indirection: %w", err) } diff --git a/grype/matcher/rpm/matcher_test.go b/grype/matcher/rpm/matcher_test.go index 568ae6f8af6..eb89c89d9ff 100644 --- a/grype/matcher/rpm/matcher_test.go +++ b/grype/matcher/rpm/matcher_test.go @@ -42,11 +42,7 @@ func TestMatcherRpm(t *testing.T) { }, setup: func() (vulnerability.Provider, *distro.Distro, Matcher) { matcher := Matcher{} - d, err := distro.New(distro.CentOS, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } - + d := distro.New(distro.CentOS, "8", "") store := newMockProvider("neutron-libs", "neutron", false, false) return store, d, matcher @@ -73,10 +69,7 @@ func TestMatcherRpm(t *testing.T) { }, setup: func() (vulnerability.Provider, *distro.Distro, Matcher) { matcher := Matcher{} - d, err := distro.New(distro.CentOS, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.CentOS, "8", "") store := newMockProvider("neutron", "neutron-devel", false, false) @@ -103,10 +96,7 @@ func TestMatcherRpm(t *testing.T) { }, setup: func() (vulnerability.Provider, *distro.Distro, Matcher) { matcher := Matcher{} - d, err := distro.New(distro.CentOS, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.CentOS, "8", "") store := newMockProvider("neutron-libs", "neutron", false, false) @@ -137,10 +127,7 @@ func TestMatcherRpm(t *testing.T) { }, setup: func() (vulnerability.Provider, *distro.Distro, Matcher) { matcher := Matcher{} - d, err := distro.New(distro.CentOS, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.CentOS, "8", "") store := newMockProvider("perl-Errno", "perl", true, false) @@ -162,10 +149,7 @@ func TestMatcherRpm(t *testing.T) { }, setup: func() (vulnerability.Provider, *distro.Distro, Matcher) { matcher := Matcher{} - d, err := distro.New(distro.CentOS, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.CentOS, "8", "") store := newMockProvider("perl-Errno", "doesn't-matter", false, false) @@ -186,10 +170,7 @@ func TestMatcherRpm(t *testing.T) { }, setup: func() (vulnerability.Provider, *distro.Distro, Matcher) { matcher := Matcher{} - d, err := distro.New(distro.CentOS, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.CentOS, "8", "") store := newMockProvider("perl-Errno", "doesn't-matter", true, false) @@ -210,10 +191,7 @@ func TestMatcherRpm(t *testing.T) { }, setup: func() (vulnerability.Provider, *distro.Distro, Matcher) { matcher := Matcher{} - d, err := distro.New(distro.CentOS, 
"8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.CentOS, "8", "") store := newMockProvider("perl-Errno", "doesn't-matter", false, false) @@ -234,10 +212,7 @@ func TestMatcherRpm(t *testing.T) { }, setup: func() (vulnerability.Provider, *distro.Distro, Matcher) { matcher := Matcher{} - d, err := distro.New(distro.CentOS, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.CentOS, "8", "") store := newMockProvider("perl-Errno", "doesn't-matter", true, false) @@ -258,10 +233,7 @@ func TestMatcherRpm(t *testing.T) { }, setup: func() (vulnerability.Provider, *distro.Distro, Matcher) { matcher := Matcher{} - d, err := distro.New(distro.CentOS, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.CentOS, "8", "") store := newMockProvider("maniac", "doesn't-matter", false, true) @@ -285,10 +257,7 @@ func TestMatcherRpm(t *testing.T) { }, setup: func() (vulnerability.Provider, *distro.Distro, Matcher) { matcher := Matcher{} - d, err := distro.New(distro.CentOS, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.CentOS, "8", "") store := newMockProvider("maniac", "doesn't-matter", false, true) @@ -308,10 +277,7 @@ func TestMatcherRpm(t *testing.T) { }, setup: func() (vulnerability.Provider, *distro.Distro, Matcher) { matcher := Matcher{} - d, err := distro.New(distro.CentOS, "8", "") - if err != nil { - t.Fatal("could not create distro: ", err) - } + d := distro.New(distro.CentOS, "8", "") store := newMockProvider("maniac", "doesn't-matter", false, true) diff --git a/grype/matcher/ruby/matcher.go b/grype/matcher/ruby/matcher.go index 0fe094e511f..32ebf87d488 100644 --- a/grype/matcher/ruby/matcher.go +++ b/grype/matcher/ruby/matcher.go @@ -30,6 +30,6 @@ func (m *Matcher) Type() match.MatcherType { return match.RubyGemMatcher } -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { return internal.MatchPackageByEcosystemAndCPEs(store, p, m.Type(), m.cfg.UseCPEs) } diff --git a/grype/matcher/rust/matcher.go b/grype/matcher/rust/matcher.go index 9923cb8ad0c..8691cd05e72 100644 --- a/grype/matcher/rust/matcher.go +++ b/grype/matcher/rust/matcher.go @@ -30,6 +30,6 @@ func (m *Matcher) Type() match.MatcherType { return match.RustMatcher } -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { return internal.MatchPackageByEcosystemAndCPEs(store, p, m.Type(), m.cfg.UseCPEs) } diff --git a/grype/matcher/stock/matcher.go b/grype/matcher/stock/matcher.go index 26dd782f2cd..e7a16cb923a 100644 --- a/grype/matcher/stock/matcher.go +++ b/grype/matcher/stock/matcher.go @@ -30,6 +30,6 @@ func (m *Matcher) Type() match.MatcherType { return match.StockMatcher } -func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoredMatch, error) { +func (m *Matcher) Match(store vulnerability.Provider, p pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { return internal.MatchPackageByEcosystemAndCPEs(store, p, m.Type(), m.cfg.UseCPEs) } diff --git a/grype/pkg/context.go b/grype/pkg/context.go index 5f46a6f9f9c..0279e24a401 100644 --- 
a/grype/pkg/context.go +++ b/grype/pkg/context.go @@ -1,11 +1,11 @@ package pkg import ( - "github.com/anchore/syft/syft/linux" + "github.com/anchore/grype/grype/distro" "github.com/anchore/syft/syft/source" ) type Context struct { Source *source.Description - Distro *linux.Release + Distro *distro.Distro } diff --git a/grype/pkg/package.go b/grype/pkg/package.go index 5d3a0594f44..ee4d8ea13d2 100644 --- a/grype/pkg/package.go +++ b/grype/pkg/package.go @@ -3,15 +3,16 @@ package pkg import ( "fmt" "regexp" + "slices" "strings" "github.com/anchore/grype/grype/distro" "github.com/anchore/grype/internal/log" "github.com/anchore/grype/internal/stringutil" + "github.com/anchore/packageurl-go" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" "github.com/anchore/syft/syft/file" - "github.com/anchore/syft/syft/linux" syftPkg "github.com/anchore/syft/syft/pkg" cpes "github.com/anchore/syft/syft/pkg/cataloger/common/cpe" ) @@ -44,7 +45,7 @@ type Package struct { Metadata interface{} // This is NOT 1-for-1 the syft metadata! Only the select data needed for vulnerability matching } -func New(p syftPkg.Package) Package { +func New(p syftPkg.Package, enhancers ...Enhancer) Package { metadata, upstreams := dataFromPkg(p) licenseObjs := p.Licenses.ToSlice() @@ -57,7 +58,7 @@ func New(p syftPkg.Package) Package { licenses = []string{} } - return Package{ + out := Package{ ID: ID(p.ID()), Name: p.Name, Version: p.Version, @@ -70,13 +71,25 @@ func New(p syftPkg.Package) Package { Upstreams: upstreams, Metadata: metadata, } + + if len(enhancers) > 0 { + purl, err := packageurl.FromString(p.PURL) + if err != nil { + log.WithFields("purl", purl, "error", err).Debug("unable to parse PURL") + } + for _, e := range enhancers { + e(&out, purl, p) + } + } + + return out } -func FromCollection(catalog *syftPkg.Collection, config SynthesisConfig) []Package { - return FromPackages(catalog.Sorted(), config) +func FromCollection(catalog *syftPkg.Collection, config SynthesisConfig, enhancers ...Enhancer) []Package { + return FromPackages(catalog.Sorted(), config, enhancers...) 
} -func FromPackages(syftpkgs []syftPkg.Package, config SynthesisConfig) []Package { +func FromPackages(syftpkgs []syftPkg.Package, config SynthesisConfig, enhancers ...Enhancer) []Package { var pkgs []Package for _, p := range syftpkgs { if len(p.CPEs) == 0 { @@ -87,7 +100,7 @@ func FromPackages(syftpkgs []syftPkg.Package, config SynthesisConfig) []Package log.Debugf("no CPEs for package: %s", p) } } - pkgs = append(pkgs, New(p)) + pkgs = append(pkgs, New(p, enhancers...)) } return pkgs @@ -98,7 +111,7 @@ func (p Package) String() string { return fmt.Sprintf("Pkg(type=%s, name=%s, version=%s, upstreams=%d)", p.Type, p.Name, p.Version, len(p.Upstreams)) } -func removePackagesByOverlap(catalog *syftPkg.Collection, relationships []artifact.Relationship, distro *linux.Release) *syftPkg.Collection { +func removePackagesByOverlap(catalog *syftPkg.Collection, relationships []artifact.Relationship, distro *distro.Distro) *syftPkg.Collection { byOverlap := map[artifact.ID]artifact.Relationship{} for _, r := range relationships { if r.Type == artifact.OwnershipByFileOverlapRelationship { @@ -127,8 +140,8 @@ func excludePackage(comprehensiveDistroFeed bool, p syftPkg.Package, parent syft // python 3.9.2 binary // python3.9 3.9.2-1 deb - // If the version is not effectively the same, keep both - if !strings.HasPrefix(parent.Version, p.Version) { + // If the version is not approximately the same, keep both + if !strings.HasPrefix(parent.Version, p.Version) && !strings.HasPrefix(p.Version, parent.Version) { return false } @@ -151,23 +164,23 @@ func excludePackage(comprehensiveDistroFeed bool, p syftPkg.Package, parent syft // distroFeedIsComprehensive returns true if the distro feed // is comprehensive enough that we can drop packages owned by distro packages // before matching. -func distroFeedIsComprehensive(distro *linux.Release) bool { +func distroFeedIsComprehensive(dst *distro.Distro) bool { // TODO: this mechanism should be re-examined once https://github.com/anchore/grype/issues/1426 // is addressed - if distro == nil { + if dst == nil { return false } - if distro.ID == "amzn" { + if dst.Type == distro.AmazonLinux { // AmazonLinux shows "like rhel" but is not an rhel clone // and does not have an exhaustive vulnerability feed. 
return false } for _, d := range comprehensiveDistros { - if strings.EqualFold(d, distro.ID) { + if strings.EqualFold(string(d), dst.Name()) { return true } - for _, n := range distro.IDLike { - if strings.EqualFold(d, n) { + for _, n := range dst.IDLike { + if strings.EqualFold(string(d), n) { return true } } @@ -177,13 +190,13 @@ func distroFeedIsComprehensive(distro *linux.Release) bool { // computed by: // sqlite3 vulnerability.db 'select distinct namespace from vulnerability where fix_state in ("wont-fix", "not-fixed") order by namespace;' | cut -d ':' -f 1 | sort | uniq -// then removing 'github' and replacing 'redhat' with 'rhel' -var comprehensiveDistros = []string{ - "azurelinux", - "debian", - "mariner", - "rhel", - "ubuntu", +// then removing 'github' +var comprehensiveDistros = []distro.Type{ + distro.Azure, + distro.Debian, + distro.Mariner, + distro.RedHat, + distro.Ubuntu, } func isOSPackage(p syftPkg.Package) bool { @@ -195,15 +208,18 @@ func isOSPackage(p syftPkg.Package) bool { } } -func dataFromPkg(p syftPkg.Package) (interface{}, []UpstreamPackage) { +func dataFromPkg(p syftPkg.Package) (any, []UpstreamPackage) { var metadata interface{} var upstreams []UpstreamPackage + // use the metadata to determine the type of package switch p.Metadata.(type) { case syftPkg.GolangModuleEntry, syftPkg.GolangBinaryBuildinfoEntry: metadata = golangMetadataFromPkg(p) case syftPkg.DpkgDBEntry: upstreams = dpkgDataFromPkg(p) + case syftPkg.DpkgArchiveEntry: + upstreams = dpkgDataFromPkg(p) case syftPkg.RpmArchive, syftPkg.RpmDBEntry: m, u := rpmDataFromPkg(p) upstreams = u @@ -211,7 +227,7 @@ func dataFromPkg(p syftPkg.Package) (interface{}, []UpstreamPackage) { metadata = *m } case syftPkg.JavaArchive: - if m := javaDataFromPkg(p); m != nil { + if m := javaDataFromPkgMetadata(p); m != nil { metadata = *m } case syftPkg.ApkDBEntry: @@ -220,6 +236,14 @@ func dataFromPkg(p syftPkg.Package) (interface{}, []UpstreamPackage) { case syftPkg.JavaVMInstallation: metadata = javaVMDataFromPkg(p) } + + // there are still cases where we could still fill the metadata from other info (such as the PURL) + if metadata == nil { + if p.Type == syftPkg.JavaPkg { + metadata = javaDataFromPkgData(p) + } + } + return metadata, upstreams } @@ -277,16 +301,25 @@ func golangMetadataFromPkg(p syftPkg.Package) interface{} { } func dpkgDataFromPkg(p syftPkg.Package) (upstreams []UpstreamPackage) { - if value, ok := p.Metadata.(syftPkg.DpkgDBEntry); ok { + switch value := p.Metadata.(type) { + case syftPkg.DpkgDBEntry: if value.Source != "" { upstreams = append(upstreams, UpstreamPackage{ Name: value.Source, Version: value.SourceVersion, }) } - } else { - log.Warnf("unable to extract DPKG metadata for %s", p) + case syftPkg.DpkgArchiveEntry: + if value.Source != "" { + upstreams = append(upstreams, UpstreamPackage{ + Name: value.Source, + Version: value.SourceVersion, + }) + } + default: + log.Debugf("unable to extract DPKG metadata for %s", p) } + return upstreams } @@ -318,7 +351,7 @@ func handleSourceRPM(pkgName, sourceRpm string) []UpstreamPackage { var upstreams []UpstreamPackage name, version := getNameAndELVersion(sourceRpm) if name == "" && version == "" { - log.Warnf("unable to extract name and version from SourceRPM=%q ", sourceRpm) + log.Debugf("unable to extract name and version from SourceRPM=%q", sourceRpm) } else if name != pkgName { // don't include matches if the source package name matches the current package name if name != "" && version != "" { @@ -339,13 +372,17 @@ func 
getNameAndELVersion(sourceRpm string) (string, string) { return groupMatches["name"], version } -func javaDataFromPkg(p syftPkg.Package) (metadata *JavaMetadata) { +func javaDataFromPkgMetadata(p syftPkg.Package) (metadata *JavaMetadata) { if value, ok := p.Metadata.(syftPkg.JavaArchive); ok { var artifactID, groupID, name string if value.PomProperties != nil { artifactID = value.PomProperties.ArtifactID groupID = value.PomProperties.GroupID + } else { + // get the group ID / artifact ID from the PURL + artifactID, groupID = javaGroupArtifactIDFromPurl(p.PURL) } + if value.Manifest != nil { for _, kv := range value.Manifest.Main { if kv.Key == "Name" { @@ -371,12 +408,36 @@ func javaDataFromPkg(p syftPkg.Package) (metadata *JavaMetadata) { ManifestName: name, ArchiveDigests: archiveDigests, } - } else { - log.Warnf("unable to extract Java metadata for %s", p) } return metadata } +func javaDataFromPkgData(p syftPkg.Package) (metadata *JavaMetadata) { + switch p.Type { + case syftPkg.JavaPkg: + artifactID, groupID := javaGroupArtifactIDFromPurl(p.PURL) + if artifactID != "" && groupID != "" { + metadata = &JavaMetadata{ + PomArtifactID: artifactID, + PomGroupID: groupID, + } + } + default: + log.Debugf("unable to extract metadata for %s", p) + } + + return metadata +} + +func javaGroupArtifactIDFromPurl(p string) (string, string) { + purl, err := packageurl.FromString(p) + if err != nil { + log.WithFields("purl", purl, "error", err).Debug("unable to parse java PURL") + return "", "" + } + return purl.Name, purl.Namespace +} + func apkDataFromPkg(p syftPkg.Package) (upstreams []UpstreamPackage) { if value, ok := p.Metadata.(syftPkg.ApkDBEntry); ok { if value.OriginPackage != "" { @@ -385,7 +446,7 @@ func apkDataFromPkg(p syftPkg.Package) (upstreams []UpstreamPackage) { }) } } else { - log.Warnf("unable to extract APK metadata for %s", p) + log.Debugf("unable to extract APK metadata for %s", p) } return upstreams } @@ -398,3 +459,88 @@ func ByID(id ID, pkgs []Package) *Package { } return nil } + +func parseUpstream(pkgName string, value string, pkgType syftPkg.Type) []UpstreamPackage { + if pkgType == syftPkg.RpmPkg { + return handleSourceRPM(pkgName, value) + } + return handleDefaultUpstream(pkgName, value) +} + +func handleDefaultUpstream(pkgName string, value string) []UpstreamPackage { + fields := strings.Split(value, "@") + switch len(fields) { + case 2: + if fields[0] == pkgName { + return nil + } + return []UpstreamPackage{ + { + Name: fields[0], + Version: fields[1], + }, + } + case 1: + if fields[0] == pkgName { + return nil + } + return []UpstreamPackage{ + { + Name: fields[0], + }, + } + } + return nil +} + +func setUpstreamsFromPURL(out *Package, purl packageurl.PackageURL, syftPkg syftPkg.Package) { + if len(out.Upstreams) == 0 { + out.Upstreams = upstreamsFromPURL(purl, syftPkg.Type) + } +} + +// upstreamsFromPURL reads any additional data Grype can use, which is ignored by Syft's PURL conversion +func upstreamsFromPURL(purl packageurl.PackageURL, pkgType syftPkg.Type) (upstreams []UpstreamPackage) { + for _, qualifier := range purl.Qualifiers { + if qualifier.Key == syftPkg.PURLQualifierUpstream { + for _, newUpstream := range parseUpstream(purl.Name, qualifier.Value, pkgType) { + if slices.Contains(upstreams, newUpstream) { + continue + } + upstreams = append(upstreams, newUpstream) + } + } + } + return upstreams +} + +func setDistroFromPURL(out *Package, purl packageurl.PackageURL, _ syftPkg.Package) { + if out.Distro == nil { + out.Distro = distroFromPURL(purl) + } +} + +// 
distroFromPURL reads distro data for Grype can use, which is ignored by Syft's PURL conversion +func distroFromPURL(purl packageurl.PackageURL) (d *distro.Distro) { + var distroName, distroVersion string + + for _, qualifier := range purl.Qualifiers { + if qualifier.Key == syftPkg.PURLQualifierDistro { + fields := strings.SplitN(qualifier.Value, "-", 2) + distroName = fields[0] + if len(fields) > 1 { + distroVersion = fields[1] + } + } + } + + if distroName != "" { + d = distro.NewFromNameVersion(distroName, distroVersion) + } + + return d +} + +type Enhancer func(out *Package, purl packageurl.PackageURL, pkg syftPkg.Package) + +var purlEnhancers = []Enhancer{setUpstreamsFromPURL, setDistroFromPURL} diff --git a/grype/pkg/package_test.go b/grype/pkg/package_test.go index 0c02886b312..622e0be5d9f 100644 --- a/grype/pkg/package_test.go +++ b/grype/pkg/package_test.go @@ -7,6 +7,7 @@ import ( "github.com/stretchr/testify/assert" + "github.com/anchore/grype/grype/distro" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" "github.com/anchore/syft/syft/file" @@ -68,6 +69,36 @@ func TestNew(t *testing.T) { }, }, }, + { + name: "dpkg archive with source info", + syftPkg: syftPkg.Package{ + Metadata: syftPkg.DpkgArchiveEntry{ + Package: "pkg-info", + Source: "src-info", + Version: "version-info", + SourceVersion: "src-version-info", + Architecture: "arch-info", + Maintainer: "maintainer-info", + InstalledSize: 10, + Files: []syftPkg.DpkgFileRecord{ + { + Path: "path-info", + Digest: &file.Digest{ + Algorithm: "algo-info", + Value: "digest-info", + }, + IsConfigFile: true, + }, + }, + }, + }, + upstreams: []UpstreamPackage{ + { + Name: "src-info", + Version: "src-version-info", + }, + }, + }, { name: "rpm archive with source info", syftPkg: syftPkg.Package{ @@ -290,6 +321,15 @@ func TestNew(t *testing.T) { }, }, }, + { + name: "github-actions-use-statement", + syftPkg: syftPkg.Package{ + Metadata: syftPkg.GitHubActionsUseStatement{ + Value: "a", + Comment: "a", + }, + }, + }, { name: "golang-metadata", syftPkg: syftPkg.Package{ @@ -337,7 +377,7 @@ func TestNew(t *testing.T) { }, }, { - name: "dart-pub-metadata", + name: "dart-publock-metadata", syftPkg: syftPkg.Package{ Metadata: syftPkg.DartPubspecLockEntry{ Name: "a", @@ -345,6 +385,33 @@ func TestNew(t *testing.T) { }, }, }, + { + name: "dart-pubspec-metadata", + syftPkg: syftPkg.Package{ + Metadata: syftPkg.DartPubspec{ + Homepage: "a", + Repository: "a", + Documentation: "a", + PublishTo: "a", + Environment: &syftPkg.DartPubspecEnvironment{ + SDK: "a", + Flutter: "a", + }, + Platforms: []string{"a"}, + IgnoredAdvisories: []string{"a"}, + }, + }, + }, + { + name: "homebrew-formula-metadata", + syftPkg: syftPkg.Package{ + Metadata: syftPkg.HomebrewFormula{ + Tap: "a", + Homepage: "a", + Description: "a", + }, + }, + }, { name: "dotnet-metadata", syftPkg: syftPkg.Package{ @@ -655,7 +722,7 @@ func TestNew(t *testing.T) { }, }, { - name: "Php-pecl-entry", + name: "php-pecl-entry", syftPkg: syftPkg.Package{ Metadata: syftPkg.PhpPeclEntry{ Name: "a", @@ -664,6 +731,15 @@ func TestNew(t *testing.T) { }, }, }, + { + name: "php-pear-entry", + syftPkg: syftPkg.Package{ + Metadata: syftPkg.PhpPearEntry{ + Name: "a", + Version: "a", + }, + }, + }, { name: "lua-rocks-entry", syftPkg: syftPkg.Package{ @@ -766,6 +842,19 @@ func TestNew(t *testing.T) { }, }, }, + { + name: "pe binary metadata", + syftPkg: syftPkg.Package{ + Metadata: syftPkg.PEBinary{ + VersionResources: syftPkg.KeyValues{ + { + Key: "k", + Value: "k", + }, + }, + }, + 
}, + }, } // capture each observed metadata type, we should see all of them relate to what syft provides by the end of testing @@ -935,22 +1024,44 @@ func Test_RemovePackagesByOverlap(t *testing.T) { }, { name: "python bindings for system RPM install", - sbom: withDistro(catalogWithOverlaps( + sbom: withLinuxRelease(catalogWithOverlaps( []string{"rpm:python3-rpm@4.14.3-26.el8", "python:rpm@4.14.3"}, []string{"rpm:python3-rpm@4.14.3-26.el8 -> python:rpm@4.14.3"}), "rhel"), expectedPackages: []string{"rpm:python3-rpm@4.14.3-26.el8"}, }, { name: "amzn linux doesn't remove packages in this way", - sbom: withDistro(catalogWithOverlaps( + sbom: withLinuxRelease(catalogWithOverlaps( []string{"rpm:python3-rpm@4.14.3-26.el8", "python:rpm@4.14.3"}, []string{"rpm:python3-rpm@4.14.3-26.el8 -> python:rpm@4.14.3"}), "amzn"), expectedPackages: []string{"rpm:python3-rpm@4.14.3-26.el8", "python:rpm@4.14.3"}, }, + { + name: "remove overlapping package when parent version is prefix of child version", + sbom: withLinuxRelease(catalogWithOverlaps( + []string{"rpm:kernel-rt-core@5.14.0-503.40.1.el9_5", "linux-kernel:linux-kernel@5.14.0-503.40.1.el9_5.x86_64+rt"}, + []string{"rpm:kernel-rt-core@5.14.0-503.40.1.el9_5 -> linux-kernel:linux-kernel@5.14.0-503.40.1.el9_5.x86_64+rt"}), "rhel"), + expectedPackages: []string{"rpm:kernel-rt-core@5.14.0-503.40.1.el9_5"}, + }, + { + name: "remove overlapping package when child version is prefix of parent version", + sbom: withLinuxRelease(catalogWithOverlaps( + []string{"rpm:kernel-rt-core@5.14.0-503.40.1.el9_5+rt", "linux-kernel:linux-kernel@5.14.0-503.40.1.el9_5"}, + []string{"rpm:kernel-rt-core@5.14.0-503.40.1.el9_5+rt -> linux-kernel:linux-kernel@5.14.0-503.40.1.el9_5"}), "rhel"), + expectedPackages: []string{"rpm:kernel-rt-core@5.14.0-503.40.1.el9_5+rt"}, + }, + { + name: "do not remove overlapping package when versions are not similar", + sbom: withLinuxRelease(catalogWithOverlaps( + []string{"rpm:kernel@5.14.0-503.40.1.el9_5", "linux-kernel:linux-kernel@6.17"}, + []string{"rpm:kernel@5.14.0-503.40.1.el9_5 -> linux-kernel:linux-kernel@6.17"}), "rhel"), + expectedPackages: []string{"rpm:kernel@5.14.0-503.40.1.el9_5", "linux-kernel:linux-kernel@6.17"}, + }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - catalog := removePackagesByOverlap(test.sbom.Artifacts.Packages, test.sbom.Relationships, test.sbom.Artifacts.LinuxDistribution) + d := distro.FromRelease(test.sbom.Artifacts.LinuxDistribution) + catalog := removePackagesByOverlap(test.sbom.Artifacts.Packages, test.sbom.Relationships, d) pkgs := FromCollection(catalog, SynthesisConfig{}) var pkgNames []string for _, p := range pkgs { @@ -1031,7 +1142,7 @@ func catalogWithOverlaps(packages []string, overlaps []string) *sbom.SBOM { } } -func withDistro(s *sbom.SBOM, id string) *sbom.SBOM { +func withLinuxRelease(s *sbom.SBOM, id string) *sbom.SBOM { s.Artifacts.LinuxDistribution = &linux.Release{ ID: id, } diff --git a/grype/pkg/provider.go b/grype/pkg/provider.go index 165d490ad00..e06af83b23e 100644 --- a/grype/pkg/provider.go +++ b/grype/pkg/provider.go @@ -6,6 +6,7 @@ import ( "github.com/bmatcuk/doublestar/v2" + "github.com/anchore/grype/grype/distro" "github.com/anchore/grype/internal/log" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/sbom" @@ -15,20 +16,17 @@ var errDoesNotProvide = fmt.Errorf("cannot provide packages from the given sourc // Provide a set of packages and context metadata describing where they were sourced from. 
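 //
 // Editor's note (illustrative sketch only, not part of this change): with the PURL provider path
 // below, a single PURL can serve as the user input; when every resulting package reports the same
 // distro (for example from a distro qualifier), that distro is promoted to the returned Context.
 // Assuming a zero-value ProviderConfig is acceptable here:
 //
 //	pkgs, ctx, _, err := pkg.Provide("pkg:apk/curl@7.61.1?distro=alpine-3.18", pkg.ProviderConfig{})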
func Provide(userInput string, config ProviderConfig) ([]Package, Context, *sbom.SBOM, error) { - packages, ctx, s, err := syftSBOMProvider(userInput, config) - if !errors.Is(err, errDoesNotProvide) { - if len(config.Exclusions) > 0 { - var exclusionsErr error - packages, exclusionsErr = filterPackageExclusions(packages, config.Exclusions) - if exclusionsErr != nil { - return nil, ctx, s, exclusionsErr - } - } - log.WithFields("input", userInput).Trace("interpreting input as an SBOM document") - return packages, ctx, s, err + packages, ctx, s, err := provide(userInput, config) + if err != nil { + return nil, Context{}, nil, err } + setContextDistro(packages, &ctx) + return packages, ctx, s, nil +} - packages, ctx, s, err = purlProvider(userInput) +// Provide a set of packages and context metadata describing where they were sourced from. +func provide(userInput string, config ProviderConfig) ([]Package, Context, *sbom.SBOM, error) { + packages, ctx, s, err := purlProvider(userInput, config) if !errors.Is(err, errDoesNotProvide) { log.WithFields("input", userInput).Trace("interpreting input as one or more PURLs") return packages, ctx, s, err @@ -40,6 +38,19 @@ func Provide(userInput string, config ProviderConfig) ([]Package, Context, *sbom return packages, ctx, s, err } + packages, ctx, s, err = syftSBOMProvider(userInput, config) + if !errors.Is(err, errDoesNotProvide) { + if len(config.Exclusions) > 0 { + var exclusionsErr error + packages, exclusionsErr = filterPackageExclusions(packages, config.Exclusions) + if exclusionsErr != nil { + return nil, ctx, s, exclusionsErr + } + } + log.WithFields("input", userInput).Trace("interpreting input as an SBOM document") + return packages, ctx, s, err + } + log.WithFields("input", userInput).Trace("passing input to syft for interpretation") return syftProvider(userInput, config) } @@ -92,3 +103,29 @@ func locationMatches(location file.Location, exclusion string) (bool, error) { } return matchesRealPath || matchesVirtualPath, nil } + +func setContextDistro(packages []Package, ctx *Context) { + if ctx.Distro != nil { + return + } + var singleDistro *distro.Distro + for _, p := range packages { + if p.Distro == nil { + continue + } + if singleDistro == nil { + singleDistro = p.Distro + continue + } + if singleDistro.Type != p.Distro.Type || + singleDistro.Version != p.Distro.Version || + singleDistro.Codename != p.Distro.Codename { + return + } + } + + // if there is one distro (with one version) represented, use that + if singleDistro != nil { + ctx.Distro = singleDistro + } +} diff --git a/grype/pkg/purl_provider.go b/grype/pkg/purl_provider.go index 692a3b10ba9..740f34960fe 100644 --- a/grype/pkg/purl_provider.go +++ b/grype/pkg/purl_provider.go @@ -1,19 +1,11 @@ package pkg import ( - "bufio" "fmt" "io" - "os" "strings" - "github.com/mitchellh/go-homedir" - "github.com/scylladb/go-set/strset" - - "github.com/anchore/packageurl-go" - "github.com/anchore/syft/syft/cpe" - "github.com/anchore/syft/syft/linux" - "github.com/anchore/syft/syft/pkg" + "github.com/anchore/syft/syft/format" "github.com/anchore/syft/syft/sbom" "github.com/anchore/syft/syft/source" ) @@ -21,41 +13,28 @@ import ( const ( purlInputPrefix = "purl:" singlePurlInputPrefix = "pkg:" - cpesQualifierKey = "cpes" ) type PURLLiteralMetadata struct { PURL string } -type PURLFileMetadata struct { - Path string -} - -func purlProvider(userInput string) ([]Package, Context, *sbom.SBOM, error) { +func purlProvider(userInput string, config ProviderConfig) ([]Package, Context, *sbom.SBOM, 
error) { reader, ctx, err := getPurlReader(userInput) if err != nil { return nil, Context{}, nil, err } - return decodePurlsFromReader(reader, ctx) + s, _, _, err := format.Decode(reader) + if s == nil { + return nil, Context{}, nil, fmt.Errorf("unable to decode purl: %w", err) + } + + return FromCollection(s.Artifacts.Packages, config.SynthesisConfig, purlEnhancers...), ctx, s, nil } func getPurlReader(userInput string) (r io.Reader, ctx Context, err error) { - switch { - case strings.HasPrefix(userInput, purlInputPrefix): - path := strings.TrimPrefix(userInput, purlInputPrefix) - ctx.Source = &source.Description{ - Metadata: PURLFileMetadata{ - Path: path, - }, - } - file, err := openPurlFile(path) - if err != nil { - return nil, ctx, err - } - return file, ctx, nil - case strings.HasPrefix(userInput, singlePurlInputPrefix): + if strings.HasPrefix(userInput, singlePurlInputPrefix) { ctx.Source = &source.Description{ Metadata: PURLLiteralMetadata{ PURL: userInput, @@ -65,196 +44,3 @@ func getPurlReader(userInput string) (r io.Reader, ctx Context, err error) { } return nil, ctx, errDoesNotProvide } - -func openPurlFile(path string) (*os.File, error) { - expandedPath, err := homedir.Expand(path) - if err != nil { - return nil, fmt.Errorf("unable to open purls: %w", err) - } - - f, err := os.Open(expandedPath) - if err != nil { - return nil, fmt.Errorf("unable to open file %s: %w", expandedPath, err) - } - - return f, nil -} - -func decodePurlsFromReader(reader io.Reader, ctx Context) ([]Package, Context, *sbom.SBOM, error) { - scanner := bufio.NewScanner(reader) - var packages []Package - var syftPkgs []pkg.Package - - distros := make(map[string]*strset.Set) - for scanner.Scan() { - rawLine := scanner.Text() - p, syftPkg, distroName, distroVersion, err := purlToPackage(rawLine) - if err != nil { - return nil, Context{}, nil, err - } - if distroName != "" { - if _, ok := distros[distroName]; !ok { - distros[distroName] = strset.New() - } - distros[distroName].Add(distroVersion) - } - if p != nil { - packages = append(packages, *p) - } - if syftPkg != nil { - syftPkgs = append(syftPkgs, *syftPkg) - } - } - - if err := scanner.Err(); err != nil { - return nil, Context{}, nil, err - } - - s := &sbom.SBOM{ - Artifacts: sbom.Artifacts{ - Packages: pkg.NewCollection(syftPkgs...), - }, - } - // Do we have multiple purls - // purl litteral <-- - // purl file <-- FileMetadata - - // if there is one distro (with one version) represented, use that - if len(distros) == 1 { - for name, versions := range distros { - if versions.Size() == 1 { - version := versions.List()[0] - var codename string - // if there are no digits in the version, it is likely a codename - if !strings.ContainsAny(version, "0123456789") { - codename = version - version = "" - } - ctx.Distro = &linux.Release{ - Name: name, - ID: name, - IDLike: []string{name}, - Version: version, - VersionCodename: codename, - } - s.Artifacts.LinuxDistribution = &linux.Release{ - Name: name, - ID: name, - IDLike: []string{name}, - Version: version, - VersionCodename: codename, - } - } - } - } - - return packages, ctx, s, nil -} - -func purlToPackage(rawLine string) (*Package, *pkg.Package, string, string, error) { - purl, err := packageurl.FromString(rawLine) - if err != nil { - return nil, nil, "", "", fmt.Errorf("unable to decode purl %s: %w", rawLine, err) - } - - var cpes []cpe.CPE - var upstreams []UpstreamPackage - var distroName, distroVersion string - epoch := "0" - - pkgType := pkg.TypeByName(purl.Type) - - for _, qualifier := range 
purl.Qualifiers { - switch qualifier.Key { - case cpesQualifierKey: - rawCpes := strings.Split(qualifier.Value, ",") - for _, rawCpe := range rawCpes { - c, err := cpe.New(rawCpe, "") - if err != nil { - return nil, nil, "", "", fmt.Errorf("unable to decode cpe %s in purl %s: %w", rawCpe, rawLine, err) - } - cpes = append(cpes, c) - } - case pkg.PURLQualifierEpoch: - epoch = qualifier.Value - case pkg.PURLQualifierUpstream: - upstreams = append(upstreams, parseUpstream(purl.Name, qualifier.Value, pkgType)...) - case pkg.PURLQualifierDistro: - name, version := parseDistroQualifier(qualifier.Value) - if name != "" && version != "" { - distroName = name - distroVersion = version - } - } - } - - version := purl.Version - if purl.Type == packageurl.TypeRPM && !strings.HasPrefix(purl.Version, fmt.Sprintf("%s:", epoch)) { - version = fmt.Sprintf("%s:%s", epoch, purl.Version) - } - - syftPkg := pkg.Package{ - Name: purl.Name, - Version: version, - Type: pkgType, - CPEs: cpes, - PURL: purl.String(), - Language: pkg.LanguageByName(purl.Type), - } - - syftPkg.SetID() - return &Package{ - ID: ID(purl.String()), - CPEs: cpes, - Name: purl.Name, - Version: version, - Type: pkgType, - Language: pkg.LanguageByName(purl.Type), - PURL: purl.String(), - Upstreams: upstreams, - }, &syftPkg, distroName, distroVersion, nil -} - -func parseDistroQualifier(value string) (string, string) { - fields := strings.SplitN(value, "-", 2) - switch len(fields) { - case 2: - return fields[0], fields[1] - case 1: - return fields[0], "" - } - return "", "" -} - -func parseUpstream(pkgName string, value string, pkgType pkg.Type) []UpstreamPackage { - if pkgType == pkg.RpmPkg { - return handleSourceRPM(pkgName, value) - } - return handleDefaultUpstream(pkgName, value) -} - -func handleDefaultUpstream(pkgName string, value string) []UpstreamPackage { - fields := strings.Split(value, "@") - switch len(fields) { - case 2: - if fields[0] == pkgName { - return nil - } - return []UpstreamPackage{ - { - Name: fields[0], - Version: fields[1], - }, - } - case 1: - if fields[0] == pkgName { - return nil - } - return []UpstreamPackage{ - { - Name: fields[0], - }, - } - } - return nil -} diff --git a/grype/pkg/purl_provider_test.go b/grype/pkg/purl_provider_test.go index ca909402b8b..ca8b7c371b1 100644 --- a/grype/pkg/purl_provider_test.go +++ b/grype/pkg/purl_provider_test.go @@ -7,10 +7,8 @@ import ( "github.com/google/go-cmp/cmp/cmpopts" "github.com/stretchr/testify/require" - "github.com/anchore/syft/syft/file" - "github.com/anchore/syft/syft/linux" + "github.com/anchore/grype/grype/distro" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/sbom" "github.com/anchore/syft/syft/source" ) @@ -20,7 +18,6 @@ func Test_PurlProvider(t *testing.T) { userInput string context Context pkgs []Package - sbom *sbom.SBOM wantErr require.ErrorAssertionFunc }{ { @@ -41,14 +38,27 @@ func Test_PurlProvider(t *testing.T) { PURL: "pkg:apk/curl@7.61.1", }, }, - sbom: &sbom.SBOM{ - Artifacts: sbom.Artifacts{ - Packages: pkg.NewCollection(pkg.Package{ - Name: "curl", - Version: "7.61.1", - Type: pkg.ApkPkg, - PURL: "pkg:apk/curl@7.61.1", - }), + }, + { + name: "java metadata decoded from purl", + userInput: "pkg:maven/org.apache.commons/commons-lang3@3.12.0", + context: Context{ + Source: &source.Description{ + Metadata: PURLLiteralMetadata{ + PURL: "pkg:maven/org.apache.commons/commons-lang3@3.12.0", + }, + }, + }, + pkgs: []Package{ + { + Name: "commons-lang3", + Version: "3.12.0", + Type: pkg.JavaPkg, + PURL: 
"pkg:maven/org.apache.commons/commons-lang3@3.12.0", + Metadata: JavaMetadata{ + PomArtifactID: "commons-lang3", + PomGroupID: "org.apache.commons", + }, }, }, }, @@ -56,11 +66,10 @@ func Test_PurlProvider(t *testing.T) { name: "os with codename", userInput: "pkg:deb/debian/sysv-rc@2.88dsf-59?arch=all&distro=debian-jessie&upstream=sysvinit", context: Context{ - Distro: &linux.Release{ - Name: "debian", - ID: "debian", - IDLike: []string{"debian"}, - VersionCodename: "jessie", // important! + Distro: &distro.Distro{ + Type: "debian", + IDLike: []string{"debian"}, + Codename: "jessie", // important! }, Source: &source.Description{ Metadata: PURLLiteralMetadata{ @@ -74,6 +83,7 @@ func Test_PurlProvider(t *testing.T) { Version: "2.88dsf-59", Type: pkg.DebPkg, PURL: "pkg:deb/debian/sysv-rc@2.88dsf-59?arch=all&distro=debian-jessie&upstream=sysvinit", + Distro: &distro.Distro{Type: distro.Debian, Version: "", Codename: "jessie", IDLike: []string{"debian"}}, Upstreams: []UpstreamPackage{ { Name: "sysvinit", @@ -81,22 +91,6 @@ func Test_PurlProvider(t *testing.T) { }, }, }, - sbom: &sbom.SBOM{ - Artifacts: sbom.Artifacts{ - Packages: pkg.NewCollection(pkg.Package{ - Name: "sysv-rc", - Version: "2.88dsf-59", - Type: pkg.DebPkg, - PURL: "pkg:deb/debian/sysv-rc@2.88dsf-59?arch=all&distro=debian-jessie&upstream=sysvinit", - }), - LinuxDistribution: &linux.Release{ - Name: "debian", - ID: "debian", - IDLike: []string{"debian"}, - VersionCodename: "jessie", - }, - }, - }, }, { name: "default upstream", @@ -121,16 +115,6 @@ func Test_PurlProvider(t *testing.T) { }, }, }, - sbom: &sbom.SBOM{ - Artifacts: sbom.Artifacts{ - Packages: pkg.NewCollection(pkg.Package{ - Name: "libcrypto3", - Version: "3.3.2", - Type: pkg.ApkPkg, - PURL: "pkg:apk/libcrypto3@3.3.2?upstream=openssl", - }), - }, - }, }, { name: "upstream with version", @@ -156,25 +140,14 @@ func Test_PurlProvider(t *testing.T) { }, }, }, - sbom: &sbom.SBOM{ - Artifacts: sbom.Artifacts{ - Packages: pkg.NewCollection(pkg.Package{ - Name: "libcrypto3", - Version: "3.3.2", - Type: pkg.ApkPkg, - PURL: "pkg:apk/libcrypto3@3.3.2?upstream=openssl%403.2.1", - }), - }, - }, }, { name: "upstream for source RPM", userInput: "pkg:rpm/redhat/systemd-x@239-82.el8_10.2?arch=aarch64&distro=rhel-8.10&upstream=systemd-239-82.el8_10.2.src.rpm", context: Context{ - Distro: &linux.Release{ - Name: "rhel", - ID: "rhel", - IDLike: []string{"rhel"}, + Distro: &distro.Distro{ + Type: "redhat", + IDLike: []string{"redhat"}, Version: "8.10", }, Source: &source.Description{ @@ -186,9 +159,10 @@ func Test_PurlProvider(t *testing.T) { pkgs: []Package{ { Name: "systemd-x", - Version: "0:239-82.el8_10.2", + Version: "239-82.el8_10.2", Type: pkg.RpmPkg, PURL: "pkg:rpm/redhat/systemd-x@239-82.el8_10.2?arch=aarch64&distro=rhel-8.10&upstream=systemd-239-82.el8_10.2.src.rpm", + Distro: &distro.Distro{Type: distro.RedHat, Version: "8.10", Codename: "", IDLike: []string{"redhat"}}, Upstreams: []UpstreamPackage{ { Name: "systemd", @@ -197,31 +171,14 @@ func Test_PurlProvider(t *testing.T) { }, }, }, - sbom: &sbom.SBOM{ - Artifacts: sbom.Artifacts{ - Packages: pkg.NewCollection(pkg.Package{ - Name: "systemd-x", - Version: "0:239-82.el8_10.2", - Type: pkg.RpmPkg, - PURL: "pkg:rpm/redhat/systemd-x@239-82.el8_10.2?arch=aarch64&distro=rhel-8.10&upstream=systemd-239-82.el8_10.2.src.rpm", - }), - LinuxDistribution: &linux.Release{ - Name: "rhel", - ID: "rhel", - IDLike: []string{"rhel"}, - Version: "8.10", - }, - }, - }, }, { name: "RPM with epoch", userInput: 
"pkg:rpm/redhat/dbus-common@1.12.8-26.el8?arch=noarch&distro=rhel-8.10&epoch=1&upstream=dbus-1.12.8-26.el8.src.rpm", context: Context{ - Distro: &linux.Release{ - Name: "rhel", - ID: "rhel", - IDLike: []string{"rhel"}, + Distro: &distro.Distro{ + Type: "redhat", + IDLike: []string{"redhat"}, Version: "8.10", }, Source: &source.Description{ @@ -236,6 +193,7 @@ func Test_PurlProvider(t *testing.T) { Version: "1:1.12.8-26.el8", Type: pkg.RpmPkg, PURL: "pkg:rpm/redhat/dbus-common@1.12.8-26.el8?arch=noarch&distro=rhel-8.10&epoch=1&upstream=dbus-1.12.8-26.el8.src.rpm", + Distro: &distro.Distro{Type: distro.RedHat, Version: "8.10", Codename: "", IDLike: []string{"redhat"}}, Upstreams: []UpstreamPackage{ { Name: "dbus", @@ -244,103 +202,13 @@ func Test_PurlProvider(t *testing.T) { }, }, }, - sbom: &sbom.SBOM{ - Artifacts: sbom.Artifacts{ - Packages: pkg.NewCollection(pkg.Package{ - Name: "dbus-common", - Version: "1:1.12.8-26.el8", - Type: pkg.RpmPkg, - PURL: "pkg:rpm/redhat/dbus-common@1.12.8-26.el8?arch=noarch&distro=rhel-8.10&epoch=1&upstream=dbus-1.12.8-26.el8.src.rpm", - }), - LinuxDistribution: &linux.Release{ - Name: "rhel", - ID: "rhel", - IDLike: []string{"rhel"}, - Version: "8.10", - }, - }, - }, - }, - { - name: "takes multiple purls", - userInput: "purl:test-fixtures/purl/valid-purl.txt", - context: Context{ - Distro: &linux.Release{ - Name: "debian", - ID: "debian", - IDLike: []string{"debian"}, - Version: "8", - }, - Source: &source.Description{ - Metadata: PURLFileMetadata{ - Path: "test-fixtures/purl/valid-purl.txt", - }, - }, - }, - pkgs: []Package{ - { - Name: "sysv-rc", - Version: "2.88dsf-59", - Type: pkg.DebPkg, - PURL: "pkg:deb/debian/sysv-rc@2.88dsf-59?arch=all&distro=debian-8&upstream=sysvinit", - Upstreams: []UpstreamPackage{ - { - Name: "sysvinit", - }, - }, - }, - { - Name: "ant", - Version: "1.10.8", - Type: pkg.JavaPkg, - PURL: "pkg:maven/org.apache.ant/ant@1.10.8", - }, - { - Name: "log4j-core", - Version: "2.14.1", - Type: pkg.JavaPkg, - PURL: "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1", - }, - }, - sbom: &sbom.SBOM{ - Artifacts: sbom.Artifacts{ - Packages: pkg.NewCollection( - pkg.Package{ - Name: "sysv-rc", - Version: "2.88dsf-59", - Type: pkg.DebPkg, - PURL: "pkg:deb/debian/sysv-rc@2.88dsf-59?arch=all&distro=debian-8&upstream=sysvinit", - }, - pkg.Package{ - Name: "ant", - Version: "1.10.8", - Type: pkg.JavaPkg, - Language: pkg.Java, - PURL: "pkg:maven/org.apache.ant/ant@1.10.8", - }, - pkg.Package{ - Name: "log4j-core", - Version: "2.14.1", - Type: pkg.JavaPkg, - Language: pkg.Java, - PURL: "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1", - }), - LinuxDistribution: &linux.Release{ - Name: "debian", - ID: "debian", - IDLike: []string{"debian"}, - Version: "8", - }, - }, - }, }, { name: "infer context when distro is present for single purl", userInput: "pkg:apk/curl@7.61.1?arch=aarch64&distro=alpine-3.20.3", context: Context{ - Distro: &linux.Release{ - Name: "alpine", - ID: "alpine", + Distro: &distro.Distro{ + Type: "alpine", IDLike: []string{"alpine"}, Version: "3.20.3", }, @@ -356,152 +224,66 @@ func Test_PurlProvider(t *testing.T) { Version: "7.61.1", Type: pkg.ApkPkg, PURL: "pkg:apk/curl@7.61.1?arch=aarch64&distro=alpine-3.20.3", - }, - }, - sbom: &sbom.SBOM{ - Artifacts: sbom.Artifacts{ - Packages: pkg.NewCollection(pkg.Package{ - Name: "curl", - Version: "7.61.1", - Type: pkg.ApkPkg, - PURL: "pkg:apk/curl@7.61.1?arch=aarch64&distro=alpine-3.20.3", - }), - LinuxDistribution: &linux.Release{ - Name: "alpine", - ID: "alpine", - IDLike: 
[]string{"alpine"}, - Version: "3.20.3", - }, + Distro: &distro.Distro{Type: distro.Alpine, Version: "3.20.3", Codename: "", IDLike: []string{"alpine"}}, }, }, }, { - name: "infer context when distro is present for multiple similar purls", - userInput: "purl:test-fixtures/purl/homogeneous-os.txt", + name: "include namespace in name when purl is type Golang", + userInput: "pkg:golang/k8s.io/ingress-nginx@v1.11.2", context: Context{ - Distro: &linux.Release{ - Name: "alpine", - ID: "alpine", - IDLike: []string{"alpine"}, - Version: "3.20.3", - }, Source: &source.Description{ - Metadata: PURLFileMetadata{ - Path: "test-fixtures/purl/homogeneous-os.txt", - }, + Metadata: PURLLiteralMetadata{PURL: "pkg:golang/k8s.io/ingress-nginx@v1.11.2"}, }, }, pkgs: []Package{ { - Name: "openssl", - Version: "3.2.1", - Type: pkg.ApkPkg, - PURL: "pkg:apk/openssl@3.2.1?arch=aarch64&distro=alpine-3.20.3", - }, - { - Name: "curl", - Version: "7.61.1", - Type: pkg.ApkPkg, - PURL: "pkg:apk/curl@7.61.1?arch=aarch64&distro=alpine-3.20.3", - }, - }, - sbom: &sbom.SBOM{ - Artifacts: sbom.Artifacts{ - Packages: pkg.NewCollection(pkg.Package{ - Name: "openssl", - Version: "3.2.1", - Type: pkg.ApkPkg, - PURL: "pkg:apk/openssl@3.2.1?arch=aarch64&distro=alpine-3.20.3", - }, - pkg.Package{ - Name: "curl", - Version: "7.61.1", - Type: pkg.ApkPkg, - PURL: "pkg:apk/curl@7.61.1?arch=aarch64&distro=alpine-3.20.3", - }), - LinuxDistribution: &linux.Release{ - Name: "alpine", - ID: "alpine", - IDLike: []string{"alpine"}, - Version: "3.20.3", - }, + Name: "k8s.io/ingress-nginx", + Version: "v1.11.2", + Type: pkg.GoModulePkg, + PURL: "pkg:golang/k8s.io/ingress-nginx@v1.11.2", }, }, }, { - name: "different distro info in purls does not infer context", - userInput: "purl:test-fixtures/purl/different-os.txt", + name: "include complex namespace in name when purl is type Golang", + userInput: "pkg:golang/github.com/wazuh/wazuh@v4.5.0", context: Context{ - // important: no distro info inferred Source: &source.Description{ - Metadata: PURLFileMetadata{ - Path: "test-fixtures/purl/different-os.txt", - }, + Metadata: PURLLiteralMetadata{PURL: "pkg:golang/github.com/wazuh/wazuh@v4.5.0"}, }, }, pkgs: []Package{ { - Name: "openssl", - Version: "3.2.1", - Type: pkg.ApkPkg, - PURL: "pkg:apk/openssl@3.2.1?arch=aarch64&distro=alpine-3.20.3", - }, - { - Name: "curl", - Version: "7.61.1", - Type: pkg.ApkPkg, - PURL: "pkg:apk/curl@7.61.1?arch=aarch64&distro=alpine-3.20.2", - }, - }, - sbom: &sbom.SBOM{ - Artifacts: sbom.Artifacts{ - Packages: pkg.NewCollection(pkg.Package{ - Name: "openssl", - Version: "3.2.1", - Type: pkg.ApkPkg, - PURL: "pkg:apk/openssl@3.2.1?arch=aarch64&distro=alpine-3.20.3", - }, - pkg.Package{ - Name: "curl", - Version: "7.61.1", - Type: pkg.ApkPkg, - PURL: "pkg:apk/curl@7.61.1?arch=aarch64&distro=alpine-3.20.2", - }), + Name: "github.com/wazuh/wazuh", + Version: "v4.5.0", + Type: pkg.GoModulePkg, + PURL: "pkg:golang/github.com/wazuh/wazuh@v4.5.0", }, }, }, { - name: "fails on path with nonexistant file", - userInput: "purl:tttt/empty.txt", - wantErr: require.Error, - }, - { - name: "fails on invalid path", - userInput: "purl:~&&", - wantErr: require.Error, - }, - { - name: "allow empty purl file", - userInput: "purl:test-fixtures/purl/empty.json", - sbom: &sbom.SBOM{}, + name: "do not include namespace when given blank input blank", + userInput: "pkg:golang/wazuh@v4.5.0", context: Context{ Source: &source.Description{ - Metadata: PURLFileMetadata{ - Path: "test-fixtures/purl/empty.json", - }, + Metadata: 
PURLLiteralMetadata{PURL: "pkg:golang/wazuh@v4.5.0"}, + }, + }, + pkgs: []Package{ + { + Name: "wazuh", + Version: "v4.5.0", + Type: pkg.GoModulePkg, + PURL: "pkg:golang/wazuh@v4.5.0", }, }, }, { - name: "fails on invalid purl in file", + name: "fails on purl list input", userInput: "purl:test-fixtures/purl/invalid-purl.txt", wantErr: require.Error, }, - { - name: "fails on invalid cpe in file", - userInput: "purl:test-fixtures/purl/invalid-cpe.txt", - wantErr: require.Error, - }, { name: "invalid prefix", userInput: "dir:test-fixtures/purl", @@ -509,22 +291,14 @@ func Test_PurlProvider(t *testing.T) { }, } - opts := []cmp.Option{ - cmpopts.IgnoreFields(Package{}, "ID", "Locations", "Licenses", "Metadata", "Language", "CPEs"), - } - - syftPkgOpts := []cmp.Option{ - cmpopts.IgnoreFields(pkg.Package{}, "id"), - cmpopts.IgnoreUnexported(pkg.Package{}, file.LocationSet{}, pkg.LicenseSet{}), - } - for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { if tc.wantErr == nil { tc.wantErr = require.NoError } - packages, ctx, gotSBOM, err := purlProvider(tc.userInput) + packages, ctx, _, err := purlProvider(tc.userInput, ProviderConfig{}) + setContextDistro(packages, &ctx) tc.wantErr(t, err) if err != nil { @@ -532,35 +306,20 @@ func Test_PurlProvider(t *testing.T) { return } - if d := cmp.Diff(tc.context, ctx, opts...); d != "" { + if d := cmp.Diff(tc.context, ctx, diffOpts...); d != "" { t.Errorf("unexpected context (-want +got):\n%s", d) } require.Len(t, packages, len(tc.pkgs)) for idx, expected := range tc.pkgs { - if d := cmp.Diff(expected, packages[idx], opts...); d != "" { + if d := cmp.Diff(expected, packages[idx], diffOpts...); d != "" { t.Errorf("unexpected context (-want +got):\n%s", d) } } - - gotSyftPkgs := gotSBOM.Artifacts.Packages.Sorted() - wantSyftPkgs := tc.sbom.Artifacts.Packages.Sorted() - require.Equal(t, len(gotSyftPkgs), len(wantSyftPkgs)) - for idx, wantPkg := range wantSyftPkgs { - if d := cmp.Diff(wantPkg, gotSyftPkgs[idx], syftPkgOpts...); d != "" { - t.Errorf("unexpected Syft Pkg (-want +got):\n%s", d) - } - } - - wantSyftDistro := tc.sbom.Artifacts.LinuxDistribution - gotDistro := gotSBOM.Artifacts.LinuxDistribution - if wantSyftDistro == nil { - require.Nil(t, gotDistro) - return - } - - if d := cmp.Diff(wantSyftDistro, gotDistro); d != "" { - t.Errorf("unexpected Syft Distro (-want +got):\n%s", d) - } }) } } + +var diffOpts = []cmp.Option{ + cmpopts.IgnoreFields(Package{}, "ID", "Locations", "Licenses", "Language", "CPEs"), + cmpopts.IgnoreUnexported(distro.Distro{}), +} diff --git a/grype/pkg/qualifier/rpmmodularity/qualifier_test.go b/grype/pkg/qualifier/rpmmodularity/qualifier_test.go index f8b14f1a175..075e3c15bb2 100644 --- a/grype/pkg/qualifier/rpmmodularity/qualifier_test.go +++ b/grype/pkg/qualifier/rpmmodularity/qualifier_test.go @@ -11,7 +11,7 @@ import ( ) func TestRpmModularity_Satisfied(t *testing.T) { - oracle, _ := distro.New(distro.OracleLinux, "8") + oracle := distro.New(distro.OracleLinux, "8", "") tests := []struct { name string diff --git a/grype/pkg/syft_provider.go b/grype/pkg/syft_provider.go index 8cfaec2de1f..9bd8a609cd1 100644 --- a/grype/pkg/syft_provider.go +++ b/grype/pkg/syft_provider.go @@ -5,6 +5,7 @@ import ( "errors" "github.com/anchore/go-collections" + "github.com/anchore/grype/grype/distro" "github.com/anchore/grype/internal/log" "github.com/anchore/stereoscope" "github.com/anchore/stereoscope/pkg/image" @@ -19,14 +20,7 @@ func syftProvider(userInput string, config ProviderConfig) ([]Package, Context, if err != nil { return 
nil, Context{}, nil, err } - - defer func() { - if src != nil { - if err := src.Close(); err != nil { - log.Tracef("unable to close source: %+v", err) - } - } - }() + defer log.CloseAndLogError(src, "syft source") s, err := syft.CreateSBOM(context.Background(), src, config.SBOMOptions) if err != nil { @@ -37,14 +31,16 @@ func syftProvider(userInput string, config ProviderConfig) ([]Package, Context, return nil, Context{}, nil, errors.New("no SBOM provided") } - pkgCatalog := removePackagesByOverlap(s.Artifacts.Packages, s.Relationships, s.Artifacts.LinuxDistribution) - srcDescription := src.Describe() + d := distro.FromRelease(s.Artifacts.LinuxDistribution) + + pkgCatalog := removePackagesByOverlap(s.Artifacts.Packages, s.Relationships, d) + packages := FromCollection(pkgCatalog, config.SynthesisConfig) pkgCtx := Context{ Source: &srcDescription, - Distro: s.Artifacts.LinuxDistribution, + Distro: d, } return packages, pkgCtx, s, nil diff --git a/grype/pkg/syft_sbom_provider.go b/grype/pkg/syft_sbom_provider.go index 056742a94a9..f2ebf9bbd48 100644 --- a/grype/pkg/syft_sbom_provider.go +++ b/grype/pkg/syft_sbom_provider.go @@ -9,68 +9,72 @@ import ( "strings" "github.com/gabriel-vasile/mimetype" - "github.com/mitchellh/go-homedir" + "github.com/anchore/go-homedir" + "github.com/anchore/grype/grype/distro" "github.com/anchore/grype/internal" "github.com/anchore/grype/internal/log" "github.com/anchore/syft/syft/format" + "github.com/anchore/syft/syft/format/syftjson" "github.com/anchore/syft/syft/sbom" ) +type SBOMFileMetadata struct { + Path string +} + func syftSBOMProvider(userInput string, config ProviderConfig) ([]Package, Context, *sbom.SBOM, error) { - s, err := getSBOM(userInput) + s, fmtID, path, err := getSBOM(userInput) if err != nil { return nil, Context{}, nil, err } - catalog := removePackagesByOverlap(s.Artifacts.Packages, s.Relationships, s.Artifacts.LinuxDistribution) + src := s.Source + if src.Metadata == nil && path != "" { + src.Metadata = SBOMFileMetadata{ + Path: path, + } + } + + d := distro.FromRelease(s.Artifacts.LinuxDistribution) - return FromCollection(catalog, config.SynthesisConfig), Context{ - Source: &s.Source, - Distro: s.Artifacts.LinuxDistribution, - }, s, nil -} + catalog := removePackagesByOverlap(s.Artifacts.Packages, s.Relationships, d) -func newInputInfo(scheme, contentTye string) *inputInfo { - return &inputInfo{ - Scheme: scheme, - ContentType: contentTye, + var enhancers []Enhancer + if fmtID != syftjson.ID { + enhancers = purlEnhancers } -} -type inputInfo struct { - ContentType string - Scheme string + return FromCollection(catalog, config.SynthesisConfig, enhancers...), Context{ + Source: &src, + Distro: d, + }, s, nil } -func getSBOM(userInput string) (*sbom.SBOM, error) { - reader, err := getSBOMReader(userInput) +func getSBOM(userInput string) (*sbom.SBOM, sbom.FormatID, string, error) { + reader, path, err := getSBOMReader(userInput) if err != nil { - return nil, err + return nil, "", path, err } + s, fmtID, err := readSBOM(reader) + return s, fmtID, path, err +} + +func readSBOM(reader io.ReadSeeker) (*sbom.SBOM, sbom.FormatID, error) { s, fmtID, _, err := format.Decode(reader) if err != nil { - return nil, fmt.Errorf("unable to decode sbom: %w", err) + return nil, "", fmt.Errorf("unable to decode sbom: %w", err) } if fmtID == "" || s == nil { - return nil, errDoesNotProvide + return nil, "", errDoesNotProvide } - return s, nil + return s, fmtID, nil } -func getSBOMReader(userInput string) (r io.ReadSeeker, err error) { - r, _, err = 
extractReaderAndInfo(userInput) - if err != nil { - return nil, err - } - - return r, nil -} - -func extractReaderAndInfo(userInput string) (io.ReadSeeker, *inputInfo, error) { +func getSBOMReader(userInput string) (io.ReadSeeker, string, error) { switch { // the order of cases matter case userInput == "": @@ -78,44 +82,39 @@ func extractReaderAndInfo(userInput string) (io.ReadSeeker, *inputInfo, error) { // options from the CLI, otherwise we should not assume there is any valid input from stdin. r, err := stdinReader() if err != nil { - return nil, nil, err + return nil, "", err } return decodeStdin(r) + case explicitlySpecifyingPurlList(userInput): + filepath := strings.TrimPrefix(userInput, purlInputPrefix) + return openFile(filepath) + case explicitlySpecifyingSBOM(userInput): filepath := strings.TrimPrefix(userInput, "sbom:") - return parseSBOM("sbom", filepath) + return openFile(filepath) case isPossibleSBOM(userInput): - return parseSBOM("", userInput) + return openFile(userInput) default: - return nil, nil, errDoesNotProvide + return nil, "", errDoesNotProvide } } -func parseSBOM(scheme, path string) (io.ReadSeeker, *inputInfo, error) { - r, err := openFile(path) - if err != nil { - return nil, nil, err - } - info := newInputInfo(scheme, "sbom") - return r, info, nil -} - -func decodeStdin(r io.Reader) (io.ReadSeeker, *inputInfo, error) { +func decodeStdin(r io.Reader) (io.ReadSeeker, string, error) { b, err := io.ReadAll(r) if err != nil { - return nil, nil, fmt.Errorf("failed reading stdin: %w", err) + return nil, "", fmt.Errorf("failed reading stdin: %w", err) } reader := bytes.NewReader(b) _, err = reader.Seek(0, io.SeekStart) if err != nil { - return nil, nil, fmt.Errorf("failed to parse stdin: %w", err) + return nil, "", fmt.Errorf("failed to parse stdin: %w", err) } - return reader, newInputInfo("", "sbom"), nil + return reader, "", nil } func stdinReader() (io.Reader, error) { @@ -131,37 +130,26 @@ func stdinReader() (io.Reader, error) { return os.Stdin, nil } -func closeFile(f *os.File) { - if f == nil { - return - } - - err := f.Close() - if err != nil { - log.Warnf("failed to close file %s: %v", f.Name(), err) - } -} - -func openFile(path string) (*os.File, error) { +func openFile(path string) (io.ReadSeekCloser, string, error) { expandedPath, err := homedir.Expand(path) if err != nil { - return nil, fmt.Errorf("unable to open SBOM: %w", err) + return nil, path, fmt.Errorf("unable to open SBOM: %w", err) } f, err := os.Open(expandedPath) if err != nil { - return nil, fmt.Errorf("unable to open file %s: %w", expandedPath, err) + return nil, path, fmt.Errorf("unable to open file %s: %w", expandedPath, err) } - return f, nil + return f, path, nil } func isPossibleSBOM(userInput string) bool { - f, err := openFile(userInput) + f, path, err := openFile(userInput) if err != nil { return false } - defer closeFile(f) + defer log.CloseAndLogError(f, path) mType, err := mimetype.DetectReader(f) if err != nil { @@ -185,3 +173,7 @@ func isAncestorOfMimetype(mType *mimetype.MIME, expected string) bool { func explicitlySpecifyingSBOM(userInput string) bool { return strings.HasPrefix(userInput, "sbom:") } + +func explicitlySpecifyingPurlList(userInput string) bool { + return strings.HasPrefix(userInput, purlInputPrefix) +} diff --git a/grype/pkg/syft_sbom_provider_test.go b/grype/pkg/syft_sbom_provider_test.go index 2d2e48614d1..5d18d06570c 100644 --- a/grype/pkg/syft_sbom_provider_test.go +++ b/grype/pkg/syft_sbom_provider_test.go @@ -1,15 +1,19 @@ package pkg import ( + "slices" 
"strings" "testing" "github.com/go-test/deep" + "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/anchore/grype/grype/distro" "github.com/anchore/syft/syft/cpe" "github.com/anchore/syft/syft/file" - "github.com/anchore/syft/syft/linux" + "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/source" ) @@ -219,8 +223,8 @@ func TestParseSyftJSON(t *testing.T) { }, }, }, - Distro: &linux.Release{ - Name: "alpine", + Distro: &distro.Distro{ + Type: "alpine", Version: "3.12.0", }, }, @@ -337,9 +341,184 @@ var springImageTestCase = struct { RepoDigests: []string{"springio/gs-spring-boot-docker@sha256:39c2ffc784f5f34862e22c1f2ccdbcb62430736114c13f60111eabdb79decb08"}, }, }, - Distro: &linux.Release{ - Name: "debian", + Distro: &distro.Distro{ + Type: "debian", Version: "9", }, }, } + +func Test_PurlList(t *testing.T) { + tests := []struct { + name string + userInput string + context Context + pkgs []Package + wantErr require.ErrorAssertionFunc + }{ + { + name: "takes multiple purls", + userInput: "purl:test-fixtures/purl/valid-purl.txt", + context: Context{ + Distro: &distro.Distro{ + Type: "debian", + IDLike: []string{"debian"}, + Version: "8", + }, + Source: &source.Description{ + Metadata: SBOMFileMetadata{ + Path: "test-fixtures/purl/valid-purl.txt", + }, + }, + }, + pkgs: []Package{ + { + Name: "ant", + Version: "1.10.8", + Type: pkg.JavaPkg, + PURL: "pkg:maven/org.apache.ant/ant@1.10.8", + Metadata: JavaMetadata{ + PomArtifactID: "ant", + PomGroupID: "org.apache.ant", + }, + }, + { + Name: "log4j-core", + Version: "2.14.1", + Type: pkg.JavaPkg, + PURL: "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1", + Metadata: JavaMetadata{ + PomArtifactID: "log4j-core", + PomGroupID: "org.apache.logging.log4j", + }, + }, + { + Name: "sysv-rc", + Version: "2.88dsf-59", + Type: pkg.DebPkg, + PURL: "pkg:deb/debian/sysv-rc@2.88dsf-59?arch=all&distro=debian-8&upstream=sysvinit", + Distro: &distro.Distro{Type: distro.Debian, Version: "8", Codename: "", IDLike: []string{"debian"}}, + Upstreams: []UpstreamPackage{ + { + Name: "sysvinit", + }, + }, + }, + }, + }, + { + name: "infer context when distro is present for multiple similar purls", + userInput: "purl:test-fixtures/purl/homogeneous-os.txt", + context: Context{ + Distro: &distro.Distro{ + Type: "alpine", + IDLike: []string{"alpine"}, + Version: "3.20.3", + }, + Source: &source.Description{ + Metadata: SBOMFileMetadata{ + Path: "test-fixtures/purl/homogeneous-os.txt", + }, + }, + }, + pkgs: []Package{ + { + Name: "openssl", + Version: "3.2.1", + Type: pkg.ApkPkg, + PURL: "pkg:apk/openssl@3.2.1?arch=aarch64&distro=alpine-3.20.3", + Distro: &distro.Distro{Type: distro.Alpine, Version: "3.20.3", Codename: "", IDLike: []string{"alpine"}}, + }, + { + Name: "curl", + Version: "7.61.1", + Type: pkg.ApkPkg, + PURL: "pkg:apk/curl@7.61.1?arch=aarch64&distro=alpine-3.20.3", + Distro: &distro.Distro{Type: distro.Alpine, Version: "3.20.3", Codename: "", IDLike: []string{"alpine"}}, + }, + }, + }, + { + name: "different distro info in purls does not infer context", + userInput: "purl:test-fixtures/purl/different-os.txt", + context: Context{ + // important: no distro info inferred + Source: &source.Description{ + Metadata: SBOMFileMetadata{ + Path: "test-fixtures/purl/different-os.txt", + }, + }, + }, + pkgs: []Package{ + { + Name: "openssl", + Version: "3.2.1", + Type: pkg.ApkPkg, + PURL: "pkg:apk/openssl@3.2.1?arch=aarch64&distro=alpine-3.20.3", + Distro: &distro.Distro{Type: 
distro.Alpine, Version: "3.20.3", Codename: "", IDLike: []string{"alpine"}}, + }, + { + Name: "curl", + Version: "7.61.1", + Type: pkg.ApkPkg, + PURL: "pkg:apk/curl@7.61.1?arch=aarch64&distro=alpine-3.20.2", + Distro: &distro.Distro{Type: distro.Alpine, Version: "3.20.2", Codename: "", IDLike: []string{"alpine"}}, + }, + }, + }, + { + name: "fails on path with nonexistant file", + userInput: "purl:tttt/empty.txt", + wantErr: require.Error, + }, + { + name: "fails on invalid path", + userInput: "purl:~&&", + wantErr: require.Error, + }, + { + name: "fails for empty purl file", + userInput: "purl:test-fixtures/purl/empty.json", + wantErr: require.Error, + }, + { + name: "fails on invalid purl in file", + userInput: "purl:test-fixtures/purl/invalid-purl.txt", + wantErr: require.Error, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + if tc.wantErr == nil { + tc.wantErr = require.NoError + } + + packages, ctx, _, err := Provide(tc.userInput, ProviderConfig{}) + + tc.wantErr(t, err) + if err != nil { + require.Nil(t, packages) + return + } + + if d := cmp.Diff(tc.context, ctx, diffOpts...); d != "" { + t.Errorf("unexpected context (-want +got):\n%s", d) + } + require.Len(t, packages, len(tc.pkgs)) + + slices.SortFunc(packages, func(a, b Package) int { + return strings.Compare(a.Name, b.Name) + }) + slices.SortFunc(tc.pkgs, func(a, b Package) int { + return strings.Compare(a.Name, b.Name) + }) + + for idx, expected := range tc.pkgs { + if d := cmp.Diff(expected, packages[idx], diffOpts...); d != "" { + t.Errorf("unexpected context (-want +got):\n%s", d) + } + } + }) + } +} diff --git a/grype/pkg/upstream_package.go b/grype/pkg/upstream_package.go index 73326cda1ac..c841852b0af 100644 --- a/grype/pkg/upstream_package.go +++ b/grype/pkg/upstream_package.go @@ -35,6 +35,7 @@ func UpstreamPackages(p Package) (pkgs []Package) { c.Attributes.Version = u.Version } + // use BindToFmtString because we search against unescaped CPE strings updatedCPEString := strings.ReplaceAll(c.Attributes.BindToFmtString(), p.Name, u.Name) cpeStrings.Add(updatedCPEString) diff --git a/grype/presenter/cyclonedx/presenter_test.go b/grype/presenter/cyclonedx/presenter_test.go index 6c4637e658c..093bb92304e 100644 --- a/grype/presenter/cyclonedx/presenter_test.go +++ b/grype/presenter/cyclonedx/presenter_test.go @@ -3,6 +3,9 @@ package cyclonedx import ( "bytes" "flag" + "fmt" + "os/exec" + "strings" "testing" "github.com/google/go-cmp/cmp" @@ -16,6 +19,73 @@ import ( ) var update = flag.Bool("update", false, "update the *.golden files for cyclonedx presenters") +var validatorImage = "cyclonedx/cyclonedx-cli:0.27.2@sha256:829c9ea8f2104698bc3c1228575bfa495f6cc4ec151329323c013ca94408477f" + +func Test_CycloneDX_Valid(t *testing.T) { + tests := []struct { + name string + scheme internal.SyftSource + }{ + { + name: "json directory", + scheme: internal.DirectorySource, + }, + { + name: "json image", + scheme: internal.ImageSource, + }, + { + name: "xml directory", + scheme: internal.DirectorySource, + }, + { + name: "xml image", + scheme: internal.ImageSource, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + format := strings.Split(tc.name, " ")[0] + var buffer bytes.Buffer + + pb := internal.GeneratePresenterConfig(t, tc.scheme) + + var pres *Presenter + switch format { + case "json": + pres = NewJSONPresenter(pb) + case "xml": + pres = NewXMLPresenter(pb) + default: + t.Fatalf("invalid format: %s", format) + } + + err := pres.Present(&buffer) + 
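			// The rendered BOM is streamed over stdin into the cyclonedx-cli container
			// configured below; "cyclonedx validate --input-version v1_6 --fail-on-errors"
			// exits non-zero on any schema violation, and the test fails with the full
			// document attached so invalid output is easy to inspect.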
require.NoError(t, err) + + contents := buffer.String() + + cmd := exec.Command("docker", "run", "--rm", "-i", "--entrypoint", "/bin/sh", validatorImage, + "-c", fmt.Sprintf("tee &> /dev/null && cyclonedx validate --input-version v1_6 --fail-on-errors --input-format %s", format)) + + out := bytes.Buffer{} + cmd.Stdout = &out + cmd.Stderr = &out + + // pipe to the docker command + cmd.Stdin = strings.NewReader(contents) + + err = cmd.Run() + if err != nil || cmd.ProcessState.ExitCode() != 0 { + // not valid + t.Fatalf("error validating CycloneDX %s document: %s \nBOM:\n%s", format, out.String(), contents) + } + }) + } +} func Test_noTypedNils(t *testing.T) { s := sbom.SBOM{ diff --git a/grype/presenter/cyclonedx/test-fixtures/snapshot/TestCycloneDxPresenterDir.golden b/grype/presenter/cyclonedx/test-fixtures/snapshot/TestCycloneDxPresenterDir.golden index 4c8e813faf5..ea165dcfa68 100644 --- a/grype/presenter/cyclonedx/test-fixtures/snapshot/TestCycloneDxPresenterDir.golden +++ b/grype/presenter/cyclonedx/test-fixtures/snapshot/TestCycloneDxPresenterDir.golden @@ -2,10 +2,10 @@ "$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json", "bomFormat": "CycloneDX", "specVersion": "1.6", - "serialNumber": "urn:uuid:d78e14f4-ade8-4948-991c-73318b661114", + "serialNumber": "urn:uuid:56b98bde-7bce-4e5a-a184-1a0b80515c18", "version": 1, "metadata": { - "timestamp": "2025-02-27T13:15:13-05:00", + "timestamp": "2025-07-02T13:38:20-04:00", "tools": { "components": [ { @@ -19,16 +19,16 @@ "component": { "bom-ref": "163686ac6e30c752", "type": "file", - "name": "/var/folders/c0/4y79v5k56bz8v34chcmvq2k80000gp/T/TestCycloneDxPresenterDir3112352663/001" + "name": "/var/folders/c0/4y79v5k56bz8v34chcmvq2k80000gp/T/TestCycloneDxPresenterDir86489560/001" } }, "components": [ { - "bom-ref": "9baa2db122fea516", + "bom-ref": "bbb0ba712c2b94ea", "type": "library", "name": "package-1", "version": "1.1.1", - "cpe": "cpe:2.3:a:anchore:engine:0.9.2:*:*:python:*:*:*:*", + "cpe": "cpe:2.3:a:anchore\\:oss:anchore\\/engine:0.9.2:*:*:en:*:*:*:*", "properties": [ { "name": "syft:package:type", @@ -57,7 +57,7 @@ ] }, { - "bom-ref": "pkg:deb/package-2@2.2.2?package-id=7bb53d560434bc7f", + "bom-ref": "pkg:deb/package-2@2.2.2?package-id=74378afe15713625", "type": "library", "name": "package-2", "version": "2.2.2", @@ -73,7 +73,7 @@ } } ], - "cpe": "cpe:2.3:a:anchore:engine:2.2.2:*:*:python:*:*:*:*", + "cpe": "cpe:2.3:a:anchore:engine:2.2.2:*:*:en:*:*:*:*", "purl": "pkg:deb/package-2@2.2.2", "properties": [ { @@ -89,60 +89,75 @@ ], "vulnerabilities": [ { - "bom-ref": "urn:uuid:e47db483-7ab2-4177-a82b-416b5a37bf93", + "bom-ref": "urn:uuid:7f443ac1-d745-42cb-90f0-62b70d423bad", "id": "CVE-1999-0001", - "source": { - "name": "source-1" - }, + "source": {}, "references": [ { "id": "CVE-1999-0001", - "source": { - "name": "source-1" - } + "source": {} } ], "ratings": [ { - "score": 4, + "score": 8.2, "severity": "low", - "method": "CVSSv3", - "vector": "another vector" + "method": "CVSSv31", + "vector": "CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:C/C:L/I:L/A:H" + }, + { + "source": { + "name": "FIRST", + "url": "https://www.first.org/epss/" + }, + "score": 0.03, + "method": "other" } ], - "description": "1999-01 description", "affects": [ { - "ref": "9baa2db122fea516" + "ref": "bbb0ba712c2b94ea" } ] }, { - "bom-ref": "urn:uuid:3e3fbcf0-8dda-41a7-97ed-d1122923bf79", + "bom-ref": "urn:uuid:f97f70c8-a9b3-4aa7-b207-c63daf59b989", "id": "CVE-1999-0002", - "source": { - "name": "source-2" - }, + "source": {}, "references": [ { "id": "CVE-1999-0002", - 
"source": { - "name": "source-2" - } + "source": {} } ], "ratings": [ { - "score": 1, + "score": 8.5, "severity": "critical", - "method": "CVSSv2", - "vector": "vector" + "method": "CVSSv31", + "vector": "CVSS:3.1/AV:N/AC:H/PR:L/UI:N/S:C/C:H/I:H/A:H" + }, + { + "source": { + "name": "FIRST", + "url": "https://www.first.org/epss/" + }, + "score": 0.08, + "method": "other" + }, + { + "source": { + "name": "CISA KEV Catalog", + "url": "https://www.cisa.gov/known-exploited-vulnerabilities-catalog" + }, + "score": 1, + "method": "other", + "justification": "Listed in CISA KEV" } ], - "description": "1999-02 description", "affects": [ { - "ref": "pkg:deb/package-2@2.2.2?package-id=7bb53d560434bc7f" + "ref": "pkg:deb/package-2@2.2.2?package-id=74378afe15713625" } ] } diff --git a/grype/presenter/cyclonedx/test-fixtures/snapshot/TestCycloneDxPresenterImage.golden b/grype/presenter/cyclonedx/test-fixtures/snapshot/TestCycloneDxPresenterImage.golden index f5676aedbb4..2afe0af9a7a 100644 --- a/grype/presenter/cyclonedx/test-fixtures/snapshot/TestCycloneDxPresenterImage.golden +++ b/grype/presenter/cyclonedx/test-fixtures/snapshot/TestCycloneDxPresenterImage.golden @@ -2,10 +2,10 @@ "$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json", "bomFormat": "CycloneDX", "specVersion": "1.6", - "serialNumber": "urn:uuid:24d4fb73-8881-462c-940a-61b6d37b579d", + "serialNumber": "urn:uuid:1ca232fc-acb3-4b0f-9640-ba68fb1c7452", "version": 1, "metadata": { - "timestamp": "2025-02-27T13:15:13-05:00", + "timestamp": "2025-07-02T13:38:20-04:00", "tools": { "components": [ { @@ -25,11 +25,11 @@ }, "components": [ { - "bom-ref": "9baa2db122fea516", + "bom-ref": "bbb0ba712c2b94ea", "type": "library", "name": "package-1", "version": "1.1.1", - "cpe": "cpe:2.3:a:anchore:engine:0.9.2:*:*:python:*:*:*:*", + "cpe": "cpe:2.3:a:anchore\\:oss:anchore\\/engine:0.9.2:*:*:en:*:*:*:*", "properties": [ { "name": "syft:package:type", @@ -58,7 +58,7 @@ ] }, { - "bom-ref": "pkg:deb/package-2@2.2.2?package-id=7bb53d560434bc7f", + "bom-ref": "pkg:deb/package-2@2.2.2?package-id=74378afe15713625", "type": "library", "name": "package-2", "version": "2.2.2", @@ -74,7 +74,7 @@ } } ], - "cpe": "cpe:2.3:a:anchore:engine:2.2.2:*:*:python:*:*:*:*", + "cpe": "cpe:2.3:a:anchore:engine:2.2.2:*:*:en:*:*:*:*", "purl": "pkg:deb/package-2@2.2.2", "properties": [ { @@ -90,60 +90,75 @@ ], "vulnerabilities": [ { - "bom-ref": "urn:uuid:638bc45b-a838-4017-b690-dc5d8d3cfe8d", + "bom-ref": "urn:uuid:c13f8188-e9ea-47b0-b2ef-d99062917f08", "id": "CVE-1999-0001", - "source": { - "name": "source-1" - }, + "source": {}, "references": [ { "id": "CVE-1999-0001", - "source": { - "name": "source-1" - } + "source": {} } ], "ratings": [ { - "score": 4, + "score": 8.2, "severity": "low", - "method": "CVSSv3", - "vector": "another vector" + "method": "CVSSv31", + "vector": "CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:C/C:L/I:L/A:H" + }, + { + "source": { + "name": "FIRST", + "url": "https://www.first.org/epss/" + }, + "score": 0.03, + "method": "other" } ], - "description": "1999-01 description", "affects": [ { - "ref": "9baa2db122fea516" + "ref": "bbb0ba712c2b94ea" } ] }, { - "bom-ref": "urn:uuid:6592d365-1128-49a0-8a68-570d8188965a", + "bom-ref": "urn:uuid:185ab413-ef21-48ae-acca-a6894c0c22d1", "id": "CVE-1999-0002", - "source": { - "name": "source-2" - }, + "source": {}, "references": [ { "id": "CVE-1999-0002", - "source": { - "name": "source-2" - } + "source": {} } ], "ratings": [ { - "score": 1, + "score": 8.5, "severity": "critical", - "method": "CVSSv2", - "vector": 
"vector" + "method": "CVSSv31", + "vector": "CVSS:3.1/AV:N/AC:H/PR:L/UI:N/S:C/C:H/I:H/A:H" + }, + { + "source": { + "name": "FIRST", + "url": "https://www.first.org/epss/" + }, + "score": 0.08, + "method": "other" + }, + { + "source": { + "name": "CISA KEV Catalog", + "url": "https://www.cisa.gov/known-exploited-vulnerabilities-catalog" + }, + "score": 1, + "method": "other", + "justification": "Listed in CISA KEV" } ], - "description": "1999-02 description", "affects": [ { - "ref": "pkg:deb/package-2@2.2.2?package-id=7bb53d560434bc7f" + "ref": "pkg:deb/package-2@2.2.2?package-id=74378afe15713625" } ] } diff --git a/grype/presenter/cyclonedx/vulnerability.go b/grype/presenter/cyclonedx/vulnerability.go index 6fad2e72e90..9695611cf2a 100644 --- a/grype/presenter/cyclonedx/vulnerability.go +++ b/grype/presenter/cyclonedx/vulnerability.go @@ -105,6 +105,34 @@ func generateCDXRatings(metadata models.VulnerabilityMetadata) []cyclonedx.Vulne }) } + // Add EPSS score if available + if len(metadata.EPSS) > 0 { + epssScore := metadata.EPSS[0].EPSS + + ratings = append(ratings, cyclonedx.VulnerabilityRating{ + Method: cyclonedx.ScoringMethod("EPSS"), + Score: &epssScore, + Source: &cyclonedx.Source{ + Name: "FIRST", + URL: "https://www.first.org/epss/", + }, + }) + } + // Add KEV indication if available + if len(metadata.KnownExploited) > 0 { + kevScore := 1.0 + + ratings = append(ratings, cyclonedx.VulnerabilityRating{ + Method: cyclonedx.ScoringMethodOther, + Score: &kevScore, + Source: &cyclonedx.Source{ + Name: "CISA KEV Catalog", + URL: "https://www.cisa.gov/known-exploited-vulnerabilities-catalog", + }, + Justification: "Listed in CISA KEV", + }) + } + return ratings } diff --git a/grype/presenter/cyclonedx/vulnerability_test.go b/grype/presenter/cyclonedx/vulnerability_test.go index b734f15a22b..8a5a16dedcc 100644 --- a/grype/presenter/cyclonedx/vulnerability_test.go +++ b/grype/presenter/cyclonedx/vulnerability_test.go @@ -145,3 +145,49 @@ func TestNewVulnerability_AlwaysIncludesSeverity(t *testing.T) { }) } } + +func TestNewVulnerability_IncludesEPSSAndKEV(t *testing.T) { + match := models.Match{ + Vulnerability: models.Vulnerability{ + VulnerabilityMetadata: models.VulnerabilityMetadata{ + ID: "CVE-2025-0001", + Severity: "High", + EPSS: []models.EPSS{ + { + EPSS: 0.87, + }, + }, + KnownExploited: []models.KnownExploited{ + { + KnownRansomwareCampaignUse: "known", + }, + }, + }, + }, + Artifact: models.Package{}, + MatchDetails: nil, + } + + vuln, err := NewVulnerability(match) + require.NoError(t, err) + + ratings := *vuln.Ratings + require.Len(t, ratings, 3, "should include 1 CVSS + 1 EPSS + 1 KEV rating") + + var foundEPSS, foundKEV bool + for _, r := range ratings { + if r.Method == "EPSS" { + foundEPSS = true + assert.NotNil(t, r.Score) + assert.InDelta(t, 0.87, *r.Score, 0.001) + assert.Equal(t, "FIRST", r.Source.Name) + } + if r.Method == "other" && r.Source != nil && r.Source.Name == "CISA KEV Catalog" { + foundKEV = true + assert.NotNil(t, r.Score) + assert.Equal(t, 1.0, *r.Score) + } + } + assert.True(t, foundEPSS, "should include EPSS rating") + assert.True(t, foundKEV, "should include KEV rating") +} diff --git a/grype/presenter/internal/test_helpers.go b/grype/presenter/internal/test_helpers.go index 8c3e8fb12cc..61581c6d6ec 100644 --- a/grype/presenter/internal/test_helpers.go +++ b/grype/presenter/internal/test_helpers.go @@ -7,6 +7,7 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/clio" + "github.com/anchore/grype/grype/distro" 
"github.com/anchore/grype/grype/match" "github.com/anchore/grype/grype/pkg" "github.com/anchore/grype/grype/presenter/models" @@ -15,7 +16,6 @@ import ( "github.com/anchore/stereoscope/pkg/image" "github.com/anchore/syft/syft/cpe" "github.com/anchore/syft/syft/file" - "github.com/anchore/syft/syft/linux" syftPkg "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" syftSource "github.com/anchore/syft/syft/source" @@ -99,7 +99,7 @@ func Redact(s []byte) []byte { return s } -func generateMatches(t *testing.T, p1, p2 pkg.Package) match.Matches { +func generateMatches(t *testing.T, p1, p2 pkg.Package) match.Matches { // nolint:funlen t.Helper() matches := []match.Match{ @@ -114,6 +114,29 @@ func generateMatches(t *testing.T, p1, p2 pkg.Package) match.Matches { Versions: []string{"1.2.1", "2.1.3", "3.4.0"}, State: vulnerability.FixStateFixed, }, + Metadata: &vulnerability.Metadata{ + ID: "CVE-1999-0001", + Severity: "Low", + Cvss: []vulnerability.Cvss{ + { + Source: "nvd", + Type: "CVSS", + Version: "3.1", + Vector: "CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:C/C:L/I:L/A:H", + Metrics: vulnerability.CvssMetrics{ + BaseScore: 8.2, + }, + }, + }, + KnownExploited: nil, + EPSS: []vulnerability.EPSS{ + { + CVE: "CVE-1999-0001", + EPSS: 0.03, + Percentile: 0.42, + }, + }, + }, }, Package: p1, Details: []match.Detail{ @@ -139,6 +162,34 @@ func generateMatches(t *testing.T, p1, p2 pkg.Package) match.Matches { ID: "CVE-1999-0002", Namespace: "source-2", }, + Metadata: &vulnerability.Metadata{ + ID: "CVE-1999-0002", + Severity: "Critical", + Cvss: []vulnerability.Cvss{ + { + Source: "nvd", + Type: "CVSS", + Version: "3.1", + Vector: "CVSS:3.1/AV:N/AC:H/PR:L/UI:N/S:C/C:H/I:H/A:H", + Metrics: vulnerability.CvssMetrics{ + BaseScore: 8.5, + }, + }, + }, + KnownExploited: []vulnerability.KnownExploited{ + { + CVE: "CVE-1999-0002", + KnownRansomwareCampaignUse: "Known", + }, + }, + EPSS: []vulnerability.EPSS{ + { + CVE: "CVE-1999-0002", + EPSS: 0.08, + Percentile: 0.53, + }, + }, + }, }, Package: p2, Details: []match.Detail{ @@ -173,6 +224,29 @@ func generateIgnoredMatches(t *testing.T, p pkg.Package) []match.IgnoredMatch { ID: "CVE-1999-0001", Namespace: "source-1", }, + Metadata: &vulnerability.Metadata{ + ID: "CVE-1999-0001", + Severity: "Low", + Cvss: []vulnerability.Cvss{ + { + Source: "nvd", + Type: "CVSS", + Version: "3.1", + Vector: "CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:C/C:L/I:L/A:H", + Metrics: vulnerability.CvssMetrics{ + BaseScore: 8.2, + }, + }, + }, + KnownExploited: nil, + EPSS: []vulnerability.EPSS{ + { + CVE: "CVE-1999-0001", + EPSS: 0.03, + Percentile: 0.42, + }, + }, + }, }, Package: p, Details: []match.Detail{ @@ -200,6 +274,34 @@ func generateIgnoredMatches(t *testing.T, p pkg.Package) []match.IgnoredMatch { ID: "CVE-1999-0002", Namespace: "source-2", }, + Metadata: &vulnerability.Metadata{ + ID: "CVE-1999-0002", + Severity: "Critical", + Cvss: []vulnerability.Cvss{ + { + Source: "nvd", + Type: "CVSS", + Version: "3.1", + Vector: "CVSS:3.1/AV:N/AC:H/PR:L/UI:N/S:C/C:H/I:H/A:H", + Metrics: vulnerability.CvssMetrics{ + BaseScore: 8.5, + }, + }, + }, + KnownExploited: []vulnerability.KnownExploited{ + { + CVE: "CVE-1999-0002", + KnownRansomwareCampaignUse: "Known", + }, + }, + EPSS: []vulnerability.EPSS{ + { + CVE: "CVE-1999-0002", + EPSS: 0.08, + Percentile: 0.53, + }, + }, + }, }, Package: p, Details: []match.Detail{ @@ -224,6 +326,28 @@ func generateIgnoredMatches(t *testing.T, p pkg.Package) []match.IgnoredMatch { ID: "CVE-1999-0004", Namespace: "source-2", }, + Metadata: 
&vulnerability.Metadata{ + ID: "CVE-1999-0004", + Severity: "High", + Cvss: []vulnerability.Cvss{ + { + Source: "nvd", + Type: "CVSS", + Version: "3.1", + Vector: "CVSS:3.1/AV:N/AC:H/PR:L/UI:N/S:C/C:H/I:L/A:L", + Metrics: vulnerability.CvssMetrics{ + BaseScore: 7.2, + }, + }, + }, + EPSS: []vulnerability.EPSS{ + { + CVE: "CVE-1999-0004", + EPSS: 0.03, + Percentile: 0.75, + }, + }, + }, }, Package: p, Details: []match.Detail{ @@ -266,10 +390,10 @@ func generatePackages(t *testing.T) []syftPkg.Package { { Attributes: cpe.Attributes{ Part: "a", - Vendor: "anchore", - Product: "engine", + Vendor: "anchore:oss", + Product: "anchore/engine", Version: "0.9.2", - Language: "python", + Language: "en", }, }, }, @@ -291,7 +415,7 @@ func generatePackages(t *testing.T) []syftPkg.Package { Vendor: "anchore", Product: "engine", Version: "2.2.2", - Language: "python", + Language: "en", }, }, }, @@ -382,8 +506,8 @@ func generateContext(t *testing.T, scheme SyftSource) pkg.Context { return pkg.Context{ Source: &desc, - Distro: &linux.Release{ - Name: "centos", + Distro: &distro.Distro{ + Type: "centos", IDLike: []string{ "centos", }, diff --git a/grype/presenter/json/presenter.go b/grype/presenter/json/presenter.go index 03d46c67042..27ca2a912d5 100644 --- a/grype/presenter/json/presenter.go +++ b/grype/presenter/json/presenter.go @@ -4,25 +4,18 @@ import ( "encoding/json" "io" - "github.com/anchore/clio" "github.com/anchore/grype/grype/presenter/models" ) type Presenter struct { - id clio.Identification - document models.Document - appConfig interface{} - dbStatus interface{} - pretty bool + document models.Document + pretty bool } func NewPresenter(pb models.PresenterConfig) *Presenter { return &Presenter{ - id: pb.ID, - document: pb.Document, - appConfig: pb.AppConfig, - dbStatus: pb.DBStatus, - pretty: pb.Pretty, + document: pb.Document, + pretty: pb.Pretty, } } diff --git a/grype/presenter/json/presenter_test.go b/grype/presenter/json/presenter_test.go index 3910b55ce9e..dc2929ae518 100644 --- a/grype/presenter/json/presenter_test.go +++ b/grype/presenter/json/presenter_test.go @@ -12,11 +12,11 @@ import ( "github.com/anchore/clio" "github.com/anchore/go-testutils" + "github.com/anchore/grype/grype/distro" "github.com/anchore/grype/grype/match" "github.com/anchore/grype/grype/pkg" "github.com/anchore/grype/grype/presenter/internal" "github.com/anchore/grype/grype/presenter/models" - "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/source" ) @@ -85,8 +85,8 @@ func TestEmptyJsonPresenter(t *testing.T) { ctx := pkg.Context{ Source: &source.Description{}, - Distro: &linux.Release{ - ID: "centos", + Distro: &distro.Distro{ + Type: "centos", IDLike: []string{"rhel"}, Version: "8.0", }, diff --git a/grype/presenter/json/test-fixtures/snapshot/TestJsonDirsPresenter.golden b/grype/presenter/json/test-fixtures/snapshot/TestJsonDirsPresenter.golden index 05f412100c6..db14134e30b 100644 --- a/grype/presenter/json/test-fixtures/snapshot/TestJsonDirsPresenter.golden +++ b/grype/presenter/json/test-fixtures/snapshot/TestJsonDirsPresenter.golden @@ -4,20 +4,28 @@ "vulnerability": { "id": "CVE-1999-0001", "dataSource": "", - "namespace": "source-1", "severity": "Low", "urls": [], - "description": "1999-01 description", "cvss": [ { - "version": "3.0", - "vector": "another vector", + "source": "nvd", + "type": "CVSS", + "version": "3.1", + "vector": "CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:C/C:L/I:L/A:H", "metrics": { - "baseScore": 4 + "baseScore": 8.2 }, "vendorMetadata": {} } ], + "epss": [ + { + "cve": 
"CVE-1999-0001", + "epss": 0.03, + "percentile": 0.42, + "date": "0001-01-01" + } + ], "fix": { "versions": [ "1.2.1", @@ -26,7 +34,8 @@ ], "state": "fixed" }, - "advisories": [] + "advisories": [], + "risk": 1.68 }, "relatedVulnerabilities": [], "matchDetails": [ @@ -48,7 +57,7 @@ } ], "artifact": { - "id": "9baa2db122fea516", + "id": "bbb0ba712c2b94ea", "name": "package-1", "version": "1.1.1", "type": "rpm", @@ -61,7 +70,7 @@ "language": "", "licenses": [], "cpes": [ - "cpe:2.3:a:anchore:engine:0.9.2:*:*:python:*:*:*:*" + "cpe:2.3:a:anchore\\:oss:anchore\\/engine:0.9.2:*:*:en:*:*:*:*" ], "purl": "", "upstreams": [], @@ -76,30 +85,40 @@ "vulnerability": { "id": "CVE-1999-0002", "dataSource": "", - "namespace": "source-2", "severity": "Critical", "urls": [], - "description": "1999-02 description", "cvss": [ { - "version": "2.0", - "vector": "vector", + "source": "nvd", + "type": "CVSS", + "version": "3.1", + "vector": "CVSS:3.1/AV:N/AC:H/PR:L/UI:N/S:C/C:H/I:H/A:H", "metrics": { - "baseScore": 1, - "exploitabilityScore": 2, - "impactScore": 3 + "baseScore": 8.5 }, - "vendorMetadata": { - "BaseSeverity": "Low", - "Status": "verified" - } + "vendorMetadata": {} + } + ], + "knownExploited": [ + { + "cve": "CVE-1999-0002", + "knownRansomwareCampaignUse": "Known" + } + ], + "epss": [ + { + "cve": "CVE-1999-0002", + "epss": 0.08, + "percentile": 0.53, + "date": "0001-01-01" } ], "fix": { "versions": [], "state": "" }, - "advisories": [] + "advisories": [], + "risk": 96.25000000000001 }, "relatedVulnerabilities": [], "matchDetails": [ @@ -115,7 +134,7 @@ } ], "artifact": { - "id": "7bb53d560434bc7f", + "id": "74378afe15713625", "name": "package-2", "version": "2.2.2", "type": "deb", @@ -131,7 +150,7 @@ "MIT" ], "cpes": [ - "cpe:2.3:a:anchore:engine:2.2.2:*:*:python:*:*:*:*" + "cpe:2.3:a:anchore:engine:2.2.2:*:*:en:*:*:*:*" ], "purl": "pkg:deb/package-2@2.2.2", "upstreams": [] diff --git a/grype/presenter/json/test-fixtures/snapshot/TestJsonImgsPresenter.golden b/grype/presenter/json/test-fixtures/snapshot/TestJsonImgsPresenter.golden index adb4ab0de50..1e740f2a456 100644 --- a/grype/presenter/json/test-fixtures/snapshot/TestJsonImgsPresenter.golden +++ b/grype/presenter/json/test-fixtures/snapshot/TestJsonImgsPresenter.golden @@ -4,20 +4,28 @@ "vulnerability": { "id": "CVE-1999-0001", "dataSource": "", - "namespace": "source-1", "severity": "Low", "urls": [], - "description": "1999-01 description", "cvss": [ { - "version": "3.0", - "vector": "another vector", + "source": "nvd", + "type": "CVSS", + "version": "3.1", + "vector": "CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:C/C:L/I:L/A:H", "metrics": { - "baseScore": 4 + "baseScore": 8.2 }, "vendorMetadata": {} } ], + "epss": [ + { + "cve": "CVE-1999-0001", + "epss": 0.03, + "percentile": 0.42, + "date": "0001-01-01" + } + ], "fix": { "versions": [ "1.2.1", @@ -26,7 +34,8 @@ ], "state": "fixed" }, - "advisories": [] + "advisories": [], + "risk": 1.68 }, "relatedVulnerabilities": [], "matchDetails": [ @@ -48,7 +57,7 @@ } ], "artifact": { - "id": "9baa2db122fea516", + "id": "bbb0ba712c2b94ea", "name": "package-1", "version": "1.1.1", "type": "rpm", @@ -61,7 +70,7 @@ "language": "", "licenses": [], "cpes": [ - "cpe:2.3:a:anchore:engine:0.9.2:*:*:python:*:*:*:*" + "cpe:2.3:a:anchore\\:oss:anchore\\/engine:0.9.2:*:*:en:*:*:*:*" ], "purl": "", "upstreams": [], @@ -76,30 +85,40 @@ "vulnerability": { "id": "CVE-1999-0002", "dataSource": "", - "namespace": "source-2", "severity": "Critical", "urls": [], - "description": "1999-02 description", "cvss": [ { - "version": 
"2.0", - "vector": "vector", + "source": "nvd", + "type": "CVSS", + "version": "3.1", + "vector": "CVSS:3.1/AV:N/AC:H/PR:L/UI:N/S:C/C:H/I:H/A:H", "metrics": { - "baseScore": 1, - "exploitabilityScore": 2, - "impactScore": 3 + "baseScore": 8.5 }, - "vendorMetadata": { - "BaseSeverity": "Low", - "Status": "verified" - } + "vendorMetadata": {} + } + ], + "knownExploited": [ + { + "cve": "CVE-1999-0002", + "knownRansomwareCampaignUse": "Known" + } + ], + "epss": [ + { + "cve": "CVE-1999-0002", + "epss": 0.08, + "percentile": 0.53, + "date": "0001-01-01" } ], "fix": { "versions": [], "state": "" }, - "advisories": [] + "advisories": [], + "risk": 96.25000000000001 }, "relatedVulnerabilities": [], "matchDetails": [ @@ -115,7 +134,7 @@ } ], "artifact": { - "id": "7bb53d560434bc7f", + "id": "74378afe15713625", "name": "package-2", "version": "2.2.2", "type": "deb", @@ -131,7 +150,7 @@ "MIT" ], "cpes": [ - "cpe:2.3:a:anchore:engine:2.2.2:*:*:python:*:*:*:*" + "cpe:2.3:a:anchore:engine:2.2.2:*:*:en:*:*:*:*" ], "purl": "pkg:deb/package-2@2.2.2", "upstreams": [] diff --git a/grype/presenter/models/descriptor.go b/grype/presenter/models/descriptor.go index 33cfb6ad2ef..e604fb1142f 100644 --- a/grype/presenter/models/descriptor.go +++ b/grype/presenter/models/descriptor.go @@ -2,9 +2,9 @@ package models // descriptor describes what created the document as well as surrounding metadata type descriptor struct { - Name string `json:"name"` - Version string `json:"version"` - Configuration interface{} `json:"configuration,omitempty"` - VulnerabilityDBStatus interface{} `json:"db,omitempty"` - Timestamp string `json:"timestamp"` + Name string `json:"name"` + Version string `json:"version"` + Configuration any `json:"configuration,omitempty"` + DB any `json:"db,omitempty"` + Timestamp string `json:"timestamp"` } diff --git a/grype/presenter/models/distribution.go b/grype/presenter/models/distribution.go index 7e5bf242e39..a8b9d792afa 100644 --- a/grype/presenter/models/distribution.go +++ b/grype/presenter/models/distribution.go @@ -2,8 +2,6 @@ package models import ( "github.com/anchore/grype/grype/distro" - "github.com/anchore/grype/internal/log" - "github.com/anchore/syft/syft/linux" ) // distribution provides information about a detected Linux distribution. @@ -14,27 +12,14 @@ type distribution struct { } // newDistribution creates a struct with the Linux distribution to be represented in JSON. -func newDistribution(r *linux.Release) distribution { - if r == nil { +func newDistribution(d *distro.Distro) distribution { + if d == nil { return distribution{} } - // attempt to use the strong distro type (like the matchers do) - d, err := distro.NewFromRelease(*r) - if err != nil { - log.Warnf("unable to determine linux distribution: %+v", err) - - // as a fallback use the raw release information - return distribution{ - Name: r.ID, - Version: r.VersionID, - IDLike: cleanIDLike(r.IDLike), - } - } - return distribution{ Name: d.Name(), - Version: d.FullVersion(), + Version: d.Version, IDLike: cleanIDLike(d.IDLike), } } diff --git a/grype/presenter/models/document.go b/grype/presenter/models/document.go index 93dbadcaada..068486d1eb1 100644 --- a/grype/presenter/models/document.go +++ b/grype/presenter/models/document.go @@ -20,7 +20,7 @@ type Document struct { } // NewDocument creates and populates a new Document struct, representing the populated JSON document. 
-func NewDocument(id clio.Identification, packages []pkg.Package, context pkg.Context, matches match.Matches, ignoredMatches []match.IgnoredMatch, metadataProvider vulnerability.MetadataProvider, appConfig any, dbStatus any, strategy SortStrategy) (Document, error) { +func NewDocument(id clio.Identification, packages []pkg.Package, context pkg.Context, matches match.Matches, ignoredMatches []match.IgnoredMatch, metadataProvider vulnerability.MetadataProvider, appConfig any, dbInfo any, strategy SortStrategy) (Document, error) { timestamp, timestampErr := time.Now().Local().MarshalText() if timestampErr != nil { return Document{}, timestampErr @@ -78,11 +78,11 @@ func NewDocument(id clio.Identification, packages []pkg.Package, context pkg.Con Source: src, Distro: newDistribution(context.Distro), Descriptor: descriptor{ - Name: id.Name, - Version: id.Version, - Configuration: appConfig, - VulnerabilityDBStatus: dbStatus, - Timestamp: string(timestamp), + Name: id.Name, + Version: id.Version, + Configuration: appConfig, + DB: dbInfo, + Timestamp: string(timestamp), }, }, nil } diff --git a/grype/presenter/models/document_test.go b/grype/presenter/models/document_test.go index d81f0dc144f..0cb95916935 100644 --- a/grype/presenter/models/document_test.go +++ b/grype/presenter/models/document_test.go @@ -7,10 +7,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/clio" + "github.com/anchore/grype/grype/distro" "github.com/anchore/grype/grype/match" "github.com/anchore/grype/grype/pkg" "github.com/anchore/grype/grype/vulnerability" - "github.com/anchore/syft/syft/linux" syftPkg "github.com/anchore/syft/syft/pkg" syftSource "github.com/anchore/syft/syft/source" ) @@ -75,8 +75,8 @@ func TestPackagesAreSorted(t *testing.T) { Source: &syftSource.Description{ Metadata: syftSource.DirectoryMetadata{}, }, - Distro: &linux.Release{ - ID: "centos", + Distro: &distro.Distro{ + Type: "centos", IDLike: []string{"rhel"}, Version: "8.0", }, @@ -136,8 +136,8 @@ func TestFixSuggestedVersion(t *testing.T) { Source: &syftSource.Description{ Metadata: syftSource.DirectoryMetadata{}, }, - Distro: &linux.Release{ - ID: "centos", + Distro: &distro.Distro{ + Type: "centos", IDLike: []string{"rhel"}, Version: "8.0", }, diff --git a/grype/presenter/models/match.go b/grype/presenter/models/match.go index a90ba7c0d11..b7c4f9b3004 100644 --- a/grype/presenter/models/match.go +++ b/grype/presenter/models/match.go @@ -113,8 +113,10 @@ func calculateSuggestedFixedVersion(p pkg.Package, fixedVersions []string, forma } sort.SliceStable(fixedVersions, func(i, j int) bool { - v1, err1 := version.NewVersion(fixedVersions[i], format) - v2, err2 := version.NewVersion(fixedVersions[j], format) + v1 := version.NewVersion(fixedVersions[i], format) + v2 := version.NewVersion(fixedVersions[j], format) + err1 := v1.Validate() + err2 := v2.Validate() if err1 != nil || err2 != nil { log.WithFields("package", p.Name).Trace("error while parsing version for sorting") return false diff --git a/grype/presenter/models/package.go b/grype/presenter/models/package.go index 47cd920e27a..ec545bb79aa 100644 --- a/grype/presenter/models/package.go +++ b/grype/presenter/models/package.go @@ -31,7 +31,8 @@ type UpstreamPackage struct { func newPackage(p pkg.Package) Package { var cpes = make([]string, 0) for _, c := range p.CPEs { - cpes = append(cpes, c.Attributes.BindToFmtString()) + // use .String() to ensure proper escaping + cpes = append(cpes, c.Attributes.String()) } licenses := p.Licenses diff --git 
a/grype/presenter/models/presenter_bundle.go b/grype/presenter/models/presenter_bundle.go index cdd99b89cbc..106e963592c 100644 --- a/grype/presenter/models/presenter_bundle.go +++ b/grype/presenter/models/presenter_bundle.go @@ -6,10 +6,8 @@ import ( ) type PresenterConfig struct { - ID clio.Identification - Document Document - SBOM *sbom.SBOM - AppConfig interface{} - DBStatus interface{} - Pretty bool + ID clio.Identification + Document Document + SBOM *sbom.SBOM + Pretty bool } diff --git a/grype/presenter/models/sort.go b/grype/presenter/models/sort.go index 8bb8ee10ab9..f1c5895e44f 100644 --- a/grype/presenter/models/sort.go +++ b/grype/presenter/models/sort.go @@ -10,13 +10,18 @@ import ( type SortStrategy string const ( - SortByPackage SortStrategy = "package" + SortByPackage SortStrategy = "package" + SortBySeverity SortStrategy = "severity" + SortByThreat SortStrategy = "epss" + SortByRisk SortStrategy = "risk" + SortByKEV SortStrategy = "kev" + SortByVulnerability SortStrategy = "vulnerability" - defaultSortStrategy = SortByPackage + DefaultSortStrategy = SortByRisk ) func SortStrategies() []SortStrategy { - return []SortStrategy{SortByPackage} + return []SortStrategy{SortByPackage, SortBySeverity, SortByThreat, SortByRisk, SortByKEV, SortByVulnerability} } func (s SortStrategy) String() string { @@ -37,14 +42,85 @@ type sortStrategyImpl []compareFunc // matchSortStrategy provides predefined sort strategies for Match var matchSortStrategy = map[SortStrategy]sortStrategyImpl{ SortByPackage: { - compareByPackageName, - compareByPackageVersion, - compareByPackageType, + comparePackageAttributes, + compareVulnerabilityAttributes, + }, + SortByVulnerability: { + compareVulnerabilityAttributes, + comparePackageAttributes, + }, + SortBySeverity: { + // severity and tangential attributes... + compareBySeverity, + compareByRisk, + compareByEPSSPercentile, + // followed by package attributes... + comparePackageAttributes, + // followed by the remaining vulnerability attributes... + compareByVulnerabilityID, + }, + SortByThreat: { + // epss and tangential attributes... + compareByEPSSPercentile, + compareByRisk, + compareBySeverity, + // followed by package attributes... + comparePackageAttributes, + // followed by the remaining vulnerability attributes... + compareByVulnerabilityID, + }, + SortByRisk: { + // risk and tangential attributes... + compareByRisk, + compareBySeverity, + compareByEPSSPercentile, + // followed by package attributes... + comparePackageAttributes, + // followed by the remaining vulnerability attributes... + compareByVulnerabilityID, + }, + SortByKEV: { + compareByKEV, + // risk and tangential attributes... + compareByRisk, compareBySeverity, + compareByEPSSPercentile, + // followed by package attributes... + comparePackageAttributes, + // followed by the remaining vulnerability attributes... 
compareByVulnerabilityID, }, } +func compareVulnerabilityAttributes(a, b Match) int { + return combine( + compareByVulnerabilityID, + compareByRisk, + compareBySeverity, + compareByEPSSPercentile, + )(a, b) +} + +func comparePackageAttributes(a, b Match) int { + return combine( + compareByPackageName, + compareByPackageVersion, + compareByPackageType, + )(a, b) +} + +func combine(impls ...compareFunc) compareFunc { + return func(a, b Match) int { + for _, impl := range impls { + result := impl(a, b) + if result != 0 { + return result + } + } + return 0 + } +} + // SortMatches sorts matches based on a strategy name func SortMatches(matches []Match, strategyName SortStrategy) { sortWithStrategy(matches, getSortStrategy(strategyName)) @@ -53,8 +129,8 @@ func SortMatches(matches []Match, strategyName SortStrategy) { func getSortStrategy(strategyName SortStrategy) sortStrategyImpl { strategy, exists := matchSortStrategy[strategyName] if !exists { - log.WithFields("strategy", strategyName).Debugf("unknown sort strategy, falling back to default of %q", defaultSortStrategy) - strategy = matchSortStrategy[defaultSortStrategy] + log.WithFields("strategy", strategyName).Debugf("unknown sort strategy, falling back to default of %q", DefaultSortStrategy) + strategy = matchSortStrategy[DefaultSortStrategy] } return strategy } @@ -87,8 +163,22 @@ func compareByVulnerabilityID(a, b Match) int { } func compareBySeverity(a, b Match) int { - aScore := severityScore(a.Vulnerability.Severity) - bScore := severityScore(b.Vulnerability.Severity) + aScore := severityPriority(a.Vulnerability.Severity) + bScore := severityPriority(b.Vulnerability.Severity) + + switch { + case aScore < bScore: // higher severity first + return -1 + case aScore > bScore: + return 1 + default: + return 0 + } +} + +func compareByEPSSPercentile(a, b Match) int { + aScore := epssPercentile(a.Vulnerability.EPSS) + bScore := epssPercentile(b.Vulnerability.EPSS) switch { case aScore > bScore: // higher severity first @@ -142,20 +232,61 @@ func compareByPackageType(a, b Match) int { } } -// severityScore maps severity strings to numeric scores for comparison -func severityScore(severity string) int { +func compareByRisk(a, b Match) int { + aRisk := a.Vulnerability.Risk + bRisk := b.Vulnerability.Risk + + switch { + case aRisk > bRisk: + return -1 + case aRisk < bRisk: + return 1 + default: + return 0 + } +} + +func compareByKEV(a, b Match) int { + aKEV := len(a.Vulnerability.KnownExploited) + bKEV := len(b.Vulnerability.KnownExploited) + + switch { + case aKEV > bKEV: + return -1 + case aKEV < bKEV: + return 1 + default: + return 0 + } +} + +func epssPercentile(es []EPSS) float64 { + switch len(es) { + case 0: + return 0.0 + case 1: + return es[0].Percentile + } + sort.Slice(es, func(i, j int) bool { + return es[i].Percentile > es[j].Percentile + }) + return es[0].Percentile +} + +// severityPriority maps severity strings to numeric priority for comparison (the lowest value is most severe) +func severityPriority(severity string) int { switch strings.ToLower(severity) { case "critical": - return 5 + return 1 case "high": - return 4 + return 2 case "medium": return 3 case "low": - return 2 + return 4 case "negligible": - return 1 + return 5 default: - return 0 + return 100 // least severe } } diff --git a/grype/presenter/models/sort_test.go b/grype/presenter/models/sort_test.go index 478550fcab4..e86ae0c3a83 100644 --- a/grype/presenter/models/sort_test.go +++ b/grype/presenter/models/sort_test.go @@ -4,16 +4,154 @@ import ( "testing" 
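For reference, a minimal standalone sketch of the comparator-composition pattern these sort strategies rely on (a chain of tie-breaking compare functions applied in order); the finding type, cmpFunc, byRisk, bySeverity, and the sample values are hypothetical stand-ins for models.Match and its comparators.

package main

import (
	"fmt"
	"sort"
)

// finding is a hypothetical stand-in for models.Match.
type finding struct {
	risk     float64
	severity int // lower value = more severe, as with severityPriority
	name     string
}

type cmpFunc func(a, b finding) int

// combine chains comparators: the first non-zero result wins, and later
// comparators only break ties (the same shape as the combine helper above).
func combine(impls ...cmpFunc) cmpFunc {
	return func(a, b finding) int {
		for _, impl := range impls {
			if r := impl(a, b); r != 0 {
				return r
			}
		}
		return 0
	}
}

func byRisk(a, b finding) int { // higher risk first
	switch {
	case a.risk > b.risk:
		return -1
	case a.risk < b.risk:
		return 1
	}
	return 0
}

func bySeverity(a, b finding) int { // lower priority value (more severe) first
	switch {
	case a.severity < b.severity:
		return -1
	case a.severity > b.severity:
		return 1
	}
	return 0
}

func main() {
	fs := []finding{
		{risk: 10, severity: 1, name: "a"},
		{risk: 10, severity: 3, name: "b"},
		{risk: 96, severity: 4, name: "c"},
	}
	cmp := combine(byRisk, bySeverity)
	sort.SliceStable(fs, func(i, j int) bool { return cmp(fs[i], fs[j]) < 0 })
	fmt.Println(fs) // c (highest risk), then a (ties with b on risk, but more severe), then b
}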
"github.com/google/go-cmp/cmp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -func TestSortMatches(t *testing.T) { - matches := []Match{ +func TestSortStrategies(t *testing.T) { + strategies := SortStrategies() + expected := []SortStrategy{ + SortByPackage, + SortBySeverity, + SortByThreat, + SortByRisk, + SortByKEV, + SortByVulnerability, + } + assert.Equal(t, expected, strategies) +} + +func TestSortStrategyString(t *testing.T) { + assert.Equal(t, "package", SortByPackage.String()) + assert.Equal(t, "severity", SortBySeverity.String()) + assert.Equal(t, "epss", SortByThreat.String()) + assert.Equal(t, "risk", SortByRisk.String()) + assert.Equal(t, "kev", SortByKEV.String()) + assert.Equal(t, "vulnerability", SortByVulnerability.String()) +} + +func TestGetSortStrategy(t *testing.T) { + tests := []struct { + name string + strategyName SortStrategy + expected bool + }{ + { + name: "Valid strategy", + strategyName: SortByPackage, + expected: true, + }, + { + name: "Invalid strategy", + strategyName: "invalid", + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + strategy := getSortStrategy(tt.strategyName) + validStrategy, _ := matchSortStrategy[tt.strategyName] + + if tt.expected { + require.NotNil(t, strategy) + assert.Equal(t, validStrategy, strategy) + } else { + // Should fallback to default strategy + assert.NotNil(t, strategy) + assert.Equal(t, matchSortStrategy[DefaultSortStrategy], strategy) + } + }) + } +} + +func TestEPSSPercentile(t *testing.T) { + tests := []struct { + name string + epss []EPSS + expected float64 + }{ + { + name: "Empty slice", + epss: []EPSS{}, + expected: 0.0, + }, + { + name: "Single item", + epss: []EPSS{ + {Percentile: 0.75}, + }, + expected: 0.75, + }, + { + name: "Multiple items, already sorted", + epss: []EPSS{ + {Percentile: 0.95}, + {Percentile: 0.75}, + {Percentile: 0.50}, + }, + expected: 0.95, + }, + { + name: "Multiple items, unsorted", + epss: []EPSS{ + {Percentile: 0.50}, + {Percentile: 0.95}, + {Percentile: 0.75}, + }, + expected: 0.95, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := epssPercentile(tt.epss) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestSeverityPriority(t *testing.T) { + tests := []struct { + severity string + expected int + }{ + {"critical", 1}, + {"CRITICAL", 1}, + {"high", 2}, + {"HIGH", 2}, + {"medium", 3}, + {"MEDIUM", 3}, + {"low", 4}, + {"LOW", 4}, + {"negligible", 5}, + {"NEGLIGIBLE", 5}, + {"unknown", 100}, + {"", 100}, + } + + for _, tt := range tests { + t.Run(tt.severity, func(t *testing.T) { + result := severityPriority(tt.severity) + assert.Equal(t, tt.expected, result) + }) + } +} + +func createTestMatches() []Match { + return []Match{ { + // match 0: medium severity, high risk, high EPSS, no KEV Vulnerability: Vulnerability{ VulnerabilityMetadata: VulnerabilityMetadata{ ID: "CVE-2023-1111", Severity: "medium", + EPSS: []EPSS{ + {Percentile: 0.90}, + }, + KnownExploited: []KnownExploited{}, // empty KEV }, + Risk: 75.0, }, Artifact: Package{ Name: "package-b", @@ -22,11 +160,17 @@ func TestSortMatches(t *testing.T) { }, }, { + // match 1: critical severity, medium risk, medium EPSS, no KEV Vulnerability: Vulnerability{ VulnerabilityMetadata: VulnerabilityMetadata{ ID: "CVE-2023-2222", Severity: "critical", + EPSS: []EPSS{ + {Percentile: 0.70}, + }, + KnownExploited: []KnownExploited{}, // empty KEV }, + Risk: 50.0, }, Artifact: Package{ Name: "package-a", @@ -35,11 +179,19 @@ 
func TestSortMatches(t *testing.T) { }, }, { + // match 2: high severity, low risk, low EPSS, has KEV Vulnerability: Vulnerability{ VulnerabilityMetadata: VulnerabilityMetadata{ ID: "CVE-2023-3333", Severity: "high", + EPSS: []EPSS{ + {Percentile: 0.30}, + }, + KnownExploited: []KnownExploited{ + {CVE: "CVE-2023-3333", KnownRansomwareCampaignUse: "No"}, + }, // has KEV }, + Risk: 25.0, }, Artifact: Package{ Name: "package-a", @@ -48,11 +200,17 @@ func TestSortMatches(t *testing.T) { }, }, { + // match 3: low severity, very low risk, very low EPSS, no KEV Vulnerability: Vulnerability{ VulnerabilityMetadata: VulnerabilityMetadata{ ID: "CVE-2023-4444", Severity: "low", + EPSS: []EPSS{ + {Percentile: 0.10}, + }, + KnownExploited: []KnownExploited{}, // empty KEV }, + Risk: 10.0, }, Artifact: Package{ Name: "package-c", @@ -61,11 +219,20 @@ func TestSortMatches(t *testing.T) { }, }, { + // match 4: critical severity, very low risk, medium EPSS, has KEV with ransomware Vulnerability: Vulnerability{ VulnerabilityMetadata: VulnerabilityMetadata{ ID: "CVE-2023-5555", Severity: "critical", + EPSS: []EPSS{ + {Percentile: 0.50}, + }, + KnownExploited: []KnownExploited{ + {CVE: "CVE-2023-5555", KnownRansomwareCampaignUse: "Known"}, + {CVE: "CVE-2023-5555", KnownRansomwareCampaignUse: "Known", Product: "Different Product"}, + }, // has multiple KEV entries with ransomware }, + Risk: 5.0, }, Artifact: Package{ Name: "package-a", @@ -74,122 +241,262 @@ func TestSortMatches(t *testing.T) { }, }, } +} - t.Run("SortByPackage", func(t *testing.T) { - testMatches := deepCopyMatches(matches) - SortMatches(testMatches, SortByPackage) - - expected := []Match{ - // package-a with 1.0.0 version, docker type first (alphabetical) - matches[4], // package-a, 1.0.0, docker, critical - matches[2], // package-a, 1.0.0, npm, high - matches[1], // package-a, 2.0.0, docker, critical - matches[0], // package-b, 1.2.0, npm, medium - matches[3], // package-c, 3.1.0, gem, low - } +func TestAllSortStrategies(t *testing.T) { + matches := createTestMatches() - if diff := cmp.Diff(expected, testMatches); diff != "" { - t.Errorf("SortByPackage mismatch (-want +got):\n%s", diff) - } - }) + tests := []struct { + strategy SortStrategy + expected []int // indexes into the original matches slice + }{ + { + strategy: SortByPackage, + expected: []int{4, 2, 1, 0, 3}, // sorted by package name, version, type + }, + { + strategy: SortByVulnerability, + expected: []int{0, 1, 2, 3, 4}, // sorted by vulnerability ID + }, + { + strategy: SortBySeverity, + expected: []int{1, 4, 2, 0, 3}, // sorted by severity: critical, critical, high, medium, low + }, + { + strategy: SortByThreat, + expected: []int{0, 1, 4, 2, 3}, // sorted by EPSS percentile: 0.90, 0.70, 0.50, 0.30, 0.10 + }, + { + strategy: SortByRisk, + expected: []int{0, 1, 2, 3, 4}, // sorted by risk: 75.0, 50.0, 25.0, 10.0, 5.0 + }, + { + strategy: SortByKEV, + expected: []int{4, 2, 0, 1, 3}, // sorted by KEV count: 2, 1, 0, 0, 0 (with ties broken by risk) + }, + } - t.Run("UnknownStrategy", func(t *testing.T) { - testMatches := deepCopyMatches(matches) - // should use default (package) strategy for unknown strategy names - SortMatches(testMatches, "unknown") - - expected := []Match{ - matches[4], // package-a, 1.0.0, docker - matches[2], // package-a, 1.0.0, npm - matches[1], // package-a, 2.0.0, docker - matches[0], // package-b, 1.2.0, npm - matches[3], // package-c, 3.1.0, gem - } + for _, tt := range tests { + t.Run(string(tt.strategy), func(t *testing.T) { + testMatches := 
deepCopyMatches(matches) + SortMatches(testMatches, tt.strategy) - if diff := cmp.Diff(expected, testMatches); diff != "" { - t.Errorf("Unknown strategy mismatch (-want +got):\n%s", diff) - } - }) + expected := make([]Match, len(tt.expected)) + for i, idx := range tt.expected { + expected[i] = matches[idx] + } + + if diff := cmp.Diff(expected, testMatches); diff != "" { + t.Errorf("%s mismatch (-want +got):\n%s", tt.strategy, diff) + } + }) + } } -func TestEdgeCases(t *testing.T) { - t.Run("EmptySlice", func(t *testing.T) { - matches := []Match{} - // should not panic on empty slice - SortMatches(matches, SortByPackage) +func TestIndividualCompareFunctions(t *testing.T) { + ms := createTestMatches() + m0 := ms[0] // medium severity, high risk, high EPSS, no KEV + m1 := ms[1] // critical severity, medium risk, medium EPSS, no KEV + m2 := ms[2] // high severity, low risk, low EPSS, has KEV + m3 := ms[3] // low severity, very low risk, very low EPSS, no KEV + m4 := ms[4] // critical severity, very low risk, medium EPSS, has KEV with ransomware - expected := []Match{} - if diff := cmp.Diff(expected, matches); diff != "" { - t.Errorf("Empty slice mismatch (-want +got):\n%s", diff) + tests := []struct { + name string + compareFunc compareFunc + pairs []struct { + a, b Match + expected int } + }{ + { + name: "compareByVulnerabilityID", + compareFunc: compareByVulnerabilityID, + pairs: []struct { + a, b Match + expected int + }{ + {m0, m1, -1}, // CVE-2023-1111 < CVE-2023-2222 + {m1, m0, 1}, // CVE-2023-2222 > CVE-2023-1111 + {m0, m0, 0}, // Same ID + }, + }, + { + name: "compareBySeverity", + compareFunc: compareBySeverity, + pairs: []struct { + a, b Match + expected int + }{ + {m0, m1, 1}, // medium > critical + {m1, m0, -1}, // critical < medium + {m1, m4, 0}, // both critical + {m2, m3, -1}, // high < low + }, + }, + { + name: "compareByEPSSPercentile", + compareFunc: compareByEPSSPercentile, + pairs: []struct { + a, b Match + expected int + }{ + {m0, m1, -1}, // 0.90 > 0.70 + {m1, m0, 1}, // 0.70 < 0.90 + {m1, m4, -1}, // 0.70 > 0.50 + {m4, m1, 1}, // 0.50 < 0.70 + }, + }, + { + name: "compareByPackageName", + compareFunc: compareByPackageName, + pairs: []struct { + a, b Match + expected int + }{ + {m0, m1, 1}, // package-b > package-a + {m1, m0, -1}, // package-a < package-b + {m1, m2, 0}, // both package-a + }, + }, + { + name: "compareByPackageVersion", + compareFunc: compareByPackageVersion, + pairs: []struct { + a, b Match + expected int + }{ + {m1, m2, 1}, // 2.0.0 > 1.0.0 + {m2, m1, -1}, // 1.0.0 < 2.0.0 + {m2, m4, 0}, // both 1.0.0 + }, + }, + { + name: "compareByPackageType", + compareFunc: compareByPackageType, + pairs: []struct { + a, b Match + expected int + }{ + {m0, m1, 1}, // npm > docker + {m1, m0, -1}, // docker < npm + {m0, m2, 0}, // both npm + }, + }, + { + name: "compareByRisk", + compareFunc: compareByRisk, + pairs: []struct { + a, b Match + expected int + }{ + {m0, m1, -1}, // 75.0 > 50.0 + {m1, m0, 1}, // 50.0 < 75.0 + {m3, m4, -1}, // 10.0 > 5.0 + }, + }, + { + name: "compareByKEV", + compareFunc: compareByKEV, + pairs: []struct { + a, b Match + expected int + }{ + {m0, m2, 1}, // 0 < 1 KEV entry + {m2, m0, -1}, // 1 > 0 KEV entry + {m2, m4, 1}, // 1 < 2 KEV entries + {m4, m2, -1}, // 2 > 1 KEV entry + {m0, m1, 0}, // both 0 KEV entries + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + for _, pair := range tt.pairs { + result := tt.compareFunc(pair.a, pair.b) + assert.Equal(t, pair.expected, result, "comparing %v and %v", 
pair.a.Vulnerability.ID, pair.b.Vulnerability.ID) + } + }) + } +} + +func TestCombinedCompareFunctions(t *testing.T) { + ms := createTestMatches() + m0 := ms[0] // medium severity, high risk, high EPSS, no KEV, package-b + m1 := ms[1] // critical severity, medium risk, medium EPSS, no KEV, package-a + m2 := ms[2] // high severity, low risk, low EPSS, has KEV, package-a + + t.Run("compareVulnerabilityAttributes", func(t *testing.T) { + result := compareVulnerabilityAttributes(m0, m1) + assert.Equal(t, -1, result, "CVE-2023-1111 should come before CVE-2023-2222") + + result = compareVulnerabilityAttributes(m1, m0) + assert.Equal(t, 1, result, "CVE-2023-2222 should come after CVE-2023-1111") }) - t.Run("SingleItem", func(t *testing.T) { - matches := []Match{ - { - Vulnerability: Vulnerability{ - VulnerabilityMetadata: VulnerabilityMetadata{ - ID: "CVE-2023-1111", - Severity: "medium", - }, - }, - Artifact: Package{ - Name: "package-a", - Version: "1.0.0", - }, - }, - } - expected := deepCopyMatches(matches) - // should not change anything with a single item - SortMatches(matches, SortByPackage) + t.Run("comparePackageAttributes", func(t *testing.T) { + result := comparePackageAttributes(m0, m1) + assert.Equal(t, 1, result, "package-b should come after package-a") - if diff := cmp.Diff(expected, matches); diff != "" { - t.Errorf("Single item mismatch (-want +got):\n%s", diff) - } + result = comparePackageAttributes(m1, m2) + assert.Equal(t, 1, result, "package-a 2.0.0 should come after package-a 1.0.0") + + result = comparePackageAttributes(m1, m1) + assert.Equal(t, 0, result, "same package should be equal") }) - t.Run("NilValues", func(t *testing.T) { - matches := []Match{ - { - Vulnerability: Vulnerability{ - VulnerabilityMetadata: VulnerabilityMetadata{ - ID: "CVE-2023-1111", - Severity: "", - }, - }, - Artifact: Package{ - Name: "", - Version: "", - }, - }, - { - Vulnerability: Vulnerability{ - VulnerabilityMetadata: VulnerabilityMetadata{ - ID: "CVE-2023-2222", - Severity: "low", - }, - }, - Artifact: Package{ - Name: "package-a", - Version: "1.0.0", - }, - }, - } + t.Run("combine function", func(t *testing.T) { + // create a combined function that first compares by severity, then by risk if severity is equal + combined := combine(compareBySeverity, compareByRisk) - expected := []Match{ - matches[0], // empty name comes first alphabetically - matches[1], // "package-a" - } + result := combined(m0, m1) + assert.Equal(t, 1, result, "medium should come after critical regardless of risk") - // should handle empty strings properly - SortMatches(matches, SortByPackage) + // create two matches with the same severity but different risk + m5 := m1 // critical severity, risk 50.0 + m6 := m1 + m6.Vulnerability.Risk = 60.0 // critical severity, risk 60.0 - if diff := cmp.Diff(expected, matches); diff != "" { - t.Errorf("Nil values mismatch (-want +got):\n%s", diff) - } + result = combined(m5, m6) + assert.Equal(t, 1, result, "with equal severity, lower risk (50.0) should come after higher risk (60.0)") + + result = combined(m6, m5) + assert.Equal(t, -1, result, "with equal severity, higher risk (60.0) should come before lower risk (50.0)") }) } +func TestSortWithStrategy(t *testing.T) { + matches := createTestMatches() + + // create a custom strategy that sorts only by vulnerability ID + customStrategy := sortStrategyImpl{compareByVulnerabilityID} + + expected := []Match{ + matches[0], // CVE-2023-1111 + matches[1], // CVE-2023-2222 + matches[2], // CVE-2023-3333 + matches[3], // CVE-2023-4444 + 
matches[4], // CVE-2023-5555 + } + + testMatches := deepCopyMatches(matches) + sortWithStrategy(testMatches, customStrategy) + + if diff := cmp.Diff(expected, testMatches); diff != "" { + t.Errorf("sortWithStrategy mismatch (-want +got):\n%s", diff) + } + + // create an empty strategy (should not change the order) + emptyStrategy := sortStrategyImpl{} + originalMatches := deepCopyMatches(matches) + sortWithStrategy(originalMatches, emptyStrategy) + + if diff := cmp.Diff(matches, originalMatches); diff != "" { + t.Errorf("Empty strategy should not change order (-original +after):\n%s", diff) + } +} + func deepCopyMatches(matches []Match) []Match { result := make([]Match, len(matches)) copy(result, matches) diff --git a/grype/presenter/models/source.go b/grype/presenter/models/source.go index 6bbb1fe6158..f21c0674f2f 100644 --- a/grype/presenter/models/source.go +++ b/grype/presenter/models/source.go @@ -15,9 +15,9 @@ type source struct { // newSource creates a new source object to be represented into JSON. func newSource(src syftSource.Description) (source, error) { switch m := src.Metadata.(type) { - case pkg.PURLFileMetadata: + case pkg.SBOMFileMetadata: return source{ - Type: "purl-file", + Type: "sbom-file", Target: m.Path, }, nil case pkg.PURLLiteralMetadata: diff --git a/grype/presenter/models/source_test.go b/grype/presenter/models/source_test.go index 325b772a4e8..851fdf9a076 100644 --- a/grype/presenter/models/source_test.go +++ b/grype/presenter/models/source_test.go @@ -65,12 +65,12 @@ func TestNewSource(t *testing.T) { { name: "purl-file", metadata: syftSource.Description{ - Metadata: pkg.PURLFileMetadata{ + Metadata: pkg.SBOMFileMetadata{ Path: "/path/to/purls.txt", }, }, expected: source{ - Type: "purl-file", + Type: "sbom-file", Target: "/path/to/purls.txt", }, }, diff --git a/grype/presenter/models/vulnerability.go b/grype/presenter/models/vulnerability.go index 391a9f636de..836888e4bb6 100644 --- a/grype/presenter/models/vulnerability.go +++ b/grype/presenter/models/vulnerability.go @@ -12,6 +12,7 @@ type Vulnerability struct { VulnerabilityMetadata Fix Fix `json:"fix"` Advisories []Advisory `json:"advisories"` + Risk float64 `json:"risk"` } type Fix struct { @@ -52,41 +53,46 @@ func NewVulnerability(vuln vulnerability.Vulnerability, metadata *vulnerability. 
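For context on the new risk field surfaced on each vulnerability: the sketch below is purely illustrative and is not grype's actual risk formula; toyRiskScore and its weighting are invented here only to show the kind of inputs (base severity, EPSS probability, KEV membership) that can be folded into a single sortable number like the one now emitted as "risk".

package main

import "fmt"

// NOTE: purely illustrative — not the scoring grype uses. It only demonstrates
// combining a 0-10 base score, an EPSS probability, and a known-exploited flag
// into one comparable value.
func toyRiskScore(baseScore, epss float64, knownExploited bool) float64 {
	risk := baseScore * 10 * epss // scale severity by exploit probability
	if knownExploited {
		risk *= 2 // bump anything on a known-exploited list
	}
	return risk
}

func main() {
	fmt.Printf("%.2f\n", toyRiskScore(8.5, 0.08, true))  // 13.60 — KEV entry with some EPSS signal
	fmt.Printf("%.2f\n", toyRiskScore(8.2, 0.03, false)) // 2.46 — no KEV, low EPSS
}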
State: string(vuln.Fix.State), }, Advisories: advisories, + Risk: metadata.RiskScore(), } } + func sortVersions(fixedVersions []string, format version.Format) []string { if len(fixedVersions) <= 1 { return fixedVersions } - // First, create Version objects from strings (only once) + // first, create Version objects from strings (only once) versionObjs := make([]*version.Version, 0, len(fixedVersions)) + var invalidVersions []string for _, vStr := range fixedVersions { - v, err := version.NewVersion(vStr, format) + v := version.NewVersion(vStr, format) + err := v.Validate() if err != nil { log.WithFields("version", vStr, "error", err).Trace("error parsing version, skipping") + invalidVersions = append(invalidVersions, vStr) continue } versionObjs = append(versionObjs, v) } - // Sort the Version objects + // sort the Version objects sort.Slice(versionObjs, func(i, j int) bool { - // Compare returns -1 if v[i] < v[j], so we negate for descending order - // (higher versions first) comparison, err := versionObjs[i].Compare(versionObjs[j]) if err != nil { log.WithFields("error", err).Trace("error comparing versions") return false } - return comparison > 0 // Descending order + return comparison < 0 }) - // Convert back to strings - result := make([]string, len(versionObjs)) - for i, v := range versionObjs { - result[i] = v.Raw + // convert back to strings + var result []string + for _, v := range versionObjs { + result = append(result, v.Raw) } + result = append(result, invalidVersions...) + return result } diff --git a/grype/presenter/models/vulnerability_metadata.go b/grype/presenter/models/vulnerability_metadata.go index 98910f7955d..d02e6243227 100644 --- a/grype/presenter/models/vulnerability_metadata.go +++ b/grype/presenter/models/vulnerability_metadata.go @@ -71,9 +71,9 @@ func toKnownExploited(knownExploited []vulnerability.KnownExploited) []KnownExpl CVE: ke.CVE, VendorProject: ke.VendorProject, Product: ke.Product, - DateAdded: ke.DateAdded.Format(time.DateOnly), + DateAdded: formatDate(ke.DateAdded), RequiredAction: ke.RequiredAction, - DueDate: ke.DueDate.Format(time.DateOnly), + DueDate: formatDate(ke.DueDate), KnownRansomwareCampaignUse: ke.KnownRansomwareCampaignUse, Notes: ke.Notes, URLs: ke.URLs, @@ -83,6 +83,13 @@ func toKnownExploited(knownExploited []vulnerability.KnownExploited) []KnownExpl return result } +func formatDate(t *time.Time) string { + if t == nil { + return "" + } + return t.Format(time.DateOnly) +} + func toEPSS(epss []vulnerability.EPSS) []EPSS { result := make([]EPSS, len(epss)) for idx, e := range epss { diff --git a/grype/presenter/models/vulnerability_test.go b/grype/presenter/models/vulnerability_test.go new file mode 100644 index 00000000000..c443ef0d0c9 --- /dev/null +++ b/grype/presenter/models/vulnerability_test.go @@ -0,0 +1,78 @@ +package models + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + + "github.com/anchore/grype/grype/version" +) + +func Test_sortVersions(t *testing.T) { + tests := []struct { + name string + versions []string + expected []string + }{ + { + name: "empty slice", + versions: []string{}, + expected: []string{}, + }, + { + name: "single version", + versions: []string{"1.0.0"}, + expected: []string{"1.0.0"}, + }, + { + name: "already sorted versions", + versions: []string{"1.0.0", "1.1.0", "2.0.0"}, + expected: []string{"1.0.0", "1.1.0", "2.0.0"}, + }, + { + name: "unsorted versions", + versions: []string{"2.0.0", "1.0.0", "1.1.0"}, + expected: []string{"1.0.0", "1.1.0", "2.0.0"}, + }, + { + name: "patch versions", + 
versions: []string{"1.0.2", "1.0.1", "1.0.0"}, + expected: []string{"1.0.0", "1.0.1", "1.0.2"}, + }, + { + name: "versions with pre-release", + versions: []string{"1.0.0", "1.0.0-alpha", "1.0.0-beta"}, + expected: []string{"1.0.0-alpha", "1.0.0-beta", "1.0.0"}, + }, + { + name: "mixed pre-release and regular", + versions: []string{"2.0.0", "1.0.0-alpha", "1.0.0", "1.0.0-beta"}, + expected: []string{"1.0.0-alpha", "1.0.0-beta", "1.0.0", "2.0.0"}, + }, + { + name: "versions with build metadata", + versions: []string{"1.0.0+build.2", "1.0.0+build.1", "1.0.0"}, + expected: []string{"1.0.0+build.2", "1.0.0+build.1", "1.0.0"}, + }, + { + name: "complex semantic versions", + versions: []string{"1.0.0-alpha.1", "1.0.0-alpha", "1.0.0-beta.2", "1.0.0-beta.11", "1.0.0-rc.1"}, + expected: []string{"1.0.0-alpha", "1.0.0-alpha.1", "1.0.0-beta.2", "1.0.0-beta.11", "1.0.0-rc.1"}, + }, + { + name: "invalid versions are appended to the end (in the order they were found in)", + versions: []string{"invalid", "2.0.0", "also-invalid", "1.0.0"}, + expected: []string{"1.0.0", "2.0.0", "invalid", "also-invalid"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := sortVersions(tt.versions, version.SemanticFormat) + + if d := cmp.Diff(tt.expected, result); d != "" { + t.Errorf("sortVersions() mismatch (-want +got):\n%s", d) + } + }) + } +} diff --git a/grype/presenter/sarif/presenter.go b/grype/presenter/sarif/presenter.go index ac7cdfc2190..4bd6a3cc765 100644 --- a/grype/presenter/sarif/presenter.go +++ b/grype/presenter/sarif/presenter.go @@ -329,9 +329,23 @@ func securitySeverityValue(m models.Match) string { return "0.0" } +func levelValue(m models.Match) string { + severity := vulnerability.ParseSeverity(m.Vulnerability.Severity) + switch severity { + case vulnerability.CriticalSeverity: + return "error" + case vulnerability.HighSeverity: + return "error" + case vulnerability.MediumSeverity: + return "warning" + } + + return "note" +} + // subtitle generates a subtitle for the given match func subtitle(m models.Match) string { - subtitle := m.Vulnerability.VulnerabilityMetadata.Description + subtitle := m.Vulnerability.Description if subtitle != "" { return subtitle } @@ -360,6 +374,7 @@ func (p Presenter) sarifResults() []*sarif.Result { for _, m := range p.document.Matches { out = append(out, &sarif.Result{ RuleID: sp(p.ruleID(m)), + Level: sp(levelValue(m)), Message: p.resultMessage(m), // According to the SARIF spec, it may be correct to use AnalysisTarget.URI to indicate a logical // file such as a "Dockerfile" but GitHub does not work well with this @@ -392,8 +407,8 @@ func (p Presenter) resultMessage(m models.Match) sarif.Message { src = fmt.Sprintf("at: %s", path) case pkg.PURLLiteralMetadata: src = fmt.Sprintf("from purl literal %q", meta.PURL) - case pkg.PURLFileMetadata: - src = fmt.Sprintf("from purl file %s", meta.Path) + case pkg.SBOMFileMetadata: + src = fmt.Sprintf("from SBOM file %s", meta.Path) } message := fmt.Sprintf("A %s vulnerability in %s package: %s, version %s was found %s", severityText(m), m.Artifact.Type, m.Artifact.Name, m.Artifact.Version, src) diff --git a/grype/presenter/sarif/presenter_test.go b/grype/presenter/sarif/presenter_test.go index 50693d09da0..2b30cfec612 100644 --- a/grype/presenter/sarif/presenter_test.go +++ b/grype/presenter/sarif/presenter_test.go @@ -6,6 +6,7 @@ import ( "os/exec" "testing" + "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -17,7 +18,7 @@ import ( 
"github.com/anchore/syft/syft/source/directorysource" ) -var updateSnapshot = flag.Bool("update-sarif", false, "update .golden files for sarif presenters") +var updateSnapshot = flag.Bool("update", false, "update .golden files for sarif presenters") var validatorImage = "ghcr.io/anchore/sarif-validator:0.1.0@sha256:a0729d695e023740f5df6bcb50d134e88149bea59c63a896a204e88f62b564c6" func TestSarifPresenter(t *testing.T) { @@ -57,8 +58,8 @@ func TestSarifPresenter(t *testing.T) { actual = internal.Redact(actual) expected = internal.Redact(expected) - if !bytes.Equal(expected, actual) { - assert.JSONEq(t, string(expected), string(actual)) + if d := cmp.Diff(string(expected), string(actual)); d != "" { + t.Fatalf("(-want +got):\n%s", d) } }) } @@ -286,6 +287,7 @@ func TestToSarifReport(t *testing.T) { assert.Len(t, run.Results, 2) result := run.Results[0] assert.Equal(t, "CVE-1999-0001-package-1", *result.RuleID) + assert.Equal(t, "note", *result.Level) assert.Len(t, result.Locations, 1) location := result.Locations[0] expectedLocation, ok := tc.locations[*result.RuleID] @@ -296,6 +298,7 @@ func TestToSarifReport(t *testing.T) { result = run.Results[1] assert.Equal(t, "CVE-1999-0002-package-2", *result.RuleID) + assert.Equal(t, "error", *result.Level) assert.Len(t, result.Locations, 1) location = result.Locations[0] expectedLocation, ok = tc.locations[*result.RuleID] diff --git a/grype/presenter/sarif/test-fixtures/snapshot/TestSarifPresenter_directory.golden b/grype/presenter/sarif/test-fixtures/snapshot/TestSarifPresenter_directory.golden index 9edb968a3a0..91900ee484b 100644 --- a/grype/presenter/sarif/test-fixtures/snapshot/TestSarifPresenter_directory.golden +++ b/grype/presenter/sarif/test-fixtures/snapshot/TestSarifPresenter_directory.golden @@ -16,15 +16,15 @@ "text": "CVE-1999-0001 low vulnerability for package-1 package" }, "fullDescription": { - "text": "1999-01 description" + "text": "Version 1.1.1 is affected with an available fix in versions 1.2.1,2.1.3,3.4.0" }, "helpUri": "https://github.com/anchore/grype", "help": { - "text": "Vulnerability CVE-1999-0001\nSeverity: low\nPackage: package-1\nVersion: 1.1.1\nFix Version: 1.2.1,2.1.3,3.4.0\nType: rpm\nLocation: /some/path/somefile-1.txt\nData Namespace: source-1\nLink: CVE-1999-0001", - "markdown": "**Vulnerability CVE-1999-0001**\n| Severity | Package | Version | Fix Version | Type | Location | Data Namespace | Link |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| low | package-1 | 1.1.1 | 1.2.1,2.1.3,3.4.0 | rpm | /some/path/somefile-1.txt | source-1 | CVE-1999-0001 |\n" + "text": "Vulnerability CVE-1999-0001\nSeverity: low\nPackage: package-1\nVersion: 1.1.1\nFix Version: 1.2.1,2.1.3,3.4.0\nType: rpm\nLocation: /some/path/somefile-1.txt\nData Namespace: \nLink: CVE-1999-0001", + "markdown": "**Vulnerability CVE-1999-0001**\n| Severity | Package | Version | Fix Version | Type | Location | Data Namespace | Link |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| low | package-1 | 1.1.1 | 1.2.1,2.1.3,3.4.0 | rpm | /some/path/somefile-1.txt | | CVE-1999-0001 |\n" }, "properties": { - "security-severity": "4.0" + "security-severity": "8.2" } }, { @@ -34,18 +34,18 @@ "text": "CVE-1999-0002 critical vulnerability for package-2 package" }, "fullDescription": { - "text": "1999-02 description" + "text": "Version 2.2.2 is affected with no fixes reported yet." 
}, "helpUri": "https://github.com/anchore/grype", "help": { - "text": "Vulnerability CVE-1999-0002\nSeverity: critical\nPackage: package-2\nVersion: 2.2.2\nFix Version: \nType: deb\nLocation: /some/path/somefile-2.txt\nData Namespace: source-2\nLink: CVE-1999-0002", - "markdown": "**Vulnerability CVE-1999-0002**\n| Severity | Package | Version | Fix Version | Type | Location | Data Namespace | Link |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| critical | package-2 | 2.2.2 | | deb | /some/path/somefile-2.txt | source-2 | CVE-1999-0002 |\n" + "text": "Vulnerability CVE-1999-0002\nSeverity: critical\nPackage: package-2\nVersion: 2.2.2\nFix Version: \nType: deb\nLocation: /some/path/somefile-2.txt\nData Namespace: \nLink: CVE-1999-0002", + "markdown": "**Vulnerability CVE-1999-0002**\n| Severity | Package | Version | Fix Version | Type | Location | Data Namespace | Link |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| critical | package-2 | 2.2.2 | | deb | /some/path/somefile-2.txt | | CVE-1999-0002 |\n" }, "properties": { "purls": [ "pkg:deb/package-2@2.2.2" ], - "security-severity": "1.0" + "security-severity": "8.5" } } ] @@ -54,6 +54,7 @@ "results": [ { "ruleId": "CVE-1999-0001-package-1", + "level": "note", "message": { "text": "A low vulnerability in rpm package: package-1, version 1.1.1 was found at: /some/path/somefile-1.txt" }, @@ -78,6 +79,7 @@ }, { "ruleId": "CVE-1999-0002-package-2", + "level": "error", "message": { "text": "A critical vulnerability in deb package: package-2, version 2.2.2 was found at: /some/path/somefile-2.txt" }, diff --git a/grype/presenter/sarif/test-fixtures/snapshot/TestSarifPresenter_image.golden b/grype/presenter/sarif/test-fixtures/snapshot/TestSarifPresenter_image.golden index 027cd809648..b9d33518d2b 100644 --- a/grype/presenter/sarif/test-fixtures/snapshot/TestSarifPresenter_image.golden +++ b/grype/presenter/sarif/test-fixtures/snapshot/TestSarifPresenter_image.golden @@ -16,15 +16,15 @@ "text": "CVE-1999-0001 low vulnerability for package-1 package" }, "fullDescription": { - "text": "1999-01 description" + "text": "Version 1.1.1 is affected with an available fix in versions 1.2.1,2.1.3,3.4.0" }, "helpUri": "https://github.com/anchore/grype", "help": { - "text": "Vulnerability CVE-1999-0001\nSeverity: low\nPackage: package-1\nVersion: 1.1.1\nFix Version: 1.2.1,2.1.3,3.4.0\nType: rpm\nLocation: somefile-1.txt\nData Namespace: source-1\nLink: CVE-1999-0001", - "markdown": "**Vulnerability CVE-1999-0001**\n| Severity | Package | Version | Fix Version | Type | Location | Data Namespace | Link |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| low | package-1 | 1.1.1 | 1.2.1,2.1.3,3.4.0 | rpm | somefile-1.txt | source-1 | CVE-1999-0001 |\n" + "text": "Vulnerability CVE-1999-0001\nSeverity: low\nPackage: package-1\nVersion: 1.1.1\nFix Version: 1.2.1,2.1.3,3.4.0\nType: rpm\nLocation: somefile-1.txt\nData Namespace: \nLink: CVE-1999-0001", + "markdown": "**Vulnerability CVE-1999-0001**\n| Severity | Package | Version | Fix Version | Type | Location | Data Namespace | Link |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| low | package-1 | 1.1.1 | 1.2.1,2.1.3,3.4.0 | rpm | somefile-1.txt | | CVE-1999-0001 |\n" }, "properties": { - "security-severity": "4.0" + "security-severity": "8.2" } }, { @@ -34,18 +34,18 @@ "text": "CVE-1999-0002 critical vulnerability for package-2 package" }, "fullDescription": { - "text": "1999-02 description" + "text": "Version 2.2.2 is affected with no fixes reported yet." 
}, "helpUri": "https://github.com/anchore/grype", "help": { - "text": "Vulnerability CVE-1999-0002\nSeverity: critical\nPackage: package-2\nVersion: 2.2.2\nFix Version: \nType: deb\nLocation: somefile-2.txt\nData Namespace: source-2\nLink: CVE-1999-0002", - "markdown": "**Vulnerability CVE-1999-0002**\n| Severity | Package | Version | Fix Version | Type | Location | Data Namespace | Link |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| critical | package-2 | 2.2.2 | | deb | somefile-2.txt | source-2 | CVE-1999-0002 |\n" + "text": "Vulnerability CVE-1999-0002\nSeverity: critical\nPackage: package-2\nVersion: 2.2.2\nFix Version: \nType: deb\nLocation: somefile-2.txt\nData Namespace: \nLink: CVE-1999-0002", + "markdown": "**Vulnerability CVE-1999-0002**\n| Severity | Package | Version | Fix Version | Type | Location | Data Namespace | Link |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| critical | package-2 | 2.2.2 | | deb | somefile-2.txt | | CVE-1999-0002 |\n" }, "properties": { "purls": [ "pkg:deb/package-2@2.2.2" ], - "security-severity": "1.0" + "security-severity": "8.5" } } ] @@ -54,6 +54,7 @@ "results": [ { "ruleId": "CVE-1999-0001-package-1", + "level": "note", "message": { "text": "A low vulnerability in rpm package: package-1, version 1.1.1 was found in image user-input at: somefile-1.txt" }, @@ -84,6 +85,7 @@ }, { "ruleId": "CVE-1999-0002-package-2", + "level": "error", "message": { "text": "A critical vulnerability in deb package: package-2, version 2.2.2 was found in image user-input at: somefile-2.txt" }, diff --git a/grype/presenter/table/__snapshots__/presenter_test.snap b/grype/presenter/table/__snapshots__/presenter_test.snap index 73149c75fee..9b1fc7b8f45 100755 --- a/grype/presenter/table/__snapshots__/presenter_test.snap +++ b/grype/presenter/table/__snapshots__/presenter_test.snap @@ -1,15 +1,15 @@ [TestTablePresenter/no_color - 1] -NAME INSTALLED FIXED-IN TYPE VULNERABILITY SEVERITY -package-1 1.1.1 *1.2.1, 2.1.3, 3.4.0 rpm CVE-1999-0001 Low -package-2 2.2.2 deb CVE-1999-0002 Critical +NAME INSTALLED FIXED IN TYPE VULNERABILITY SEVERITY EPSS % RISK +package-1 1.1.1 *1.2.1, 2.1.3, 3.4.0 rpm CVE-1999-0001 Low 42.00 1.7 +package-2 2.2.2 deb CVE-1999-0002 Critical 53.00 96.3 (kev) --- [TestTablePresenter/with_color - 1] -NAME INSTALLED FIXED-IN TYPE VULNERABILITY SEVERITY -package-1 1.1.1 1.2.1, 2.1.3, 3.4.0 rpm CVE-1999-0001 Low -package-2 2.2.2 deb CVE-1999-0002 Critical +NAME INSTALLED FIXED IN TYPE VULNERABILITY SEVERITY EPSS % RISK +package-1 1.1.1 1.2.1, 2.1.3, 3.4.0 rpm CVE-1999-0001 Low 42.00 1.7 +package-2 2.2.2 deb CVE-1999-0002 Critical 53.00 96.3 KEV --- @@ -19,18 +19,35 @@ No vulnerabilities found --- [TestHidesIgnoredMatches - 1] -NAME INSTALLED FIXED-IN TYPE VULNERABILITY SEVERITY -package-1 1.1.1 *1.2.1, 2.1.3, 3.4.0 rpm CVE-1999-0001 Low -package-2 2.2.2 deb CVE-1999-0002 Critical +NAME INSTALLED FIXED IN TYPE VULNERABILITY SEVERITY EPSS % RISK +package-1 1.1.1 *1.2.1, 2.1.3, 3.4.0 rpm CVE-1999-0001 Low 42.00 1.7 +package-2 2.2.2 deb CVE-1999-0002 Critical 53.00 96.3 (kev) --- [TestDisplaysIgnoredMatches - 1] -NAME INSTALLED FIXED-IN TYPE VULNERABILITY SEVERITY -package-1 1.1.1 *1.2.1, 2.1.3, 3.4.0 rpm CVE-1999-0001 Low -package-2 2.2.2 deb CVE-1999-0002 Critical -package-2 2.2.2 deb CVE-1999-0001 Low (suppressed) -package-2 2.2.2 deb CVE-1999-0002 Critical (suppressed) -package-2 2.2.2 deb CVE-1999-0004 Critical (suppressed by VEX) +NAME INSTALLED FIXED IN TYPE VULNERABILITY SEVERITY EPSS % RISK +package-1 1.1.1 *1.2.1, 2.1.3, 3.4.0 rpm 
CVE-1999-0001 Low 42.00 1.7 +package-2 2.2.2 deb CVE-1999-0002 Critical 53.00 96.3 (kev) +package-2 2.2.2 deb CVE-1999-0001 Low 42.00 1.7 (suppressed) +package-2 2.2.2 deb CVE-1999-0002 Critical 53.00 96.3 (kev, suppressed) +package-2 2.2.2 deb CVE-1999-0004 High 75.00 2.2 (suppressed by VEX) + +--- + +[TestDisplaysDistro - 1] +NAME INSTALLED FIXED IN TYPE VULNERABILITY SEVERITY EPSS % RISK +package-1 1.1.1 *1.2.1, 2.1.3, 3.4.0 rpm CVE-1999-0001 Low 42.00 1.7 (ubuntu:2.5) +package-2 2.2.2 deb CVE-1999-0002 Critical 53.00 96.3 (kev, ubuntu:3.5) + +--- + +[TestDisplaysIgnoredMatchesAndDistro - 1] +NAME INSTALLED FIXED IN TYPE VULNERABILITY SEVERITY EPSS % RISK +package-1 1.1.1 *1.2.1, 2.1.3, 3.4.0 rpm CVE-1999-0001 Low 42.00 1.7 (ubuntu:2.5) +package-2 2.2.2 deb CVE-1999-0002 Critical 53.00 96.3 (kev, ubuntu:3.5) +package-2 2.2.2 deb CVE-1999-0001 Low 42.00 1.7 (ubuntu:2.5, suppressed) +package-2 2.2.2 deb CVE-1999-0002 Critical 53.00 96.3 (kev, ubuntu:3.5, suppressed) +package-2 2.2.2 deb CVE-1999-0004 High 75.00 2.2 (suppressed by VEX) --- diff --git a/grype/presenter/table/presenter.go b/grype/presenter/table/presenter.go index 3e0671490e1..5bbae0c9e2f 100644 --- a/grype/presenter/table/presenter.go +++ b/grype/presenter/table/presenter.go @@ -1,20 +1,24 @@ package table import ( + "fmt" "io" "strings" "github.com/charmbracelet/lipgloss" "github.com/olekukonko/tablewriter" + "github.com/olekukonko/tablewriter/renderer" + "github.com/olekukonko/tablewriter/tw" "github.com/scylladb/go-set/strset" + "github.com/anchore/grype/grype/db/v5/namespace/distro" "github.com/anchore/grype/grype/presenter/models" "github.com/anchore/grype/grype/vulnerability" ) const ( - appendSuppressed = " (suppressed)" - appendSuppressedVEX = " (suppressed by VEX)" + appendSuppressed = "suppressed" + appendSuppressedVEX = "suppressed by VEX" ) // Presenter is a generic struct for holding fields needed for reporting @@ -24,6 +28,14 @@ type Presenter struct { withColor bool recommendedFixStyle lipgloss.Style + kevStyle lipgloss.Style + criticalStyle lipgloss.Style + highStyle lipgloss.Style + mediumStyle lipgloss.Style + lowStyle lipgloss.Style + negligibleStyle lipgloss.Style + auxiliaryStyle lipgloss.Style + unknownStyle lipgloss.Style } type rows []row @@ -35,6 +47,25 @@ type row struct { PackageType string VulnerabilityID string Severity string + EPSS epss + Risk string + Annotation string +} + +type epss struct { + Score float64 + Percentile float64 +} + +func (e epss) String() string { + percentile := e.Percentile * 100 + switch { + case percentile == 0: + return " N/A" + case percentile < 0.1: + return "< 0.1%" + } + return fmt.Sprintf("%5.2f", percentile) } // NewPresenter is a *Presenter constructor @@ -42,13 +73,22 @@ func NewPresenter(pb models.PresenterConfig, showSuppressed bool) *Presenter { withColor := supportsColor() fixStyle := lipgloss.NewStyle().Border(lipgloss.Border{Left: "*"}, false, false, false, true) if withColor { - fixStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("6")).Bold(true).Underline(true) + fixStyle = lipgloss.NewStyle() } return &Presenter{ document: pb.Document, showSuppressed: showSuppressed, withColor: withColor, recommendedFixStyle: fixStyle, + negligibleStyle: lipgloss.NewStyle().Foreground(lipgloss.Color("240")), // dark gray + lowStyle: lipgloss.NewStyle().Foreground(lipgloss.Color("36")), // cyan/teal + mediumStyle: lipgloss.NewStyle().Foreground(lipgloss.Color("178")), // gold/amber + highStyle: lipgloss.NewStyle().Foreground(lipgloss.Color("203")), // salmon/light 
red + criticalStyle: lipgloss.NewStyle().Foreground(lipgloss.Color("198")).Bold(true), // bright pink + kevStyle: lipgloss.NewStyle().Foreground(lipgloss.Color("198")).Reverse(true).Bold(true), // white on bright pink + //kevStyle: lipgloss.NewStyle().Foreground(lipgloss.Color("198")), // bright pink + auxiliaryStyle: lipgloss.NewStyle().Foreground(lipgloss.Color("240")), // dark gray + unknownStyle: lipgloss.NewStyle().Foreground(lipgloss.Color("12")), // light blue } } @@ -61,48 +101,68 @@ func (p *Presenter) Present(output io.Writer) error { return err } - table := tablewriter.NewWriter(output) - table.SetHeader([]string{"Name", "Installed", "Fixed-In", "Type", "Vulnerability", "Severity"}) - table.SetAutoWrapText(false) - table.SetHeaderAlignment(tablewriter.ALIGN_LEFT) - table.SetAlignment(tablewriter.ALIGN_LEFT) - - table.SetHeaderLine(false) - table.SetBorder(false) - table.SetAutoFormatHeaders(true) - table.SetCenterSeparator("") - table.SetColumnSeparator("") - table.SetRowSeparator("") - table.SetTablePadding(" ") - table.SetNoWhiteSpace(true) - - if p.withColor { - for _, row := range rs.Deduplicate() { - severityColor := getSeverityColor(row.Severity) - table.Rich(row.Columns(), []tablewriter.Colors{ - {}, // name - {}, // version - {}, // fix - {}, // package type - {}, // vulnerability ID - severityColor, // severity - }) - } - } else { - table.AppendBulk(rs.Render()) + table := newTable(output, []string{"Name", "Installed", "Fixed In", "Type", "Vulnerability", "Severity", "EPSS%", "Risk"}) + + if err := table.Bulk(rs.Render()); err != nil { + return fmt.Errorf("failed to add table rows: %w", err) } - table.Render() + return table.Render() +} - return nil +func newTable(output io.Writer, columns []string) *tablewriter.Table { + return tablewriter.NewTable(output, + tablewriter.WithHeader(columns), + tablewriter.WithHeaderAutoWrap(tw.WrapNone), + tablewriter.WithRowAutoWrap(tw.WrapNone), + tablewriter.WithAutoHide(tw.On), + tablewriter.WithRenderer(renderer.NewBlueprint()), + tablewriter.WithBehavior( + tw.Behavior{ + TrimSpace: tw.On, + AutoHide: tw.On, + }, + ), + tablewriter.WithPadding( + tw.Padding{ + Right: " ", + }, + ), + tablewriter.WithRendition( + tw.Rendition{ + Symbols: tw.NewSymbols(tw.StyleNone), + Settings: tw.Settings{ + Lines: tw.Lines{ + ShowTop: tw.Off, + ShowBottom: tw.Off, + ShowHeaderLine: tw.Off, + ShowFooterLine: tw.Off, + }, + }, + }, + ), + ) } func (p *Presenter) getRows(doc models.Document, showSuppressed bool) rows { var rs rows + multipleDistros := false + existingDistro := "" + for _, m := range doc.Matches { + if _, err := distro.FromString(m.Vulnerability.Namespace); err == nil { + if existingDistro == "" { + existingDistro = m.Vulnerability.Namespace + } else if existingDistro != m.Vulnerability.Namespace { + multipleDistros = true + break + } + } + } + // generate rows for matching vulnerabilities for _, m := range doc.Matches { - rs = append(rs, p.newRow(m, "")) + rs = append(rs, p.newRow(m, "", multipleDistros)) } // generate rows for suppressed vulnerabilities @@ -116,7 +176,7 @@ func (p *Presenter) getRows(doc models.Document, showSuppressed bool) rows { } } } - rs = append(rs, p.newRow(m.Match, msg)) + rs = append(rs, p.newRow(m.Match, msg, multipleDistros)) } } return rs @@ -126,10 +186,34 @@ func supportsColor() bool { return lipgloss.NewStyle().Foreground(lipgloss.Color("5")).Render("") != "" } -func (p *Presenter) newRow(m models.Match, severitySuffix string) row { - severity := m.Vulnerability.Severity - if severity != "" { - 
severity += severitySuffix +func (p *Presenter) newRow(m models.Match, extraAnnotation string, showDistro bool) row { + var annotations []string + + if showDistro { + if d, err := distro.FromString(m.Vulnerability.Namespace); err == nil { + annotations = append(annotations, p.auxiliaryStyle.Render(fmt.Sprintf("%s:%s", d.DistroType(), d.Version()))) + } + } + + if extraAnnotation != "" { + annotations = append(annotations, p.auxiliaryStyle.Render(extraAnnotation)) + } + + var kev, annotation string + if len(m.Vulnerability.KnownExploited) > 0 { + if p.withColor { + kev = p.kevStyle.Reverse(false).Render("") + p.kevStyle.Render("KEV") + p.kevStyle.Reverse(false).Render("") // ⚡❋◆◉፨⿻⨳✖• + } else { + annotations = append([]string{"kev"}, annotations...) + } + } + + if len(annotations) > 0 { + annotation = p.auxiliaryStyle.Render("(") + strings.Join(annotations, p.auxiliaryStyle.Render(", ")) + p.auxiliaryStyle.Render(")") + } + + if kev != "" { + annotation = kev + " " + annotation } return row{ @@ -138,11 +222,58 @@ func (p *Presenter) newRow(m models.Match, severitySuffix string) row { Fix: p.formatFix(m), PackageType: string(m.Artifact.Type), VulnerabilityID: m.Vulnerability.ID, - Severity: severity, + Severity: p.formatSeverity(m.Vulnerability.Severity), + EPSS: newEPSS(m.Vulnerability.EPSS), + Risk: p.formatRisk(m.Vulnerability.Risk), + Annotation: annotation, + } +} + +func newEPSS(es []models.EPSS) epss { + if len(es) == 0 { + return epss{} + } + return epss{ + Score: es[0].EPSS, + Percentile: es[0].Percentile, } } +func (p *Presenter) formatSeverity(severity string) string { + var severityStyle *lipgloss.Style + switch strings.ToLower(severity) { + case "critical": + severityStyle = &p.criticalStyle + case "high": + severityStyle = &p.highStyle + case "medium": + severityStyle = &p.mediumStyle + case "low": + severityStyle = &p.lowStyle + case "negligible": + severityStyle = &p.negligibleStyle + } + + if severityStyle == nil { + severityStyle = &p.unknownStyle + } + + return severityStyle.Render(severity) +} + +func (p *Presenter) formatRisk(risk float64) string { + // TODO: add color to risk? + switch { + case risk == 0: + return " N/A" + case risk < 0.1: + return "< 0.1" + } + return fmt.Sprintf("%5.1f", risk) +} + func (p *Presenter) formatFix(m models.Match) string { + // adjust the model fix state values for better presentation switch m.Vulnerability.Fix.State { case vulnerability.FixStateWontFix.String(): return "(won't fix)" @@ -150,6 +281,19 @@ func (p *Presenter) formatFix(m models.Match) string { return "" } + // do our best to summarize the fixed versions, de-epmhasize non-recommended versions + // also, since there is not a lot of screen real estate, we will truncate the list of fixed versions + // to ~30 characters (or so) to avoid wrapping. 
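For reference, a simplified standalone sketch of the budget-based version-list truncation the presenter performs; truncateVersions, the 20-character budget, and the sample versions are hypothetical and ignore the recommended-fix highlighting that the real code layers on top.

package main

import (
	"fmt"
	"strings"
)

// truncateVersions keeps versions until a rough character budget is spent,
// then signals the overflow with an ellipsis. The first version is always
// kept so the column is never empty.
func truncateVersions(versions []string, budget int) string {
	var kept []string
	used := 0
	for _, v := range versions {
		if used+len(v) > budget && len(kept) > 0 {
			return strings.Join(kept, ", ") + ", ..."
		}
		kept = append(kept, v)
		used += len(v)
	}
	return strings.Join(kept, ", ")
}

func main() {
	fixes := []string{"1.2.1", "2.1.3", "3.4.0", "3.4.1", "3.5.0", "4.0.0"}
	fmt.Println(truncateVersions(fixes, 20)) // prints the first four, then ", ..."
}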
+ return p.applyTruncation( + p.formatVersionsToDisplay( + m, + getRecommendedVersions(m), + ), + m.Vulnerability.Fix.Versions, + ) +} + +func getRecommendedVersions(m models.Match) *strset.Set { recommended := strset.New() for _, d := range m.MatchDetails { if d.Fix == nil { @@ -159,22 +303,72 @@ func (p *Presenter) formatFix(m models.Match) string { recommended.Add(d.Fix.SuggestedVersion) } } + return recommended +} - var vers []string +const maxVersionFieldLength = 30 + +func (p *Presenter) formatVersionsToDisplay(m models.Match, recommendedVersions *strset.Set) []string { hasMultipleVersions := len(m.Vulnerability.Fix.Versions) > 1 + shouldHighlightRecommended := hasMultipleVersions && recommendedVersions.Size() > 0 + + var currentCharacterCount int + added := strset.New() + var vers []string + for _, v := range m.Vulnerability.Fix.Versions { - if hasMultipleVersions && recommended.Has(v) { - vers = append(vers, p.recommendedFixStyle.Render(v)) - continue + if added.Has(v) { + continue // skip duplicates + } + + if shouldHighlightRecommended { + if recommendedVersions.Has(v) { + // recommended versions always get added + added.Add(v) + currentCharacterCount += len(v) + vers = append(vers, p.recommendedFixStyle.Render(v)) + continue + } + + // skip not-necessarily-recommended versions if we're running out of space + if currentCharacterCount+len(v) > maxVersionFieldLength { + continue + } + + // add not-necessarily-recommended versions with auxiliary styling + currentCharacterCount += len(v) + added.Add(v) + vers = append(vers, p.auxiliaryStyle.Render(v)) + } else { + // when not prioritizing, add all versions + added.Add(v) + vers = append(vers, v) } - vers = append(vers, v) } - return strings.Join(vers, ", ") + return vers +} + +func (p *Presenter) applyTruncation(formattedVersions []string, allVersions []string) string { + finalVersions := strings.Join(formattedVersions, p.auxiliaryStyle.Render(", ")) + + var characterCount int + for _, v := range allVersions { + characterCount += len(v) + } + + if characterCount > maxVersionFieldLength && len(allVersions) > 1 { + finalVersions += p.auxiliaryStyle.Render(", ...") + } + + return finalVersions } func (r row) Columns() []string { - return []string{r.Name, r.Version, r.Fix, r.PackageType, r.VulnerabilityID, r.Severity} + if r.Annotation != "" { + return []string{r.Name, r.Version, r.Fix, r.PackageType, r.VulnerabilityID, r.Severity, r.EPSS.String(), r.Risk, r.Annotation} + } + return []string{r.Name, r.Version, r.Fix, r.PackageType, r.VulnerabilityID, r.Severity, r.EPSS.String(), r.Risk} } func (r row) String() string { @@ -209,23 +403,3 @@ func (rs rows) Deduplicate() []row { // render final columns return deduped } - -func getSeverityColor(severity string) tablewriter.Colors { - severityFontType, severityColor := tablewriter.Normal, tablewriter.Normal - - switch strings.ToLower(severity) { - case "critical": - severityFontType = tablewriter.Bold - severityColor = tablewriter.FgRedColor - case "high": - severityColor = tablewriter.FgRedColor - case "medium": - severityColor = tablewriter.FgYellowColor - case "low": - severityColor = tablewriter.FgGreenColor - case "negligible": - severityColor = tablewriter.FgBlueColor - } - - return tablewriter.Colors{severityFontType, severityColor} -} diff --git a/grype/presenter/table/presenter_test.go b/grype/presenter/table/presenter_test.go index bdd93f97bde..24bd168611a 100644 --- a/grype/presenter/table/presenter_test.go +++ b/grype/presenter/table/presenter_test.go @@ -33,18 +33,26 @@ func 
TestCreateRow(t *testing.T) { Versions: []string{"1.0.2", "2.0.1", "3.0.4"}, State: vulnerability.FixStateFixed.String(), }, + Risk: 87.2, VulnerabilityMetadata: models.VulnerabilityMetadata{ ID: "CVE-1999-0001", Namespace: "source-1", Description: "1999-01 description", - Severity: "Low", + Severity: "Medium", Cvss: []models.Cvss{ { Metrics: models.CvssMetrics{ - BaseScore: 4, + BaseScore: 7, }, - Vector: "another vector", - Version: "3.0", + Vector: "CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:C/C:L/I:L/A:H", + Version: "3.1", + }, + }, + EPSS: []models.EPSS{ + { + CVE: "CVE-1999-0001", + EPSS: 0.3, + Percentile: 0.5, }, }, }, @@ -60,30 +68,43 @@ func TestCreateRow(t *testing.T) { }, }, } + + matchWithKev := match1 + matchWithKev.Vulnerability.KnownExploited = append(matchWithKev.Vulnerability.KnownExploited, models.KnownExploited{ + CVE: "CVE-1999-0001", + KnownRansomwareCampaignUse: "Known", + }) + cases := []struct { - name string - match models.Match - severitySuffix string - expectedRow []string + name string + match models.Match + extraAnnotation string + expectedRow []string }{ { - name: "create row for vulnerability", - match: match1, - severitySuffix: "", - expectedRow: []string{match1.Artifact.Name, match1.Artifact.Version, "1.0.2, *2.0.1, 3.0.4", string(match1.Artifact.Type), match1.Vulnerability.ID, "Low"}, + name: "create row for vulnerability", + match: match1, + extraAnnotation: "", + expectedRow: []string{match1.Artifact.Name, match1.Artifact.Version, "1.0.2, *2.0.1, 3.0.4", string(match1.Artifact.Type), match1.Vulnerability.ID, "Medium", "50.00", " 87.2"}, }, { - name: "create row for suppressed vulnerability", - match: match1, - severitySuffix: appendSuppressed, - expectedRow: []string{match1.Artifact.Name, match1.Artifact.Version, "1.0.2, *2.0.1, 3.0.4", string(match1.Artifact.Type), match1.Vulnerability.ID, "Low (suppressed)"}, + name: "create row for suppressed vulnerability", + match: match1, + extraAnnotation: appendSuppressed, + expectedRow: []string{match1.Artifact.Name, match1.Artifact.Version, "1.0.2, *2.0.1, 3.0.4", string(match1.Artifact.Type), match1.Vulnerability.ID, "Medium", "50.00", " 87.2", "(suppressed)"}, + }, + { + name: "create row for suppressed vulnerability + Kev", + match: matchWithKev, + extraAnnotation: appendSuppressed, + expectedRow: []string{match1.Artifact.Name, match1.Artifact.Version, "1.0.2, *2.0.1, 3.0.4", string(match1.Artifact.Type), match1.Vulnerability.ID, "Medium", "50.00", " 87.2", "(kev, suppressed)"}, }, } for _, testCase := range cases { t.Run(testCase.name, func(t *testing.T) { p := NewPresenter(models.PresenterConfig{}, false) - row := p.newRow(testCase.match, testCase.severitySuffix) + row := p.newRow(testCase.match, testCase.extraAnnotation, false) cols := rows{row}.Render()[0] assert.Equal(t, testCase.expectedRow, cols) @@ -175,6 +196,45 @@ func TestDisplaysIgnoredMatches(t *testing.T) { snaps.MatchSnapshot(t, actual) } +func TestDisplaysDistro(t *testing.T) { + var buffer bytes.Buffer + pb := models.PresenterConfig{ + Document: internal.GenerateAnalysisWithIgnoredMatches(t, internal.ImageSource), + } + + pb.Document.Matches[0].Vulnerability.Namespace = "ubuntu:distro:ubuntu:2.5" + pb.Document.Matches[1].Vulnerability.Namespace = "ubuntu:distro:ubuntu:3.5" + + pres := NewPresenter(pb, false) + + err := pres.Present(&buffer) + require.NoError(t, err) + + actual := buffer.String() + snaps.MatchSnapshot(t, actual) +} + +func TestDisplaysIgnoredMatchesAndDistro(t *testing.T) { + var buffer bytes.Buffer + pb := models.PresenterConfig{ + 
Document: internal.GenerateAnalysisWithIgnoredMatches(t, internal.ImageSource), + } + + pb.Document.Matches[0].Vulnerability.Namespace = "ubuntu:distro:ubuntu:2.5" + pb.Document.Matches[1].Vulnerability.Namespace = "ubuntu:distro:ubuntu:3.5" + + pb.Document.IgnoredMatches[0].Vulnerability.Namespace = "ubuntu:distro:ubuntu:2.5" + pb.Document.IgnoredMatches[1].Vulnerability.Namespace = "ubuntu:distro:ubuntu:3.5" + + pres := NewPresenter(pb, true) + + err := pres.Present(&buffer) + require.NoError(t, err) + + actual := buffer.String() + snaps.MatchSnapshot(t, actual) +} + func TestRowsRender(t *testing.T) { t.Run("empty rows returns empty slice", func(t *testing.T) { @@ -191,7 +251,7 @@ func TestRowsRender(t *testing.T) { result := rs.Render() expected := [][]string{ - {"pkg1", "1.0.0", "1.1.0", "os", "CVE-2023-1234", "critical"}, + {"pkg1", "1.0.0", "1.1.0", "os", "CVE-2023-1234", "critical", "75.00", " N/A"}, } if diff := cmp.Diff(expected, result); diff != "" { @@ -209,9 +269,9 @@ func TestRowsRender(t *testing.T) { result := rs.Render() expected := [][]string{ - {"pkgA", "1.0.0", "", "os", "CVE-2023-1234", "critical"}, - {"pkgB", "2.0.0", "(won't fix)", "os", "CVE-2023-5678", "high"}, - {"pkgC", "3.0.0", "3.1.0", "os", "CVE-2023-9012", "medium"}, + {"pkgA", "1.0.0", "", "os", "CVE-2023-1234", "critical", "75.00", " N/A"}, + {"pkgB", "2.0.0", "(won't fix)", "os", "CVE-2023-5678", "high", "75.00", " N/A"}, + {"pkgC", "3.0.0", "3.1.0", "os", "CVE-2023-9012", "medium", "75.00", " N/A"}, } if diff := cmp.Diff(expected, result); diff != "" { @@ -226,17 +286,15 @@ func TestRowsRender(t *testing.T) { result := rs.Render() expected := [][]string{ - {"pkg1", "1.0.0", "1.1.0", "os", "CVE-2023-1234", "critical"}, + {"pkg1", "1.0.0", "1.1.0", "os", "CVE-2023-1234", "critical", "75.00", " N/A"}, } if diff := cmp.Diff(expected, result); diff != "" { t.Errorf("Render() mismatch (-want +got):\n%s", diff) } - // should have 7 columns: name, version, fix, packageType, vulnID, severity - if len(result[0]) != 6 { - t.Errorf("Expected 7 columns, got %d", len(result[0])) - } + // expected columns: name, version, fix, packageType, vulnID, severity, epss, risk + assert.Len(t, result[0], 8) }) } @@ -251,6 +309,24 @@ func createTestRow(name, version, fix, pkgType, vulnID, severity string, fixStat VulnerabilityMetadata: models.VulnerabilityMetadata{ ID: vulnID, Severity: severity, + Cvss: []models.Cvss{ + { + Source: "nvd", + Type: "CVSS", + Version: "3.1", + Vector: "CVSS:3.1/AV:N/AC:H/PR:L/UI:N/S:C/C:H/I:L/A:L", + Metrics: models.CvssMetrics{ + BaseScore: 7.2, + }, + }, + }, + EPSS: []models.EPSS{ + { + CVE: vulnID, + EPSS: 0.03, + Percentile: 0.75, + }, + }, }, }, Artifact: models.Package{ @@ -261,7 +337,7 @@ func createTestRow(name, version, fix, pkgType, vulnID, severity string, fixStat } p := NewPresenter(models.PresenterConfig{}, false) - r := p.newRow(m, "") + r := p.newRow(m, "", false) return r, nil } diff --git a/grype/presenter/template/presenter.go b/grype/presenter/template/presenter.go index bf61c055b80..402b92017c5 100644 --- a/grype/presenter/template/presenter.go +++ b/grype/presenter/template/presenter.go @@ -8,9 +8,9 @@ import ( "text/template" "github.com/Masterminds/sprig/v3" - "github.com/mitchellh/go-homedir" "github.com/anchore/clio" + "github.com/anchore/go-homedir" "github.com/anchore/grype/grype/presenter/models" ) diff --git a/grype/presenter/template/test-fixtures/snapshot/TestPresenter_Present.golden b/grype/presenter/template/test-fixtures/snapshot/TestPresenter_Present.golden index 
0ac37fa30dc..05d638465b3 100644 --- a/grype/presenter/template/test-fixtures/snapshot/TestPresenter_Present.golden +++ b/grype/presenter/template/test-fixtures/snapshot/TestPresenter_Present.golden @@ -2,11 +2,11 @@ Identified distro as centos version 8.0. Vulnerability: CVE-1999-0001 Severity: Low Package: package-1 version 1.1.1 (rpm) - CPEs: ["cpe:2.3:a:anchore:engine:0.9.2:*:*:python:*:*:*:*"] + CPEs: ["cpe:2.3:a:anchore\\:oss:anchore\\/engine:0.9.2:*:*:en:*:*:*:*"] Matched by: dpkg-matcher Vulnerability: CVE-1999-0002 Severity: Critical Package: package-2 version 2.2.2 (deb) - CPEs: ["cpe:2.3:a:anchore:engine:2.2.2:*:*:python:*:*:*:*"] + CPEs: ["cpe:2.3:a:anchore:engine:2.2.2:*:*:en:*:*:*:*"] Matched by: dpkg-matcher diff --git a/grype/search/distro.go b/grype/search/distro.go index da5ffa809d0..65f526bf20e 100644 --- a/grype/search/distro.go +++ b/grype/search/distro.go @@ -24,7 +24,7 @@ type DistroCriteria struct { func (c *DistroCriteria) MatchesVulnerability(value vulnerability.Vulnerability) (bool, string, error) { ns, err := namespace.FromString(value.Namespace) if err != nil { - return false, fmt.Sprintf("unable to determine namespace for vulnerability %v: %v", value.Reference.ID, err), nil + return false, fmt.Sprintf("unable to determine namespace for vulnerability %v: %v", value.ID, err), nil } dns, ok := ns.(*distroNs.Namespace) if !ok || dns == nil { @@ -63,13 +63,13 @@ func matchesDistro(d *distro.Distro, ns *distroNs.Namespace) bool { return false } - ty := namespace.DistroTypeString(d.Type) - - distroType := ns.DistroType() - if distroType != d.Type && distroType != distro.Type(ty) { + distroType := mimicV6DistroTypeOverrides(ns.DistroType()) + targetType := mimicV6DistroTypeOverrides(d.Type) + if distroType != targetType { return false } - return compatibleVersion(d.FullVersion(), ns.Version()) + + return compatibleVersion(d.Version, ns.Version()) } // compatibleVersion returns true when the versions are the same or the partial version describes the matching parts @@ -86,3 +86,34 @@ func compatibleVersion(fullVersion string, partialVersion string) bool { } return false } + +// TODO: this is a temporary workaround... in the long term the mock should more strongly enforce +// data overrides and not require this kind of logic being baked into mocks directly. 
+func mimicV6DistroTypeOverrides(t distro.Type) distro.Type { + overrideMap := map[string]string{ + "centos": "rhel", + "rocky": "rhel", + "rockylinux": "rhel", + "alma": "rhel", + "almalinux": "rhel", + "gentoo": "rhel", + "archlinux": "arch", + "oracle": "ol", + "oraclelinux": "ol", + "amazon": "amzn", + "amazonlinux": "amzn", + } + + applyMapping := func(i string) distro.Type { + if replacement, exists := distro.IDMapping[i]; exists { + return replacement + } + return distro.Type(i) + } + + if replacement, exists := overrideMap[string(t)]; exists { + return applyMapping(replacement) + } + + return applyMapping(string(t)) +} diff --git a/grype/search/distro_test.go b/grype/search/distro_test.go index 08bf6c6ebbc..e4ede881142 100644 --- a/grype/search/distro_test.go +++ b/grype/search/distro_test.go @@ -11,8 +11,7 @@ import ( ) func Test_ByDistro(t *testing.T) { - deb8, err := distro.New(distro.Debian, "8", "") - require.NoError(t, err) + deb8 := distro.New(distro.Debian, "8", "") tests := []struct { name string diff --git a/grype/search/ecosystem.go b/grype/search/ecosystem.go index 6ce6954166d..17aa8e8df91 100644 --- a/grype/search/ecosystem.go +++ b/grype/search/ecosystem.go @@ -25,7 +25,7 @@ type EcosystemCriteria struct { func (c *EcosystemCriteria) MatchesVulnerability(value vulnerability.Vulnerability) (bool, string, error) { ns, err := namespace.FromString(value.Namespace) if err != nil { - return false, fmt.Sprintf("unable to determine namespace for vulnerability %v: %v", value.Reference.ID, err), nil + return false, fmt.Sprintf("unable to determine namespace for vulnerability %v: %v", value.ID, err), nil } lang, ok := ns.(*language.Namespace) if !ok || lang == nil { diff --git a/grype/search/version_constraint.go b/grype/search/version_constraint.go index aed3f153649..3335733c5ee 100644 --- a/grype/search/version_constraint.go +++ b/grype/search/version_constraint.go @@ -25,8 +25,8 @@ func ByVersion(v version.Version) vulnerability.Criteria { return ByConstraintFunc(func(constraint version.Constraint) (bool, error) { satisfied, err := constraint.Satisfied(&v) if err != nil { - var formatErr *version.UnsupportedFormatError - if errors.As(err, &formatErr) { + var unsupportedError *version.UnsupportedComparisonError + if errors.As(err, &unsupportedError) { // if the format is unsupported, then the constraint is not satisfied, but this should not be conveyed as an error log.WithFields("reason", err).Trace("unsatisfied constraint") return false, nil diff --git a/grype/search/version_constraint_test.go b/grype/search/version_constraint_test.go index 2fbfbbe1381..d375f7bc3c2 100644 --- a/grype/search/version_constraint_test.go +++ b/grype/search/version_constraint_test.go @@ -40,8 +40,7 @@ func Test_ByVersion(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - v, err := version.NewVersion(tt.version, version.SemanticFormat) - require.NoError(t, err) + v := version.NewVersion(tt.version, version.SemanticFormat) constraint := ByVersion(*v) matches, reason, err := constraint.MatchesVulnerability(tt.input) wantErr := require.NoError diff --git a/grype/version/apk_constraint.go b/grype/version/apk_constraint.go deleted file mode 100644 index 679a405819d..00000000000 --- a/grype/version/apk_constraint.go +++ /dev/null @@ -1,73 +0,0 @@ -//nolint:dupl -package version - -import "fmt" - -type apkConstraint struct { - raw string - expression constraintExpression -} - -func newApkConstraint(raw string) (apkConstraint, error) { - if raw == "" { - // empty constraints 
are always satisfied - return apkConstraint{}, nil - } - - constraints, err := newConstraintExpression(raw, newApkComparator) - if err != nil { - return apkConstraint{}, fmt.Errorf("unable to parse apk constraint phrase: %w", err) - } - - return apkConstraint{ - raw: raw, - expression: constraints, - }, nil -} - -func newApkComparator(unit constraintUnit) (Comparator, error) { - ver, err := newApkVersion(unit.version) - if err != nil { - return nil, fmt.Errorf("unable to parse constraint version (%s): %w", unit.version, err) - } - - return ver, nil -} - -func (c apkConstraint) supported(format Format) bool { - return format == ApkFormat -} - -func (c apkConstraint) Satisfied(version *Version) (bool, error) { - if c.raw == "" && version != nil { - // empty constraints are always satisfied - return true, nil - } - - if version == nil { - if c.raw != "" { - // a non-empty constraint with no version given should always fail - return false, nil - } - - return true, nil - } - - if !c.supported(version.Format) { - return false, NewUnsupportedFormatError(ApkFormat, version.Format) - } - - if version.rich.apkVer == nil { - return false, fmt.Errorf("no rich apk version given: %+v", version) - } - - return c.expression.satisfied(version) -} - -func (c apkConstraint) String() string { - if c.raw == "" { - return "none (apk)" - } - - return fmt.Sprintf("%s (apk)", c.raw) -} diff --git a/grype/version/apk_constraint_test.go b/grype/version/apk_constraint_test.go deleted file mode 100644 index 184ab7ab6c8..00000000000 --- a/grype/version/apk_constraint_test.go +++ /dev/null @@ -1,60 +0,0 @@ -package version - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestVersionApk(t *testing.T) { - tests := []testCase{ - {version: "2.3.1", constraint: "", satisfied: true}, - // compound conditions - {version: "2.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "1.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: true}, - {version: "2.0.0", constraint: "> 1.0.0, <= 2.0.0", satisfied: true}, - {version: "2.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "1.0.0", constraint: ">= 1.0.0, < 2.0.0", satisfied: true}, - {version: "1.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "0.9.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "1.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, - {version: "0.2.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, - {version: "0.0.1", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "0.6.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "2.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - // fixed-in scenarios - {version: "2.3.1", constraint: "< 2.0.0", satisfied: false}, - {version: "2.3.1", constraint: "< 2.0", satisfied: false}, - {version: "2.3.1", constraint: "< 2", satisfied: false}, - {version: "2.3.1", constraint: "< 2.3", satisfied: false}, - {version: "2.3.1", constraint: "< 2.3.1", satisfied: false}, - {version: "2.3.1", constraint: "< 2.3.2", satisfied: true}, - {version: "2.3.1", constraint: "< 2.4", satisfied: true}, - {version: "2.3.1", constraint: "< 3", satisfied: true}, - {version: "2.3.1", constraint: "< 3.0", satisfied: true}, - {version: "2.3.1", constraint: "< 3.0.0", satisfied: true}, - // alpine specific scenarios - // https://wiki.alpinelinux.org/wiki/APKBUILD_Reference#pkgver - {version: "1.5.1-r1", 
constraint: "< 1.5.1", satisfied: false}, - {version: "1.5.1-r1", constraint: "> 1.5.1", satisfied: true}, - {version: "9.3.2-r4", constraint: "< 9.3.4-r2", satisfied: true}, - {version: "9.3.4-r2", constraint: "> 9.3.4", satisfied: true}, - {version: "4.2.52_p2-r1", constraint: "< 4.2.52_p4-r2", satisfied: true}, - {version: "4.2.52_p2-r1", constraint: "> 4.2.52_p4-r2", satisfied: false}, - {version: "0.1.0_alpha", constraint: "< 0.1.3_alpha", satisfied: true}, - {version: "0.1.0_alpha2", constraint: "> 0.1.0_alpha", satisfied: true}, - {version: "1.1", constraint: "> 1.1_alpha1", satisfied: true}, - {version: "1.1", constraint: "< 1.1_alpha1", satisfied: false}, - {version: "2.3.0b-r1", constraint: "< 2.3.0b-r2", satisfied: true}, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - constraint, err := newApkConstraint(test.constraint) - - assert.NoError(t, err, "unexpected error from newApkConstraint: %v", err) - test.assertVersionConstraint(t, ApkFormat, constraint) - - }) - } -} diff --git a/grype/version/apk_version.go b/grype/version/apk_version.go index a70c2941560..241f9f27e91 100644 --- a/grype/version/apk_version.go +++ b/grype/version/apk_version.go @@ -1,35 +1,35 @@ package version import ( - "fmt" - apk "github.com/knqyf263/go-apk-version" ) +var _ Comparator = (*apkVersion)(nil) + type apkVersion struct { obj apk.Version } -func newApkVersion(raw string) (*apkVersion, error) { +func newApkVersion(raw string) (apkVersion, error) { ver, err := apk.NewVersion(raw) if err != nil { - return nil, err + return apkVersion{}, invalidFormatError(ApkFormat, raw, err) } - return &apkVersion{ + return apkVersion{ obj: ver, }, nil } -func (a *apkVersion) Compare(other *Version) (int, error) { - other, err := finalizeComparisonVersion(other, ApkFormat) - if err != nil { - return -1, err +func (v apkVersion) Compare(other *Version) (int, error) { + if other == nil { + return -1, ErrNoVersionProvided } - if other.rich.apkVer == nil { - return -1, fmt.Errorf("given empty apkVersion object") + apkVer, err := newApkVersion(other.Raw) + if err != nil { + return -1, err } - return other.rich.apkVer.obj.Compare(a.obj), nil + return v.obj.Compare(apkVer.obj), nil } diff --git a/grype/version/apk_version_test.go b/grype/version/apk_version_test.go index 43584f09f53..cf7be62e468 100644 --- a/grype/version/apk_version_test.go +++ b/grype/version/apk_version_test.go @@ -8,7 +8,60 @@ import ( "github.com/stretchr/testify/require" ) -func TestApkVersionCompare(t *testing.T) { +func TestApkVersion_Constraint(t *testing.T) { + tests := []testCase{ + {version: "2.3.1", constraint: "", satisfied: true}, + // compound conditions + {version: "2.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "1.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: true}, + {version: "2.0.0", constraint: "> 1.0.0, <= 2.0.0", satisfied: true}, + {version: "2.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "1.0.0", constraint: ">= 1.0.0, < 2.0.0", satisfied: true}, + {version: "1.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.9.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "1.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.2.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.0.1", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.6.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + 
{version: "2.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + // fixed-in scenarios + {version: "2.3.1", constraint: "< 2.0.0", satisfied: false}, + {version: "2.3.1", constraint: "< 2.0", satisfied: false}, + {version: "2.3.1", constraint: "< 2", satisfied: false}, + {version: "2.3.1", constraint: "< 2.3", satisfied: false}, + {version: "2.3.1", constraint: "< 2.3.1", satisfied: false}, + {version: "2.3.1", constraint: "< 2.3.2", satisfied: true}, + {version: "2.3.1", constraint: "< 2.4", satisfied: true}, + {version: "2.3.1", constraint: "< 3", satisfied: true}, + {version: "2.3.1", constraint: "< 3.0", satisfied: true}, + {version: "2.3.1", constraint: "< 3.0.0", satisfied: true}, + // alpine specific scenarios + // https://wiki.alpinelinux.org/wiki/APKBUILD_Reference#pkgver + {version: "1.5.1-r1", constraint: "< 1.5.1", satisfied: false}, + {version: "1.5.1-r1", constraint: "> 1.5.1", satisfied: true}, + {version: "9.3.2-r4", constraint: "< 9.3.4-r2", satisfied: true}, + {version: "9.3.4-r2", constraint: "> 9.3.4", satisfied: true}, + {version: "4.2.52_p2-r1", constraint: "< 4.2.52_p4-r2", satisfied: true}, + {version: "4.2.52_p2-r1", constraint: "> 4.2.52_p4-r2", satisfied: false}, + {version: "0.1.0_alpha", constraint: "< 0.1.3_alpha", satisfied: true}, + {version: "0.1.0_alpha2", constraint: "> 0.1.0_alpha", satisfied: true}, + {version: "1.1", constraint: "> 1.1_alpha1", satisfied: true}, + {version: "1.1", constraint: "< 1.1_alpha1", satisfied: false}, + {version: "2.3.0b-r1", constraint: "< 2.3.0b-r2", satisfied: true}, + } + + for _, test := range tests { + t.Run(test.tName(), func(t *testing.T) { + constraint, err := GetConstraint(test.constraint, ApkFormat) + + assert.NoError(t, err, "unexpected error from newApkConstraint: %v", err) + test.assertVersionConstraint(t, ApkFormat, constraint) + + }) + } +} + +func TestApkVersion_Compare(t *testing.T) { tests := []struct { name string thisVersion string @@ -25,20 +78,19 @@ func TestApkVersionCompare(t *testing.T) { expectError: false, }, { - name: "different format returns error", - thisVersion: "1.2.3-r4", - otherVersion: "1.2.3", - otherFormat: SemanticFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", + name: "different format does not return error", + thisVersion: "1.2.3-r4", + otherVersion: "1.2.3", + otherFormat: SemanticFormat, + expectError: false, }, { - name: "different format returns error - deb", + name: "different format does not return error - deb", thisVersion: "1.2.3-r4", otherVersion: "1.2.3-1", otherFormat: DebFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", + expectError: false, + errorSubstring: "unsupported version comparison", }, { name: "unknown format attempts upgrade - valid apk format", @@ -53,7 +105,7 @@ func TestApkVersionCompare(t *testing.T) { otherVersion: "not-valid-apk-format", otherFormat: UnknownFormat, expectError: true, - errorSubstring: "unsupported version format for comparison", + errorSubstring: "invalid version", }, } @@ -62,13 +114,12 @@ func TestApkVersionCompare(t *testing.T) { thisVer, err := newApkVersion(test.thisVersion) require.NoError(t, err) - otherVer, err := NewVersion(test.otherVersion, test.otherFormat) - require.NoError(t, err) + otherVer := NewVersion(test.otherVersion, test.otherFormat) result, err := thisVer.Compare(otherVer) if test.expectError { - assert.Error(t, err) + require.Error(t, err) if test.errorSubstring != "" { assert.True(t, 
strings.Contains(err.Error(), test.errorSubstring),
                     "Expected error to contain '%s', got: %v", test.errorSubstring, err)
@@ -81,46 +132,31 @@ func TestApkVersionCompare(t *testing.T) {
     }
 }
 
-func TestApkVersionCompareEdgeCases(t *testing.T) {
+func TestApkVersion_Compare_EdgeCases(t *testing.T) {
     tests := []struct {
         name           string
-        setupFunc      func() (*apkVersion, *Version)
+        setupFunc      func(testing.TB) (*Version, *Version)
         expectError    bool
         errorSubstring string
     }{
         {
             name: "nil version object",
-            setupFunc: func() (*apkVersion, *Version) {
-                thisVer, _ := newApkVersion("1.2.3-r4")
+            setupFunc: func(t testing.TB) (*Version, *Version) {
+                thisVer := NewVersion("1.2.3-r4", ApkFormat)
 
                 return thisVer, nil
             },
             expectError:    true,
             errorSubstring: "no version provided for comparison",
         },
-        {
-            name: "empty apkVersion in other object",
-            setupFunc: func() (*apkVersion, *Version) {
-                thisVer, _ := newApkVersion("1.2.3-r4")
-                otherVer := &Version{
-                    Raw:    "1.2.3-r5",
-                    Format: ApkFormat,
-                    rich:   rich{}, // don't populate the rich.apkVer field
-                }
-
-                return thisVer, otherVer
-            },
-            expectError:    true,
-            errorSubstring: "given empty apkVersion object",
-        },
     }
 
     for _, test := range tests {
         t.Run(test.name, func(t *testing.T) {
-            thisVer, otherVer := test.setupFunc()
+            thisVer, otherVer := test.setupFunc(t)
 
             _, err := thisVer.Compare(otherVer)
 
-            assert.Error(t, err)
+            require.Error(t, err)
             if test.errorSubstring != "" {
                 assert.True(t, strings.Contains(err.Error(), test.errorSubstring),
                     "Expected error to contain '%s', got: %v", test.errorSubstring, err)
diff --git a/grype/version/bitnami_version.go b/grype/version/bitnami_version.go
new file mode 100644
index 00000000000..05f93a3bac0
--- /dev/null
+++ b/grype/version/bitnami_version.go
@@ -0,0 +1,61 @@
+package version
+
+import (
+    "fmt"
+    "strings"
+
+    bitnami "github.com/bitnami/go-version/pkg/version"
+
+    hashiVer "github.com/anchore/go-version"
+)
+
+var _ Comparator = (*bitnamiVersion)(nil)
+
+type bitnamiVersion struct {
+    obj *hashiVer.Version
+}
+
+func newBitnamiVersion(raw string) (bitnamiVersion, error) {
+    bv, err := bitnami.Parse(raw)
+    if err != nil {
+        fmtErr := err
+        verObj, err := hashiVer.NewVersion(raw)
+        if err != nil {
+            return bitnamiVersion{}, invalidFormatError(BitnamiFormat, raw, fmtErr)
+        }
+        var segments []string
+        for _, segment := range verObj.Segments() {
+            segments = append(segments, fmt.Sprintf("%d", segment))
+        }
+        // drop any pre-release info
+        raw = strings.Join(segments, ".")
+    } else {
+        raw = fmt.Sprintf("%d.%d.%d", bv.Major(), bv.Minor(), bv.Patch())
+    }
+
+    // We can't assume that a Bitnami revision addresses a known vulnerability, since
+    // Bitnami package revisions are built from exactly the same upstream source code
+    // used to create the previous version, so we discard the revision.
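+    // (e.g. "2.3.1-1" and "2.3.1-2" both compare as equal to "2.3.1"; see the
+    // revision-handling cases in bitnami_version_test.go)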
+ verObj, err := hashiVer.NewVersion(raw) + if err != nil { + return bitnamiVersion{}, invalidFormatError(BitnamiFormat, raw, err) + } + return bitnamiVersion{ + obj: verObj, + }, nil +} + +func (v bitnamiVersion) Compare(other *Version) (int, error) { + if other == nil { + return -1, ErrNoVersionProvided + } + + bv, err := newBitnamiVersion(other.Raw) + + if err != nil { + return 0, err + } + + return v.obj.Compare(bv.obj), nil +} diff --git a/grype/version/bitnami_version_test.go b/grype/version/bitnami_version_test.go new file mode 100644 index 00000000000..a7b24f5c5f1 --- /dev/null +++ b/grype/version/bitnami_version_test.go @@ -0,0 +1,166 @@ +package version + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestBitnamiVersion_Constraint(t *testing.T) { + tests := []testCase{ + // empty values + {version: "2.3.1", constraint: "", satisfied: true}, + // typical cases + {version: "1.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.2.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.0.1", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.6.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "2.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "2.3.1", constraint: "2.3.1", satisfied: true}, + {version: "2.3.1", constraint: "= 2.3.1", satisfied: true}, + {version: "2.3.1", constraint: " = 2.3.1", satisfied: true}, + {version: "2.3.1", constraint: ">= 2.3.1", satisfied: true}, + {version: "2.3.1", constraint: "> 2.0.0", satisfied: true}, + {version: "2.3.1", constraint: "> 2.0", satisfied: true}, + {version: "2.3.1", constraint: "> 2", satisfied: true}, + {version: "2.3.1", constraint: "> 2, < 3", satisfied: true}, + {version: "2.3.1", constraint: "> 2.3, < 3.1", satisfied: true}, + {version: "2.3.1", constraint: "> 2.3.0, < 3.1", satisfied: true}, + {version: "2.3.1", constraint: ">= 2.3.1, < 3.1", satisfied: true}, + {version: "2.3.1", constraint: " = 2.3.2", satisfied: false}, + {version: "2.3.1", constraint: ">= 2.3.2", satisfied: false}, + {version: "2.3.1", constraint: "> 2.3.1", satisfied: false}, + {version: "2.3.1", constraint: "< 2.0.0", satisfied: false}, + {version: "2.3.1", constraint: "< 2.0", satisfied: false}, + {version: "2.3.1", constraint: "< 2", satisfied: false}, + {version: "2.3.1", constraint: "< 2, > 3", satisfied: false}, + {version: "2.3.1-1", constraint: "2.3.1", satisfied: true}, + {version: "2.3.1-1", constraint: "= 2.3.1", satisfied: true}, + {version: "2.3.1-1", constraint: " = 2.3.1", satisfied: true}, + {version: "2.3.1-1", constraint: ">= 2.3.1", satisfied: true}, + {version: "2.3.1-1", constraint: "> 2.0.0", satisfied: true}, + {version: "2.3.1-1", constraint: "> 2.0", satisfied: true}, + {version: "2.3.1-1", constraint: "> 2", satisfied: true}, + {version: "2.3.1-1", constraint: "> 2, < 3", satisfied: true}, + {version: "2.3.1-1", constraint: "> 2.3, < 3.1", satisfied: true}, + {version: "2.3.1-1", constraint: "> 2.3.0, < 3.1", satisfied: true}, + {version: "2.3.1-1", constraint: ">= 2.3.1, < 3.1", satisfied: true}, + {version: "2.3.1-1", constraint: " = 2.3.2", satisfied: false}, + {version: "2.3.1-1", constraint: ">= 2.3.2", satisfied: false}, + {version: "2.3.1-1", constraint: "< 2.0.0", satisfied: false}, + {version: "2.3.1-1", constraint: "< 2.0", satisfied: false}, + {version: 
"2.3.1-1", constraint: "< 2", satisfied: false}, + {version: "2.3.1-1", constraint: "< 2, > 3", satisfied: false}, + // ignoring revisions + {version: "2.3.1-1", constraint: "> 2.3.1", satisfied: false}, + {version: "2.3.1-1", constraint: "< 2.3.1-2", satisfied: false}, + } + + for _, test := range tests { + t.Run(test.tName(), func(t *testing.T) { + constraint, err := GetConstraint(test.constraint, BitnamiFormat) + + require.NoError(t, err) + test.assertVersionConstraint(t, BitnamiFormat, constraint) + }) + } +} + +func TestBitnamiVersion_Compare(t *testing.T) { + tests := []struct { + name string + thisVersion string + otherVersion string + otherFormat Format + expectError bool + errorSubstring string + }{ + { + name: "same format successful comparison", + thisVersion: "1.2.3-4", + otherVersion: "1.2.3-5", + otherFormat: BitnamiFormat, + expectError: false, + }, + { + name: "semantic versioning successful comparison", + thisVersion: "1.2.3-4", + otherVersion: "1.2.3", + otherFormat: SemanticFormat, + expectError: false, + }, + { + name: "unknown format attempts upgrade - valid semver format", + thisVersion: "1.2.3-4", + otherVersion: "1.2.3-5", + otherFormat: UnknownFormat, + expectError: false, + }, + { + name: "unknown format attempts upgrade - invalid semver format", + thisVersion: "1.2.3-4", + otherVersion: "not-valid-semver-format", + otherFormat: UnknownFormat, + expectError: true, + errorSubstring: "invalid semantic version", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + thisVer, err := newBitnamiVersion(test.thisVersion) + require.NoError(t, err) + + otherVer := NewVersion(test.otherVersion, test.otherFormat) + + result, err := thisVer.Compare(otherVer) + + if test.expectError { + require.Error(t, err) + if test.errorSubstring != "" { + assert.True(t, strings.Contains(err.Error(), test.errorSubstring), + "Expected error to contain '%s', got: %v", test.errorSubstring, err) + } + } else { + assert.NoError(t, err) + assert.Contains(t, []int{-1, 0, 1}, result, "Expected comparison result to be -1, 0, or 1") + } + }) + } +} + +func TestBitnamiVersion_Compare_EdgeCases(t *testing.T) { + tests := []struct { + name string + setupFunc func(testing.TB) (*Version, *Version) + expectError bool + errorSubstring string + }{ + { + name: "nil version object", + setupFunc: func(t testing.TB) (*Version, *Version) { + thisVer := NewVersion("1.2.3-4", BitnamiFormat) + + return thisVer, nil + }, + expectError: true, + errorSubstring: "no version provided for comparison", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + thisVer, otherVer := test.setupFunc(t) + + _, err := thisVer.Compare(otherVer) + + require.Error(t, err) + if test.errorSubstring != "" { + assert.True(t, strings.Contains(err.Error(), test.errorSubstring), + "Expected error to contain '%s', got: %v", test.errorSubstring, err) + } + }) + } +} diff --git a/grype/version/combined_constraint.go b/grype/version/combined_constraint.go new file mode 100644 index 00000000000..2769bb2ec7b --- /dev/null +++ b/grype/version/combined_constraint.go @@ -0,0 +1,82 @@ +package version + +import ( + "fmt" + "strings" + + "github.com/scylladb/go-set/strset" +) + +func CombineConstraints(constraints ...Constraint) Constraint { + constraints = uniqueConstraints(constraints...) 
+ + if len(constraints) == 0 { + return nil + } + if len(constraints) == 1 { + return constraints[0] + } + + return combinedConstraint{ + OrOperands: constraints, + } +} + +type combinedConstraint struct { + OrOperands []Constraint +} + +func (c combinedConstraint) String() string { + return fmt.Sprintf("%s (%s)", c.Value(), strings.ToLower(c.Format().String())) +} + +func (c combinedConstraint) Value() string { + // TODO: there is room for improvement here to make this more readable (filter out redundant constraints... e.g. <1.0 || < 2.0 should just be < 2.0) + var str string + for i, op := range c.OrOperands { + if i > 0 { + str += " || " + } + str += op.Value() + } + return str +} + +func (c combinedConstraint) Format() Format { + format := UnknownFormat + if len(c.OrOperands) > 0 { + format = c.OrOperands[0].Format() + } + return format +} + +func (c combinedConstraint) Satisfied(version *Version) (bool, error) { + if version == nil { + return false, fmt.Errorf("cannot evaluate combined constraint with nil version") + } + + for _, op := range c.OrOperands { + satisfied, err := op.Satisfied(version) + if err != nil { + return false, fmt.Errorf("error evaluating constraint %s: %w", op, err) + } + if satisfied { + return true, nil + } + } + + return false, nil +} + +func uniqueConstraints(constraints ...Constraint) []Constraint { + var nonNil []Constraint + seen := strset.New() + for _, c := range constraints { + if c == nil || seen.Has(c.Value()) { + continue + } + seen.Add(c.Value()) + nonNil = append(nonNil, c) + } + return nonNil +} diff --git a/grype/version/combined_constraint_test.go b/grype/version/combined_constraint_test.go new file mode 100644 index 00000000000..4b9338ce847 --- /dev/null +++ b/grype/version/combined_constraint_test.go @@ -0,0 +1,307 @@ +package version + +import ( + "errors" + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestCombineConstraints(t *testing.T) { + tests := []struct { + name string + constraints []Constraint + want Constraint + }{ + { + name: "no constraints returns nil", + constraints: []Constraint{}, + want: nil, + }, + { + name: "single constraint returns same constraint", + constraints: []Constraint{ + MustGetConstraint(">= 1.0.0", SemanticFormat), + }, + want: MustGetConstraint(">= 1.0.0", SemanticFormat), + }, + { + name: "multiple constraints returns combined constraint", + constraints: []Constraint{ + MustGetConstraint(">= 1.0.0", SemanticFormat), + MustGetConstraint("< 2.0.0", SemanticFormat), + }, + want: combinedConstraint{ + OrOperands: []Constraint{ + MustGetConstraint(">= 1.0.0", SemanticFormat), + MustGetConstraint("< 2.0.0", SemanticFormat), + }, + }, + }, + { + name: "nil constraints are filtered out", + constraints: []Constraint{ + nil, + MustGetConstraint(">= 1.0.0", SemanticFormat), + nil, + }, + want: MustGetConstraint(">= 1.0.0", SemanticFormat), + }, + { + name: "duplicate constraints are filtered out", + constraints: []Constraint{ + MustGetConstraint(">= 1.0.0", SemanticFormat), + MustGetConstraint(">= 1.0.0", SemanticFormat), + MustGetConstraint("< 2.0.0", SemanticFormat), + }, + want: combinedConstraint{ + OrOperands: []Constraint{ + MustGetConstraint(">= 1.0.0", SemanticFormat), + MustGetConstraint("< 2.0.0", SemanticFormat), + }, + }, + }, + { + name: "all nil constraints returns nil", + constraints: []Constraint{ + nil, + nil, + }, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := CombineConstraints(tt.constraints...) 
+ require.Equal(t, tt.want, got) + }) + } +} + +func TestCombinedConstraint_Methods(t *testing.T) { + tests := []struct { + name string + constraint combinedConstraint + version *Version + wantValue string + wantString string + wantFormat Format + wantSatisfied bool + wantErr require.ErrorAssertionFunc + }{ + { + name: "single operand semantic constraint satisfied", + constraint: combinedConstraint{ + OrOperands: []Constraint{ + MustGetConstraint(">= 1.0.0", SemanticFormat), + }, + }, + version: NewVersion("1.5.0", SemanticFormat), + wantValue: ">= 1.0.0", + wantString: ">= 1.0.0 (semantic)", + wantFormat: SemanticFormat, + wantSatisfied: true, + }, + { + name: "single operand semantic constraint not satisfied", + constraint: combinedConstraint{ + OrOperands: []Constraint{ + MustGetConstraint(">= 2.0.0", SemanticFormat), + }, + }, + version: NewVersion("1.5.0", SemanticFormat), + wantValue: ">= 2.0.0", + wantString: ">= 2.0.0 (semantic)", + wantFormat: SemanticFormat, + wantSatisfied: false, + }, + { + name: "multiple operands with OR logic - first satisfies", + constraint: combinedConstraint{ + OrOperands: []Constraint{ + MustGetConstraint(">= 1.0.0", SemanticFormat), + MustGetConstraint(">= 3.0.0", SemanticFormat), + }, + }, + version: NewVersion("1.5.0", SemanticFormat), + wantValue: ">= 1.0.0 || >= 3.0.0", + wantString: ">= 1.0.0 || >= 3.0.0 (semantic)", + wantFormat: SemanticFormat, + wantSatisfied: true, + }, + { + name: "multiple operands with OR logic - second satisfies", + constraint: combinedConstraint{ + OrOperands: []Constraint{ + MustGetConstraint(">= 2.0.0", SemanticFormat), + MustGetConstraint("< 2.0.0", SemanticFormat), + }, + }, + version: NewVersion("1.5.0", SemanticFormat), + wantValue: ">= 2.0.0 || < 2.0.0", + wantString: ">= 2.0.0 || < 2.0.0 (semantic)", + wantFormat: SemanticFormat, + wantSatisfied: true, + }, + { + name: "multiple operands with OR logic - none satisfy", + constraint: combinedConstraint{ + OrOperands: []Constraint{ + MustGetConstraint(">= 2.0.0", SemanticFormat), + MustGetConstraint(">= 3.0.0", SemanticFormat), + }, + }, + version: NewVersion("1.5.0", SemanticFormat), + wantValue: ">= 2.0.0 || >= 3.0.0", + wantString: ">= 2.0.0 || >= 3.0.0 (semantic)", + wantFormat: SemanticFormat, + wantSatisfied: false, + }, + { + name: "empty operands returns unknown format", + constraint: combinedConstraint{ + OrOperands: []Constraint{}, + }, + version: NewVersion("1.5.0", SemanticFormat), + wantValue: "", + wantString: " (unknown)", + wantFormat: UnknownFormat, + wantSatisfied: false, + }, + { + name: "rpm format constraint", + constraint: combinedConstraint{ + OrOperands: []Constraint{ + MustGetConstraint(">= 1.0.0", RpmFormat), + MustGetConstraint("< 0.5.0", RpmFormat), + }, + }, + version: NewVersion("1.5.0", RpmFormat), + wantValue: ">= 1.0.0 || < 0.5.0", + wantString: ">= 1.0.0 || < 0.5.0 (rpm)", + wantFormat: RpmFormat, + wantSatisfied: true, + }, + { + name: "nil version returns error", + constraint: combinedConstraint{ + OrOperands: []Constraint{ + MustGetConstraint(">= 1.0.0", SemanticFormat), + }, + }, + version: nil, + wantValue: ">= 1.0.0", + wantString: ">= 1.0.0 (semantic)", + wantFormat: SemanticFormat, + wantSatisfied: false, + wantErr: require.Error, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.wantErr == nil { + tt.wantErr = require.NoError + } + + // test Value() method + gotValue := tt.constraint.Value() + require.Equal(t, tt.wantValue, gotValue) + + // test String() method + gotString := 
tt.constraint.String() + require.Equal(t, tt.wantString, gotString) + + // test Format() method + gotFormat := tt.constraint.Format() + require.Equal(t, tt.wantFormat, gotFormat) + + // test Satisfied() method + gotSatisfied, err := tt.constraint.Satisfied(tt.version) + tt.wantErr(t, err) + + if err != nil { + return + } + require.Equal(t, tt.wantSatisfied, gotSatisfied) + }) + } +} + +func TestCombinedConstraint_Satisfied_WithErrors(t *testing.T) { + tests := []struct { + name string + constraint combinedConstraint + version *Version + wantErr require.ErrorAssertionFunc + }{ + { + name: "error from first constraint", + constraint: combinedConstraint{ + OrOperands: []Constraint{ + mockConstraint{value: ">= 1.0.0", format: SemanticFormat, returnErr: true}, + mockConstraint{value: "< 2.0.0", format: SemanticFormat, satisfied: true}, + }, + }, + version: NewVersion("1.5.0", SemanticFormat), + wantErr: require.Error, + }, + { + name: "error from second constraint when first doesn't satisfy", + constraint: combinedConstraint{ + OrOperands: []Constraint{ + mockConstraint{value: ">= 1.0.0", format: SemanticFormat, satisfied: false}, + mockConstraint{value: "< 2.0.0", format: SemanticFormat, returnErr: true}, + }, + }, + version: NewVersion("1.5.0", SemanticFormat), + wantErr: require.Error, + }, + { + name: "no error when first constraint satisfies", + constraint: combinedConstraint{ + OrOperands: []Constraint{ + mockConstraint{value: ">= 1.0.0", format: SemanticFormat, satisfied: true}, + mockConstraint{value: "< 2.0.0", format: SemanticFormat, returnErr: true}, + }, + }, + version: NewVersion("1.5.0", SemanticFormat), + wantErr: require.NoError, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := tt.constraint.Satisfied(tt.version) + tt.wantErr(t, err) + }) + } +} + +type mockConstraint struct { + value string + format Format + satisfied bool + returnErr bool +} + +func (m mockConstraint) String() string { + return m.value + " (" + strings.ToLower(m.format.String()) + ")" +} + +func (m mockConstraint) Value() string { + return m.value +} + +func (m mockConstraint) Format() Format { + return m.format +} + +func (m mockConstraint) Satisfied(*Version) (bool, error) { + if m.returnErr { + return false, errors.New("mock constraint error") + } + return m.satisfied, nil +} diff --git a/grype/version/comparator.go b/grype/version/comparator.go index e77cab12306..daa88dc99bf 100644 --- a/grype/version/comparator.go +++ b/grype/version/comparator.go @@ -1,26 +1,8 @@ package version -type comparatorGenerator func(constraintUnit) (Comparator, error) - type Comparator interface { + // Compare compares this version to another version. + // This returns -1, 0, or 1 if this version is smaller, + // equal, or larger than the other version, respectively. 
Compare(*Version) (int, error) } - -func finalizeComparisonVersion(version *Version, targetFormat Format) (*Version, error) { - if version == nil { - return nil, ErrNoVersionProvided - } - switch version.Format { - case targetFormat: - return version, nil - case UnknownFormat: - upgradedVersion, err := NewVersion(version.Raw, targetFormat) - if err != nil { - // unable to upgrade the unknown version to the target version - return nil, NewUnsupportedFormatError(targetFormat, version.Format) - } - return upgradedVersion, nil - } - - return nil, NewUnsupportedFormatError(targetFormat, version.Format) -} diff --git a/grype/version/constraint.go b/grype/version/constraint.go index 00a83ce56e0..871f4fbde28 100644 --- a/grype/version/constraint.go +++ b/grype/version/constraint.go @@ -1,61 +1,57 @@ package version -import ( - "fmt" -) +import "fmt" type Constraint interface { fmt.Stringer + Value() string + Format() Format Satisfied(*Version) (bool, error) } func GetConstraint(constStr string, format Format) (Constraint, error) { + var c Constraint + var err error + switch format { case ApkFormat: - return newApkConstraint(constStr) - case SemanticFormat, GemFormat: - return newSemanticConstraint(constStr) + c, err = newGenericConstraint(ApkFormat, constStr) + case SemanticFormat: + c, err = newGenericConstraint(SemanticFormat, constStr) + case BitnamiFormat: + c, err = newGenericConstraint(BitnamiFormat, constStr) + case GemFormat: + c, err = newGenericConstraint(GemFormat, constStr) case DebFormat: - return newDebConstraint(constStr) + c, err = newGenericConstraint(DebFormat, constStr) case GolangFormat: - return newGolangConstraint(constStr) + c, err = newGenericConstraint(GolangFormat, constStr) case MavenFormat: - return newMavenConstraint(constStr) + c, err = newGenericConstraint(MavenFormat, constStr) case RpmFormat: - return newRpmConstraint(constStr) + c, err = newGenericConstraint(RpmFormat, constStr) case PythonFormat: - return newPep440Constraint(constStr) + c, err = newGenericConstraint(PythonFormat, constStr) case KBFormat: - return newKBConstraint(constStr) + c, err = newKBConstraint(constStr) case PortageFormat: - return newPortageConstraint(constStr) + c, err = newGenericConstraint(PortageFormat, constStr) case JVMFormat: - return newJvmConstraint(constStr) + c, err = newGenericConstraint(JVMFormat, constStr) case UnknownFormat: - return newFuzzyConstraint(constStr, "unknown") + c, err = newFuzzyConstraint(constStr, "unknown") + default: + return nil, fmt.Errorf("could not find constraint for given format: %s", format) } - return nil, fmt.Errorf("could not find constraint for given format: %s", format) + + return c, err } // MustGetConstraint is meant for testing only, do not use within the library func MustGetConstraint(constStr string, format Format) Constraint { - constraint, err := GetConstraint(constStr, format) + c, err := GetConstraint(constStr, format) if err != nil { panic(err) } - return constraint -} - -// NonFatalConstraintError should be used any time an unexpected but recoverable condition is encountered while -// checking version constraint satisfaction. The error should get returned by any implementer of the Constraint -// interface. 
If returned by the Satisfied method on the Constraint interface, this error will be caught and -// logged as a warning in the FindMatchesByPackageDistro function in grype/matcher/common/distro_matchers.go -type NonFatalConstraintError struct { - constraint Constraint - version *Version - message string -} - -func (e NonFatalConstraintError) Error() string { - return fmt.Sprintf("Matching raw constraint %s against version %s caused a non-fatal error: %s", e.constraint, e.version, e.message) + return c } diff --git a/grype/version/constraint_expression_test.go b/grype/version/constraint_expression_test.go deleted file mode 100644 index 664fa465443..00000000000 --- a/grype/version/constraint_expression_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package version - -import ( - "testing" - - "github.com/go-test/deep" -) - -func TestScanExpression(t *testing.T) { - tests := []struct { - phrase string - expected [][]string - err bool - }{ - { - phrase: "x,y||z", - expected: [][]string{ - { - "x", - "y", - }, - { - "z", - }, - }, - }, - { - phrase: "<1.0, >=2.0|| 3.0 || =4.0", - expected: [][]string{ - { - "<1.0", - ">=2.0", - }, - { - "3.0", - }, - { - "=4.0", - }, - }, - }, - { - // parenthetical expression are not supported yet - phrase: "(<1.0, >=2.0|| 3.0) || =4.0", - err: true, - }, - { - phrase: ` > 1.0, <= 2.0,,, || = 3.0 `, - expected: [][]string{ - { - ">1.0", - "<=2.0", - }, - { - "=3.0", - }, - }, - }, - { - phrase: ` > 1.0, <= " (2.0||),,, ", || = 3.0 `, - expected: [][]string{ - { - ">1.0", - `<=" (2.0||),,, "`, - }, - { - "=3.0", - }, - }, - }, - } - - for _, test := range tests { - t.Run(test.phrase, func(t *testing.T) { - actual, err := scanExpression(test.phrase) - if err != nil && test.err == false { - t.Fatalf("expected no error, got %+v", err) - } else if err == nil && test.err { - t.Fatalf("expected an error but did not get one") - } - - for _, d := range deep.Equal(test.expected, actual) { - t.Errorf("difference: %+v", d) - } - - }) - } -} diff --git a/grype/version/constraint_unit.go b/grype/version/constraint_unit.go deleted file mode 100644 index 23aef540ea5..00000000000 --- a/grype/version/constraint_unit.go +++ /dev/null @@ -1,76 +0,0 @@ -package version - -import ( - "fmt" - "regexp" - "strconv" - "strings" - - "github.com/anchore/grype/internal/stringutil" -) - -// operator group only matches on range operators (GT, LT, GTE, LTE, E) -// version group matches on everything except for whitespace and operators (range or boolean) -var constraintPartPattern = regexp.MustCompile(`\s*(?P[><=]*)\s*(?P.+)`) - -type constraintUnit struct { - rangeOperator operator - version string -} - -func parseUnit(phrase string) (*constraintUnit, error) { - match := stringutil.MatchCaptureGroups(constraintPartPattern, phrase) - version, exists := match["version"] - if !exists { - return nil, nil - } - - version = strings.Trim(version, " ") - - // version may have quotes, attempt to unquote it (ignore errors) - unquoted, err := trimQuotes(version) - if err == nil { - version = unquoted - } - - op, err := parseOperator(match["operator"]) - if err != nil { - return nil, fmt.Errorf("unable to parse constraint operator=%q: %+v", match["operator"], err) - } - return &constraintUnit{ - rangeOperator: op, - version: version, - }, nil -} - -// TrimQuotes will attempt to remove double quotes. -// If removing double quotes is unsuccessful, it will attempt to remove single quotes. -// If neither operation is successful, it will return an error. 
-func trimQuotes(s string) (string, error) { - unquoted, err := strconv.Unquote(s) - switch { - case err == nil: - return unquoted, nil - case strings.HasPrefix(s, "'") && strings.HasSuffix(s, "'"): - return strings.Trim(s, "'"), nil - default: - return s, fmt.Errorf("string %s is not single or double quoted", s) - } -} - -func (c *constraintUnit) Satisfied(comparison int) bool { - switch c.rangeOperator { - case EQ: - return comparison == 0 - case GT: - return comparison > 0 - case GTE: - return comparison >= 0 - case LT: - return comparison < 0 - case LTE: - return comparison <= 0 - default: - panic(fmt.Errorf("unknown operator: %s", c.rangeOperator)) - } -} diff --git a/grype/version/deb_constraint.go b/grype/version/deb_constraint.go deleted file mode 100644 index d70125f1efb..00000000000 --- a/grype/version/deb_constraint.go +++ /dev/null @@ -1,67 +0,0 @@ -//nolint:dupl -package version - -import "fmt" - -type debConstraint struct { - raw string - expression constraintExpression -} - -func newDebConstraint(raw string) (debConstraint, error) { - if raw == "" { - // an empty constraint is always satisfied - return debConstraint{}, nil - } - - constraints, err := newConstraintExpression(raw, newDebComparator) - if err != nil { - return debConstraint{}, fmt.Errorf("unable to parse deb constraint phrase: %w", err) - } - return debConstraint{ - raw: raw, - expression: constraints, - }, nil -} - -func newDebComparator(unit constraintUnit) (Comparator, error) { - ver, err := newDebVersion(unit.version) - if err != nil { - return nil, fmt.Errorf("unable to parse constraint version (%s): %w", unit.version, err) - } - return ver, nil -} - -func (c debConstraint) supported(format Format) bool { - return format == DebFormat -} - -func (c debConstraint) Satisfied(version *Version) (bool, error) { - if c.raw == "" && version != nil { - // an empty constraint is always satisfied - return true, nil - } else if version == nil { - if c.raw != "" { - // a non-empty constraint with no version given should always fail - return false, nil - } - return true, nil - } - - if !c.supported(version.Format) { - return false, NewUnsupportedFormatError(DebFormat, version.Format) - } - - if version.rich.debVer == nil { - return false, fmt.Errorf("no rich deb version given: %+v", version) - } - - return c.expression.satisfied(version) -} - -func (c debConstraint) String() string { - if c.raw == "" { - return "none (deb)" - } - return fmt.Sprintf("%s (deb)", c.raw) -} diff --git a/grype/version/deb_constraint_test.go b/grype/version/deb_constraint_test.go deleted file mode 100644 index 1826363b46c..00000000000 --- a/grype/version/deb_constraint_test.go +++ /dev/null @@ -1,74 +0,0 @@ -package version - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestVersionDeb(t *testing.T) { - tests := []testCase{ - // empty values - {version: "2.3.1", constraint: "", satisfied: true}, - // compound conditions - {version: "2.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "1.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: true}, - {version: "2.0.0", constraint: "> 1.0.0, <= 2.0.0", satisfied: true}, - {version: "2.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "1.0.0", constraint: ">= 1.0.0, < 2.0.0", satisfied: true}, - {version: "1.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "0.9.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "1.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, - {version: 
"0.2.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, - {version: "0.0.1", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "0.6.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "2.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - // fixed-in scenarios - {version: "2.3.1", constraint: "< 2.0.0", satisfied: false}, - {version: "2.3.1", constraint: "< 2.0", satisfied: false}, - {version: "2.3.1", constraint: "< 2", satisfied: false}, - {version: "2.3.1", constraint: "< 2.3", satisfied: false}, - {version: "2.3.1", constraint: "< 2.3.1", satisfied: false}, - {version: "2.3.1", constraint: "< 2.3.2", satisfied: true}, - {version: "2.3.1", constraint: "< 2.4", satisfied: true}, - {version: "2.3.1", constraint: "< 3", satisfied: true}, - {version: "2.3.1", constraint: "< 3.0", satisfied: true}, - {version: "2.3.1", constraint: "< 3.0.0", satisfied: true}, - {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2.0.0", satisfied: false}, - {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2.0", satisfied: false}, - {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2", satisfied: false}, - {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2.3", satisfied: false}, - {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2.3.1", satisfied: false}, - {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2.3.2", satisfied: true}, - {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2.4", satisfied: true}, - {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <3", satisfied: true}, - {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <3.0", satisfied: true}, - {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <3.0.0", satisfied: true}, - {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u151-2.6.11-2ubuntu0.14.04.1", satisfied: false}, - {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u151-2.6.11", satisfied: false}, - {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u151-2.7", satisfied: false}, - {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u151", satisfied: false}, - {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u150", satisfied: false}, - {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u152", satisfied: true}, - {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u152-2.6.11-2ubuntu0.14.04.1", satisfied: true}, - {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 8u1-2.6.11-2ubuntu0.14.04.1", satisfied: true}, - {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43", satisfied: false}, - {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0", satisfied: false}, - {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0.2357", satisfied: false}, - {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0.2357.81", satisfied: false}, - {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0.2357.81-0ubuntu0.14.04.1.1089", satisfied: false}, - {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0.2357.82-0ubuntu0.14.04.1.1089", satisfied: true}, - {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0.2358-0ubuntu0.14.04.1.1089", satisfied: true}, - {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.1-0ubuntu0.14.04.1.1089", satisfied: true}, - {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<44-0ubuntu0.14.04.1.1089", satisfied: true}, - } - - for _, test := range tests { - t.Run(test.tName(), 
func(t *testing.T) { - constraint, err := newDebConstraint(test.constraint) - assert.NoError(t, err, "unexpected error from newDebConstraint: %v", err) - - test.assertVersionConstraint(t, DebFormat, constraint) - }) - } -} diff --git a/grype/version/deb_version.go b/grype/version/deb_version.go index 08d412668b8..8bf9fb8f1fb 100644 --- a/grype/version/deb_version.go +++ b/grype/version/deb_version.go @@ -1,34 +1,34 @@ package version import ( - "fmt" - deb "github.com/knqyf263/go-deb-version" ) +var _ Comparator = (*debVersion)(nil) + type debVersion struct { obj deb.Version } -func newDebVersion(raw string) (*debVersion, error) { +func newDebVersion(raw string) (debVersion, error) { ver, err := deb.NewVersion(raw) if err != nil { - return nil, err + return debVersion{}, invalidFormatError(DebFormat, raw, err) } - return &debVersion{ + return debVersion{ obj: ver, }, nil } -func (d *debVersion) Compare(other *Version) (int, error) { - other, err := finalizeComparisonVersion(other, DebFormat) - if err != nil { - return -1, err +func (v debVersion) Compare(other *Version) (int, error) { + if other == nil { + return -1, ErrNoVersionProvided } - if other.rich.debVer == nil { - return -1, fmt.Errorf("given empty debVersion object") + o, err := newDebVersion(other.Raw) + if err != nil { + return 0, err } - return other.rich.debVer.obj.Compare(d.obj), nil + return v.obj.Compare(o.obj), nil } diff --git a/grype/version/deb_version_test.go b/grype/version/deb_version_test.go index d3cceb42c9c..cade7c5bef5 100644 --- a/grype/version/deb_version_test.go +++ b/grype/version/deb_version_test.go @@ -4,11 +4,77 @@ import ( "strings" "testing" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestDebVersionCompare(t *testing.T) { +func TestDebVersion_Constraint(t *testing.T) { + tests := []testCase{ + // empty values + {version: "2.3.1", constraint: "", satisfied: true}, + // compound conditions + {version: "2.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "1.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: true}, + {version: "2.0.0", constraint: "> 1.0.0, <= 2.0.0", satisfied: true}, + {version: "2.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "1.0.0", constraint: ">= 1.0.0, < 2.0.0", satisfied: true}, + {version: "1.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.9.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "1.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.2.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.0.1", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.6.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "2.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + // fixed-in scenarios + {version: "2.3.1", constraint: "< 2.0.0", satisfied: false}, + {version: "2.3.1", constraint: "< 2.0", satisfied: false}, + {version: "2.3.1", constraint: "< 2", satisfied: false}, + {version: "2.3.1", constraint: "< 2.3", satisfied: false}, + {version: "2.3.1", constraint: "< 2.3.1", satisfied: false}, + {version: "2.3.1", constraint: "< 2.3.2", satisfied: true}, + {version: "2.3.1", constraint: "< 2.4", satisfied: true}, + {version: "2.3.1", constraint: "< 3", satisfied: true}, + {version: "2.3.1", constraint: "< 3.0", satisfied: true}, + {version: "2.3.1", constraint: "< 3.0.0", satisfied: true}, + {version: 
"2.3.1-1ubuntu0.14.04.1", constraint: " <2.0.0", satisfied: false}, + {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2.0", satisfied: false}, + {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2", satisfied: false}, + {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2.3", satisfied: false}, + {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2.3.1", satisfied: false}, + {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2.3.2", satisfied: true}, + {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <2.4", satisfied: true}, + {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <3", satisfied: true}, + {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <3.0", satisfied: true}, + {version: "2.3.1-1ubuntu0.14.04.1", constraint: " <3.0.0", satisfied: true}, + {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u151-2.6.11-2ubuntu0.14.04.1", satisfied: false}, + {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u151-2.6.11", satisfied: false}, + {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u151-2.7", satisfied: false}, + {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u151", satisfied: false}, + {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u150", satisfied: false}, + {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u152", satisfied: true}, + {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 7u152-2.6.11-2ubuntu0.14.04.1", satisfied: true}, + {version: "7u151-2.6.11-2ubuntu0.14.04.1", constraint: " < 8u1-2.6.11-2ubuntu0.14.04.1", satisfied: true}, + {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43", satisfied: false}, + {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0", satisfied: false}, + {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0.2357", satisfied: false}, + {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0.2357.81", satisfied: false}, + {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0.2357.81-0ubuntu0.14.04.1.1089", satisfied: false}, + {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0.2357.82-0ubuntu0.14.04.1.1089", satisfied: true}, + {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.0.2358-0ubuntu0.14.04.1.1089", satisfied: true}, + {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<43.1-0ubuntu0.14.04.1.1089", satisfied: true}, + {version: "43.0.2357.81-0ubuntu0.14.04.1.1089", constraint: "<44-0ubuntu0.14.04.1.1089", satisfied: true}, + } + + for _, test := range tests { + t.Run(test.tName(), func(t *testing.T) { + constraint, err := GetConstraint(test.constraint, DebFormat) + require.NoError(t, err, "unexpected error from GetConstraint: %v", err) + + test.assertVersionConstraint(t, DebFormat, constraint) + }) + } +} + +func TestDebVersion_Compare(t *testing.T) { tests := []struct { name string thisVersion string @@ -24,22 +90,6 @@ func TestDebVersionCompare(t *testing.T) { otherFormat: DebFormat, expectError: false, }, - { - name: "different format returns error", - thisVersion: "1.2.3-1", - otherVersion: "1.2.3", - otherFormat: SemanticFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, - { - name: "different format returns error - apk", - thisVersion: "1.2.3-1", - otherVersion: "1.2.3-r4", - otherFormat: ApkFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, { name: "unknown format attempts upgrade - valid deb format", thisVersion: "1.2.3-1", @@ -53,7 +103,7 @@ func 
TestDebVersionCompare(t *testing.T) { otherVersion: "not-valid-deb-format", otherFormat: UnknownFormat, expectError: true, - errorSubstring: "unsupported version format for comparison", + errorSubstring: "upstream_version must start with digit", }, } @@ -62,36 +112,35 @@ func TestDebVersionCompare(t *testing.T) { thisVer, err := newDebVersion(test.thisVersion) require.NoError(t, err) - otherVer, err := NewVersion(test.otherVersion, test.otherFormat) - require.NoError(t, err) + otherVer := NewVersion(test.otherVersion, test.otherFormat) result, err := thisVer.Compare(otherVer) if test.expectError { - assert.Error(t, err) + require.Error(t, err) if test.errorSubstring != "" { - assert.True(t, strings.Contains(err.Error(), test.errorSubstring), + require.True(t, strings.Contains(err.Error(), test.errorSubstring), "Expected error to contain '%s', got: %v", test.errorSubstring, err) } } else { - assert.NoError(t, err) - assert.Contains(t, []int{-1, 0, 1}, result, "Expected comparison result to be -1, 0, or 1") + require.NoError(t, err) + require.Contains(t, []int{-1, 0, 1}, result, "Expected comparison result to be -1, 0, or 1") } }) } } -func TestDebVersionCompareEdgeCases(t *testing.T) { +func TestDebVersion_Compare_EdgeCases(t *testing.T) { tests := []struct { name string - setupFunc func() (*debVersion, *Version) + setupFunc func(testing.TB) (*Version, *Version) expectError bool errorSubstring string }{ { name: "nil version object", - setupFunc: func() (*debVersion, *Version) { - thisVer, _ := newDebVersion("1.2.3-1") + setupFunc: func(t testing.TB) (*Version, *Version) { + thisVer := NewVersion("1.2.3-1", DebFormat) return thisVer, nil }, expectError: true, @@ -99,31 +148,30 @@ func TestDebVersionCompareEdgeCases(t *testing.T) { }, { name: "empty debVersion in other object", - setupFunc: func() (*debVersion, *Version) { - thisVer, _ := newDebVersion("1.2.3-1") - + setupFunc: func(t testing.TB) (*Version, *Version) { + thisVer := NewVersion("1.2.3-1", DebFormat) otherVer := &Version{ Raw: "1.2.3-2", Format: DebFormat, - rich: rich{}, // debVer will be nil } return thisVer, otherVer }, - expectError: true, - errorSubstring: "given empty debVersion object", + expectError: false, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer, otherVer := test.setupFunc() + thisVer, otherVer := test.setupFunc(t) _, err := thisVer.Compare(otherVer) - assert.Error(t, err) + if test.expectError { + require.Error(t, err) + } if test.errorSubstring != "" { - assert.True(t, strings.Contains(err.Error(), test.errorSubstring), + require.True(t, strings.Contains(err.Error(), test.errorSubstring), "Expected error to contain '%s', got: %v", test.errorSubstring, err) } }) diff --git a/grype/version/error.go b/grype/version/error.go index d0244035cd0..41a8e3bea09 100644 --- a/grype/version/error.go +++ b/grype/version/error.go @@ -5,32 +5,55 @@ import ( "fmt" ) +// ErrUnsupportedVersion is returned when a version string cannot be parsed because the value is known +// to cause issues or is otherwise problematic (e.g. golang "devel" version). +var ErrUnsupportedVersion = fmt.Errorf("unsupported version value") + +// ErrNoVersionProvided is returned when a version is attempted to be compared, but no other version is provided to compare against. 
var ErrNoVersionProvided = errors.New("no version provided for comparison") -// UnsupportedFormatError represents an error when a format doesn't match the expected format -type UnsupportedFormatError struct { +// UnsupportedComparisonError represents an error when a format doesn't match the expected format +type UnsupportedComparisonError struct { Left Format - Right Format + Right *Version } -// NewUnsupportedFormatError creates a new UnsupportedFormatError -func NewUnsupportedFormatError(left, right Format) *UnsupportedFormatError { - return &UnsupportedFormatError{ +// newUnsupportedFormatError creates a new UnsupportedComparisonError +func newUnsupportedFormatError(left Format, right *Version) *UnsupportedComparisonError { + return &UnsupportedComparisonError{ Left: left, Right: right, } } -func (e *UnsupportedFormatError) Error() string { - return fmt.Sprintf("(%s) unsupported version format for comparison: %s", e.Left, e.Right) +func (e *UnsupportedComparisonError) Error() string { + return fmt.Sprintf("(%s) unsupported version comparison: value=%q format=%q", e.Left, e.Right.Raw, e.Right.Format) } -func (e *UnsupportedFormatError) Is(target error) bool { - var t *UnsupportedFormatError +func (e *UnsupportedComparisonError) Is(target error) bool { + var t *UnsupportedComparisonError ok := errors.As(target, &t) if !ok { return false } return (t.Left == UnknownFormat || t.Left == e.Left) && - (t.Right == UnknownFormat || t.Right == e.Right) + (t.Right.Format == UnknownFormat || t.Right == e.Right) +} + +func invalidFormatError(format Format, raw string, err error) error { + return fmt.Errorf("invalid %s version from '%s': %w", format.String(), raw, err) +} + +// NonFatalConstraintError should be used any time an unexpected but recoverable condition is encountered while +// checking version constraint satisfaction. The error should get returned by any implementer of the Constraint +// interface. 
If returned by the Satisfied method on the Constraint interface, this error will be caught and +// logged as a warning in the FindMatchesByPackageDistro function in grype/matcher/common/distro_matchers.go +type NonFatalConstraintError struct { + constraint Constraint + version *Version + message string +} + +func (e NonFatalConstraintError) Error() string { + return fmt.Sprintf("matching raw constraint %s against version %s caused a non-fatal error: %s", e.constraint, e.version, e.message) } diff --git a/grype/version/format.go b/grype/version/format.go index c4a47c89570..0ca022f226a 100644 --- a/grype/version/format.go +++ b/grype/version/format.go @@ -20,6 +20,7 @@ const ( PortageFormat GolangFormat JVMFormat + BitnamiFormat ) type Format int @@ -37,6 +38,7 @@ var formatStr = []string{ "Portage", "Go", "JVM", + "Bitnami", } var Formats = []Format{ @@ -51,6 +53,7 @@ var Formats = []Format{ PortageFormat, GolangFormat, JVMFormat, + BitnamiFormat, } func ParseFormat(userStr string) Format { @@ -59,6 +62,8 @@ func ParseFormat(userStr string) Format { return SemanticFormat case strings.ToLower(ApkFormat.String()), "apk": return ApkFormat + case strings.ToLower(BitnamiFormat.String()), "bitnami": + return BitnamiFormat case strings.ToLower(DebFormat.String()), "dpkg": return DebFormat case strings.ToLower(GolangFormat.String()), "go": @@ -85,6 +90,8 @@ func FormatFromPkg(p pkg.Package) Format { switch p.Type { case syftPkg.ApkPkg: return ApkFormat + case syftPkg.BitnamiPkg: + return BitnamiFormat case syftPkg.DebPkg: return DebFormat case syftPkg.JavaPkg: diff --git a/grype/version/format_test.go b/grype/version/format_test.go index 5471ebaa159..6194c3aed47 100644 --- a/grype/version/format_test.go +++ b/grype/version/format_test.go @@ -17,6 +17,10 @@ func TestParseFormat(t *testing.T) { input: "dpkg", format: DebFormat, }, + { + input: "bitnami", + format: BitnamiFormat, + }, { input: "maven", format: MavenFormat, @@ -56,6 +60,13 @@ func TestFormatFromPkgType(t *testing.T) { p pkg.Package format Format }{ + { + name: "bitnami", + p: pkg.Package{ + Type: syftPkg.BitnamiPkg, + }, + format: BitnamiFormat, + }, { name: "deb", p: pkg.Package{ diff --git a/grype/version/fuzzy_constraint.go b/grype/version/fuzzy_constraint.go index fd68b88fa5d..0a126ed3660 100644 --- a/grype/version/fuzzy_constraint.go +++ b/grype/version/fuzzy_constraint.go @@ -8,37 +8,39 @@ import ( hashiVer "github.com/anchore/go-version" ) -// derived from https://semver.org/, but additionally matches partial versions (e.g. "2.0") -var pseudoSemverPattern = regexp.MustCompile(`^(0|[1-9]\d*)(\.(0|[1-9]\d*))?(\.(0|[1-9]\d*))?(?:(-|alpha|beta|rc)((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`) +// derived from https://semver.org/, but additionally matches: +// - partial versions (e.g. "2.0") +// - optional prefix "v" (e.g. 
"v1.0.0") +var pseudoSemverPattern = regexp.MustCompile(`^v?(0|[1-9]\d*)(\.(0|[1-9]\d*))?(\.(0|[1-9]\d*))?(?:(-|alpha|beta|rc)((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`) type fuzzyConstraint struct { - rawPhrase string - phraseHint string - semanticConstraint *hashiVer.Constraints - constraints constraintExpression + RawPhrase string + PhraseHint string + SemanticConstraint *hashiVer.Constraints + Constraints simpleRangeExpression } -func newFuzzyConstraint(phrase, hint string) (*fuzzyConstraint, error) { +func newFuzzyConstraint(phrase, hint string) (fuzzyConstraint, error) { if phrase == "" { // an empty constraint is always satisfied - return &fuzzyConstraint{ - rawPhrase: phrase, - phraseHint: hint, + return fuzzyConstraint{ + RawPhrase: phrase, + PhraseHint: hint, }, nil } - constraints, err := newConstraintExpression(phrase, newFuzzyComparator) + constraints, err := parseRangeExpression(phrase) if err != nil { - return nil, fmt.Errorf("could not create fuzzy constraint: %+v", err) + return fuzzyConstraint{}, fmt.Errorf("could not create fuzzy constraint: %+v", err) } var semverConstraint *hashiVer.Constraints // check all version unit phrases to see if this is a valid semver constraint valid := true check: - for _, units := range constraints.units { + for _, units := range constraints.Units { for _, unit := range units { - if !pseudoSemverPattern.MatchString(unit.version) { + if !pseudoSemverPattern.MatchString(unit.Version) { valid = false break check } @@ -49,28 +51,20 @@ check: semverConstraint = &value } - return &fuzzyConstraint{ - rawPhrase: phrase, - phraseHint: hint, - constraints: constraints, - semanticConstraint: semverConstraint, + return fuzzyConstraint{ + RawPhrase: phrase, + PhraseHint: hint, + Constraints: constraints, + SemanticConstraint: semverConstraint, }, nil } -func newFuzzyComparator(unit constraintUnit) (Comparator, error) { - ver, err := newFuzzyVersion(unit.version) - if err != nil { - return nil, fmt.Errorf("unable to parse constraint version (%s): %w", unit.version, err) - } - return &ver, nil -} - -func (f *fuzzyConstraint) Satisfied(verObj *Version) (bool, error) { - if f.rawPhrase == "" && verObj != nil { +func (f fuzzyConstraint) Satisfied(verObj *Version) (bool, error) { + if f.RawPhrase == "" && verObj != nil { // an empty constraint is always satisfied return true, nil } else if verObj == nil { - if f.rawPhrase != "" { + if f.RawPhrase != "" { // a non-empty constraint with no version given should always fail return false, nil } @@ -81,11 +75,11 @@ func (f *fuzzyConstraint) Satisfied(verObj *Version) (bool, error) { // rebuild temp constraint based off of ver obj if verObj.Format != UnknownFormat { - newConstaint, err := GetConstraint(f.rawPhrase, verObj.Format) + newConstraint, err := GetConstraint(f.RawPhrase, verObj.Format) // check if constraint is not fuzzyConstraint - _, ok := newConstaint.(*fuzzyConstraint) + _, ok := newConstraint.(fuzzyConstraint) if err == nil && !ok { - satisfied, err := newConstaint.Satisfied(verObj) + satisfied, err := newConstraint.Satisfied(verObj) if err == nil { return satisfied, nil } @@ -93,25 +87,35 @@ func (f *fuzzyConstraint) Satisfied(verObj *Version) (bool, error) { } // attempt semver first, then fallback to fuzzy part matching... 
- if f.semanticConstraint != nil { + if f.SemanticConstraint != nil { if pseudoSemverPattern.MatchString(version) { - if semver, err := newSemanticVersion(version); err == nil && semver != nil { - return f.semanticConstraint.Check(semver.verObj), nil + // we're stricter about accepting looser semver rules here since we have no context about + // the true format of the version, thus we want to reduce the chance of false negatives + if semver, err := newSemanticVersion(version, true); err == nil { + return f.SemanticConstraint.Check(semver.obj), nil } } } // semver didn't work, use fuzzy part matching instead... - return f.constraints.satisfied(verObj) + return f.Constraints.satisfied(UnknownFormat, verObj) } -func (f *fuzzyConstraint) String() string { - if f.rawPhrase == "" { +func (f fuzzyConstraint) Format() Format { + return UnknownFormat +} + +func (f fuzzyConstraint) String() string { + if f.RawPhrase == "" { return "none (unknown)" } - if f.phraseHint != "" { - return fmt.Sprintf("%s (%s)", f.rawPhrase, f.phraseHint) + if f.PhraseHint != "" { + return fmt.Sprintf("%s (%s)", f.RawPhrase, f.PhraseHint) } - return fmt.Sprintf("%s (unknown)", f.rawPhrase) + return fmt.Sprintf("%s (unknown)", f.RawPhrase) +} + +func (f fuzzyConstraint) Value() string { + return f.RawPhrase } // Note: the below code is from https://github.com/facebookincubator/nvdtools/blob/688794c4d3a41929eeca89304e198578d4595d53/cvefeed/nvd/smartvercmp.go (apache V2) @@ -176,6 +180,8 @@ func parseVersionParts(v string) (int, int, int) { // !"#$%&'()*+,-./ are dec 33 to 47, :;<=>?@ are dec 58 to 64, [\]^_` are dec 91 to 96 and {|}~ are dec 123 to 126. // So, punctuation is in dec 33-126 range except 48-57, 65-90 and 97-122 gaps. // This inverse logic allows for early short-circuiting for most of the chars and shaves ~20ns in benchmarks. + // linters might yell about De Morgan's law here - we ignore them in this case + //nolint:staticcheck return b >= '!'
&& b <= '~' && !(b > '/' && b < ':' || b > '@' && b < '[' || diff --git a/grype/version/fuzzy_constraint_test.go b/grype/version/fuzzy_constraint_test.go index 650ad50bafe..da97a48334d 100644 --- a/grype/version/fuzzy_constraint_test.go +++ b/grype/version/fuzzy_constraint_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/assert" ) -func TestSmartVerCmp(t *testing.T) { +func TestFuzzyVersionComparison(t *testing.T) { cases := []struct { v1, v2 string ret int @@ -69,7 +69,7 @@ func TestSmartVerCmp(t *testing.T) { } } -func TestFuzzyConstraintSatisfaction(t *testing.T) { +func TestFuzzyVersion_Constraint(t *testing.T) { tests := []testCase{ { name: "empty constraint", @@ -193,7 +193,7 @@ func TestFuzzyConstraintSatisfaction(t *testing.T) { }, { name: "bad semver (eq)", - version: "5a2", + version: "5a2", // with the hashicorp lib, without the strict check, this is interpreted as 5.0.0-alpha.2 constraint: "=5a2", satisfied: true, }, @@ -332,7 +332,7 @@ func TestFuzzyConstraintSatisfaction(t *testing.T) { }, { name: "openssl version with letter suffix and r0 are alphabetically greater than their versions", - version: "1.0.2k-r0", + version: "1.0.2k-r0", // the lib is saying that there is a prerelease starting at "k-r0" constraint: ">= 1.0.2", satisfied: true, }, @@ -376,7 +376,7 @@ func TestFuzzyConstraintSatisfaction(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - constraint, err := newFuzzyConstraint(test.constraint, "") + constraint, err := GetConstraint(test.constraint, UnknownFormat) assert.NoError(t, err, "unexpected error from newFuzzyConstraint: %v", err) test.assertVersionConstraint(t, UnknownFormat, constraint) diff --git a/grype/version/fuzzy_version.go b/grype/version/fuzzy_version.go index 9f31438ac46..cc0ca528b6e 100644 --- a/grype/version/fuzzy_version.go +++ b/grype/version/fuzzy_version.go @@ -1,8 +1,6 @@ package version -import ( - "fmt" -) +var _ Comparator = (*fuzzyVersion)(nil) type fuzzyVersion struct { semVer *semanticVersion @@ -11,31 +9,38 @@ type fuzzyVersion struct { //nolint:unparam func newFuzzyVersion(raw string) (fuzzyVersion, error) { - var semVer *semanticVersion - - candidate, err := newSemanticVersion(raw) - if err == nil { - semVer = candidate - } - return fuzzyVersion{ - semVer: semVer, + semVer: newFuzzySemver(raw), raw: raw, }, nil } -func (v *fuzzyVersion) Compare(other *Version) (int, error) { +func (v fuzzyVersion) Compare(other *Version) (int, error) { if other == nil { return -1, ErrNoVersionProvided } - // check if both versions can be compared as semvers... - if other.Format == SemanticFormat && v.semVer != nil { - if other.rich.semVer == nil { - return -1, fmt.Errorf("given empty semver object (fuzzy)") - } - return other.rich.semVer.verObj.Compare(v.semVer.verObj), nil + + semver := newFuzzySemver(other.Raw) + if semver != nil && v.semVer != nil && v.semVer.obj != nil && semver.obj != nil { + return v.semVer.obj.Compare(semver.obj), nil } // one or both are no semver compliant, use fuzzy comparison - return fuzzyVersionComparison(other.Raw, v.raw), nil + return fuzzyVersionComparison(v.raw, other.Raw), nil +} + +func newFuzzySemver(raw string) *semanticVersion { + // we need to be a little more strict here than the hashicorp lib, but not as strict as the semver spec. + // a good example of this is being able to reason about openssl versions like "1.0.2k" or "1.0.2l" which are + // not semver compliant, but we still want to be able to compare them.
But the hashicorp lib will not parse + // the postfix letter as a prerelease version, which is wrong. In these cases we want a true fuzzy version + // comparison. + if pseudoSemverPattern.MatchString(raw) { + candidate, err := newSemanticVersion(raw, false) + if err == nil { + return &candidate + } + } + + return nil } diff --git a/grype/version/fuzzy_version_test.go b/grype/version/fuzzy_version_test.go index e79241a6fb0..5b7315b1e61 100644 --- a/grype/version/fuzzy_version_test.go +++ b/grype/version/fuzzy_version_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestFuzzyVersionCompare(t *testing.T) { +func TestFuzzyVersion_Compare(t *testing.T) { tests := []struct { name string thisVersion string @@ -56,22 +56,14 @@ func TestFuzzyVersionCompare(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer := fuzzyVersion{ - raw: test.thisVersion, - } - - // if thisVersion is semantic-compatible, populate the semVer field - if semver, err := newSemanticVersion(test.thisVersion); err == nil { - thisVer.semVer = semver - } + thisVer := NewVersion(test.thisVersion, UnknownFormat) // explicitly use the fuzzy version format - otherVer, err := NewVersion(test.otherVersion, test.otherFormat) - require.NoError(t, err) + otherVer := NewVersion(test.otherVersion, test.otherFormat) result, err := thisVer.Compare(otherVer) if test.expectError { - assert.Error(t, err) + require.Error(t, err) if test.errorSubstring != "" { assert.True(t, strings.Contains(err.Error(), test.errorSubstring), "Expected error to contain '%s', got: %v", test.errorSubstring, err) @@ -84,60 +76,176 @@ func TestFuzzyVersionCompare(t *testing.T) { } } -func TestFuzzyVersionCompareEdgeCases(t *testing.T) { +func TestFuzzyVersion_Compare_EdgeCases(t *testing.T) { tests := []struct { name string - setupFunc func() (*fuzzyVersion, *Version) - expectError bool + setupFunc func(tb testing.TB) (*Version, *Version) + expectError require.ErrorAssertionFunc errorSubstring string + wantComparison int }{ { name: "nil version object", - setupFunc: func() (*fuzzyVersion, *Version) { - thisVer := &fuzzyVersion{ - raw: "1.2.3", - } - if semver, err := newSemanticVersion("1.2.3"); err == nil { - thisVer.semVer = semver - } + setupFunc: func(t testing.TB) (*Version, *Version) { + thisVer := NewVersion("1.2.3", UnknownFormat) + return thisVer, nil }, - expectError: true, + expectError: require.Error, errorSubstring: "no version provided for comparison", }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + if test.expectError == nil { + test.expectError = require.NoError + } + thisVer, otherVer := test.setupFunc(t) + + n, err := thisVer.Compare(otherVer) + test.expectError(t, err) + if test.errorSubstring != "" { + assert.True(t, strings.Contains(err.Error(), test.errorSubstring), + "Expected error to contain '%s', got: %v", test.errorSubstring, err) + } + if err != nil { + return + } + assert.Equal(t, test.wantComparison, n, "Expected comparison result to be %d", test.wantComparison) + }) + } +} + +func TestFuzzyVersion_Compare_NilScenarios(t *testing.T) { + tests := []struct { + name string + setupFunc func(tb testing.TB) (fuzzyVersion, *Version) + expectFallback bool // expect fuzzy comparison fallback + }{ + { + name: "both v.semVer and other semver are nil", + setupFunc: func(t testing.TB) (fuzzyVersion, *Version) { + // create fuzzyVersion with nil semVer + fv := fuzzyVersion{ + semVer: nil, + raw: "abc123", + } + + otherVer := NewVersion("def456", 
UnknownFormat) + + return fv, otherVer + }, + expectFallback: true, + }, { - name: "semantic format but empty semver object", - setupFunc: func() (*fuzzyVersion, *Version) { - thisVer := &fuzzyVersion{ - raw: "1.2.3", + name: "v.semVer is nil, other semver is not nil", + setupFunc: func(t testing.TB) (fuzzyVersion, *Version) { + // create fuzzyVersion with nil semVer + fv := fuzzyVersion{ + semVer: nil, + raw: "abc123", } - if semver, err := newSemanticVersion("1.2.3"); err == nil { - thisVer.semVer = semver + + otherVer := NewVersion("1.2.3", UnknownFormat) + + return fv, otherVer + }, + expectFallback: true, + }, + { + name: "v.semVer is not nil but v.semVer.obj is nil", + setupFunc: func(t testing.TB) (fuzzyVersion, *Version) { + // create fuzzyVersion with semVer that has nil obj + fv := fuzzyVersion{ + semVer: &semanticVersion{obj: nil}, + raw: "abc123", + } + + otherVer := NewVersion("1.2.3", UnknownFormat) + + return fv, otherVer + }, + expectFallback: true, + }, + { + name: "v.semVer is valid but other semver is nil", + setupFunc: func(t testing.TB) (fuzzyVersion, *Version) { + // create fuzzyVersion with valid semVer + semVer, err := newSemanticVersion("1.2.3", false) + require.NoError(t, err) + + fv := fuzzyVersion{ + semVer: &semVer, + raw: "1.2.3", + } + + // create other version that will result in nil semver from newFuzzySemver + otherVer := NewVersion("abc123", UnknownFormat) + + return fv, otherVer + }, + expectFallback: true, + }, + { + name: "v.semVer is valid but other semver.obj is nil", + setupFunc: func(t testing.TB) (fuzzyVersion, *Version) { + // create fuzzyVersion with valid semVer + semVer, err := newSemanticVersion("1.2.3", false) + require.NoError(t, err) + + fv := fuzzyVersion{ + semVer: &semVer, + raw: "1.2.3", } - otherVer := &Version{ - Raw: "1.2.4", - Format: SemanticFormat, - rich: rich{}, // semVer will be nil + // this should create a version that when passed to newFuzzySemver + // results in a semanticVersion with nil obj (this might be hard to achieve + // but we'll test the logic path) + otherVer := NewVersion("not-semver-compliant", UnknownFormat) + + return fv, otherVer + }, + expectFallback: true, + }, + { + name: "both semvers are valid - should use semver comparison", + setupFunc: func(t testing.TB) (fuzzyVersion, *Version) { + // create fuzzyVersion with valid semVer + semVer, err := newSemanticVersion("1.2.3", false) + require.NoError(t, err) + + fv := fuzzyVersion{ + semVer: &semVer, + raw: "1.2.3", } - return thisVer, otherVer + otherVer := NewVersion("1.2.4", UnknownFormat) + + return fv, otherVer }, - expectError: true, - errorSubstring: "given empty semver object (fuzzy)", + expectFallback: false, }, } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - thisVer, otherVer := test.setupFunc() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fv, otherVer := tt.setupFunc(t) + + result, err := fv.Compare(otherVer) + require.NoError(t, err) - _, err := thisVer.Compare(otherVer) + // verify that the result is a valid comparison result + assert.Contains(t, []int{-1, 0, 1}, result, "Expected comparison result to be -1, 0, or 1") - assert.Error(t, err) - if test.errorSubstring != "" { - assert.True(t, strings.Contains(err.Error(), test.errorSubstring), - "Expected error to contain '%s', got: %v", test.errorSubstring, err) + // we can't easily test which path was taken without modifying the source, + // but we can at least verify the function doesn't panic and returns valid results + if tt.expectFallback { + 
// when falling back to fuzzy comparison, we should get a result + // the exact value depends on the fuzzyVersionComparison implementation + assert.NotPanics(t, func() { + _, _ = fv.Compare(otherVer) + }) } }) } diff --git a/grype/version/gem_version.go b/grype/version/gem_version.go new file mode 100644 index 00000000000..6820ff3fbd0 --- /dev/null +++ b/grype/version/gem_version.go @@ -0,0 +1,299 @@ +package version + +import ( + "fmt" + "regexp" + "strconv" + "strings" +) + +var _ Comparator = (*gemVersion)(nil) + +type gemVersion struct { + original string + segments []any + canonical []any + isPrerelease bool +} + +const ( + rubySegmentPattern = `(\d+|[a-zA-Z]+)` + rubyCorrectnessPattern = `^[0-9a-zA-Z.\-]+$` +) + +var ( + segmentRegexp = regexp.MustCompile(rubySegmentPattern) + correctnessRegexp = regexp.MustCompile(rubyCorrectnessPattern) +) + +func newGemVersion(raw string) (gemVersion, error) { + original := raw + processed := cleanArchFromVersion(raw) + if processed == "" || strings.TrimSpace(processed) == "" { + processed = "0" + } else { + processed = strings.TrimSpace(processed) + } + + if !correctnessRegexp.MatchString(processed) { + return gemVersion{}, fmt.Errorf("malformed version number string %q", original) + } + processed = strings.ReplaceAll(processed, "-", ".pre.") + + isPrerelease := strings.ContainsAny(processed, "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") + + segments, err := partitionSegments(processed) + if err != nil { + return gemVersion{}, fmt.Errorf("malformed version number string %q: %w", original, err) + } + if len(segments) == 0 { + if processed == "0" { + segments = []any{0} + } else { + return gemVersion{}, fmt.Errorf("malformed version number string %q (no valid segments after processing)", original) + } + } + canonical := make([]any, len(segments)) + copy(canonical, segments) + + canonical = trimTrailingZeros(canonical) + canonical = trimIntermediateZeros(canonical, isPrerelease) + + if len(canonical) == 0 { + canonical = []any{0} + } + + return gemVersion{ + original: original, + segments: segments, + canonical: canonical, + isPrerelease: isPrerelease, + }, nil +} + +func (v gemVersion) Compare(other *Version) (int, error) { + if other == nil { + return -1, ErrNoVersionProvided + } + + o, err := newGemVersion(other.Raw) + if err != nil { + return 0, invalidFormatError(GemFormat, other.Raw, err) + } + + return v.compare(o) +} + +func (v gemVersion) compare(other gemVersion) (int, error) { + result, commonSegmentsAreEqual, err := compareSegments(v.canonical, other.canonical) + if err != nil { + return -1, err + } + + if commonSegmentsAreEqual { + return compareLengths(v.canonical, other.canonical, result), nil + } + + return result, nil +} + +func (v *gemVersion) String() string { + return v.original +} + +func partitionSegments(versionString string) ([]any, error) { + if versionString == "" { + return []any{}, fmt.Errorf("cannot partition empty version string") + } + if strings.Contains(versionString, "..") { + return nil, fmt.Errorf("invalid version string (double dot): %q", versionString) + } + if (strings.HasPrefix(versionString, ".") && versionString != ".") || + (strings.HasSuffix(versionString, ".") && versionString != ".") { + if len(versionString) > 1 { + return nil, fmt.Errorf("invalid version string (leading/trailing dot): %q", versionString) + } + } + + parts := segmentRegexp.FindAllString(versionString, -1) + if len(parts) == 0 { + if versionString == "0" { + return []any{0}, nil + } + return nil, fmt.Errorf("no valid segments 
found in %q", versionString) + } + + segments := make([]any, 0, len(parts)) + for _, s := range parts { + if n, err := strconv.Atoi(s); err == nil { + segments = append(segments, n) + } else { + segments = append(segments, s) + } + } + return segments, nil +} + +func trimTrailingZeros(segments []any) []any { + if len(segments) <= 1 { + if len(segments) == 1 { + if num, ok := segments[0].(int); ok && num == 0 { + return []any{0} + } + } + return segments + } + + lastSignificantIdx := -1 + for i := len(segments) - 1; i >= 0; i-- { + num, ok := segments[i].(int) + if !ok || num != 0 { + lastSignificantIdx = i + break + } + // It's a numeric zero, continue looking + } + + if lastSignificantIdx == -1 { + return []any{0} + } + return segments[:lastSignificantIdx+1] +} + +func trimIntermediateZeros(segments []any, isPrerelease bool) []any { + if !isPrerelease || len(segments) == 0 { + return segments + } + + firstLetterIdx := -1 + for i, seg := range segments { + if _, ok := seg.(string); ok { + firstLetterIdx = i + break + } + } + + if firstLetterIdx == -1 { + return segments + } + + segmentsBeforeLetter := []any{} + if firstLetterIdx > 0 { + segmentsBeforeLetter = segments[:firstLetterIdx] + } + + trimmedPrefix := []any{} + if len(segmentsBeforeLetter) > 0 { + lastNonZeroInPrefixIdx := -1 + for i := len(segmentsBeforeLetter) - 1; i >= 0; i-- { + num, ok := segmentsBeforeLetter[i].(int) + if !ok || num != 0 { + lastNonZeroInPrefixIdx = i + break + } + } + if lastNonZeroInPrefixIdx != -1 { + trimmedPrefix = segmentsBeforeLetter[:lastNonZeroInPrefixIdx+1] + } + } + + reconstructed := make([]any, 0, len(trimmedPrefix)+len(segments)-firstLetterIdx) + reconstructed = append(reconstructed, trimmedPrefix...) + reconstructed = append(reconstructed, segments[firstLetterIdx:]...) 
+ + return reconstructed +} + +func compareSegments(left, right []any) (result int, allEqual bool, err error) { + limit := len(left) + if len(right) < limit { + limit = len(right) + } + + for i := 0; i < limit; i++ { + l := left[i] + r := right[i] + + lNum, lIsNum := l.(int) + lStr, lIsStr := l.(string) + rNum, rIsNum := r.(int) + rStr, rIsStr := r.(string) + + if lIsNum && rIsNum { + if lNum != rNum { + if lNum < rNum { + return -1, false, nil + } + return 1, false, nil + } + continue + } + + if lIsStr && rIsStr { + if cmp := strings.Compare(lStr, rStr); cmp != 0 { + return cmp, false, nil + } + continue + } + + if lIsNum && rIsStr { + return 1, false, nil + } + if lIsStr && rIsNum { + return -1, false, nil + } + + return 0, false, fmt.Errorf("internal comparison error: unexpected types %T vs %T", l, r) + } + return 0, true, nil +} + +func compareLengths(left, right []any, commonResult int) int { + if commonResult != 0 { + return commonResult + } + + lLen := len(left) + rLen := len(right) + + if lLen == rLen { + return 0 + } + + if lLen > rLen { + for i := rLen; i < lLen; i++ { + seg := left[i] + if _, isStr := seg.(string); isStr { + return -1 + } + if num, isNum := seg.(int); isNum && num != 0 { + return 1 + } + } + return 0 + } + + for i := lLen; i < rLen; i++ { + seg := right[i] + if _, isStr := seg.(string); isStr { + return 1 + } + if num, isNum := seg.(int); isNum && num != 0 { + return -1 + } + } + return 0 +} + +func cleanArchFromVersion(raw string) string { + platforms := []string{"x86", "universal", "arm", "java", "dalvik", "x64", "powerpc", "sparc", "mswin"} + dash := "-" + for _, p := range platforms { + vals := strings.SplitN(raw, dash+p, 2) + if len(vals) == 2 { + return vals[0] + } + } + + return raw +} diff --git a/grype/version/gem_version_test.go b/grype/version/gem_version_test.go new file mode 100644 index 00000000000..588797d985a --- /dev/null +++ b/grype/version/gem_version_test.go @@ -0,0 +1,312 @@ +package version + +import ( + "fmt" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGemVersion_Constraint(t *testing.T) { + tests := []testCase{ + // empty values + {version: "2.3.1", constraint: "", satisfied: true}, + // typical cases + {version: "0.9.9-r0", constraint: "< 0.9.12-r1", satisfied: true}, // regression case + {version: "1.5.0-arm-windows", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.2.0-arm-windows", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.0.1-armv5-window", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.0.1-armv7-linux", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.6.0-universal-darwin-9", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.6.0-universal-darwin-10", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.6.0-x86_64-darwin-10", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "2.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "1.2.0", constraint: ">1.0, <2.0", satisfied: true}, + {version: "1.2.0-x86", constraint: ">1.0, <2.0", satisfied: true}, + {version: "1.2.0-x86-linux", constraint: ">1.0, <2.0", satisfied: true}, + {version: "1.2.0-x86-linux", constraint: "= 1.2.0", satisfied: true}, + {version: "1.2.0-x86_64-linux", constraint: "= 1.2.0", 
satisfied: true}, + {version: "1.2.0-x86_64-linux", constraint: "< 1.2.1", satisfied: true}, + // https://semver.org/#spec-item-11 + {version: "1.2.0-alpha-x86-linux", constraint: "<1.2.0", satisfied: true}, + {version: "1.2.0-alpha-1-x86-linux", constraint: "<1.2.0", satisfied: true}, + // gem versions seem to respect the order: {sem-version}+{meta}-{arch}-{os} + // but let's check the extraction works even when the order of {meta}-{arch} varies. + {version: "1.2.0-alpha-1-x86-linux-meta", constraint: "<1.2.0", satisfied: true}, + {version: "1.2.0-alpha-1-meta-x86-linux", constraint: "<1.2.0", satisfied: true}, + {version: "1.2.0-alpha-1-x86-linux-meta", constraint: ">1.1.0", satisfied: true}, + {version: "1.2.0-alpha-1-arm-linux-meta", constraint: ">1.1.0", satisfied: true}, + {version: "1.0.0-alpha-a.b-c-somethinglong-build.1-aef.1-its-okay", constraint: "<1.0.0", satisfied: true}, + } + + for _, test := range tests { + t.Run(test.tName(), func(t *testing.T) { + constraint, err := GetConstraint(test.constraint, GemFormat) + assert.NoError(t, err, "unexpected error from newSemanticConstraint: %v", err) + + test.assertVersionConstraint(t, GemFormat, constraint) + }) + } + +} + +func Test_cleanPlatformMakesEqualVersions(t *testing.T) { + tests := []struct { + input string + trimmed string + want *gemVersion + }{ + {input: "1.13.1", trimmed: "1.13.1"}, + {input: "1.13.1-arm-linux", trimmed: "1.13.1"}, + {input: "1.13.1-armv6-linux", trimmed: "1.13.1"}, + {input: "1.13.1-armv7-linux", trimmed: "1.13.1"}, + {input: "1.13.1-java", trimmed: "1.13.1"}, + {input: "1.13.1-dalvik", trimmed: "1.13.1"}, + {input: "1.13.1-mswin32", trimmed: "1.13.1"}, + {input: "1.13.1-x64-mswin64", trimmed: "1.13.1"}, + {input: "1.13.1-sparc-unix", trimmed: "1.13.1"}, + {input: "1.13.1-powerpc-darwin", trimmed: "1.13.1"}, + {input: "1.13.1-x86-linux", trimmed: "1.13.1"}, + {input: "1.13.1-x86_64-linux", trimmed: "1.13.1"}, + {input: "1.13.1-x86-freebsd", trimmed: "1.13.1"}, + {input: "1.13.1-x86-mswin32-80", trimmed: "1.13.1"}, + {input: "1.13.1-universal-darwin-8", trimmed: "1.13.1"}, + // ruby versions get the canonical segment "pre" if there are any segments that are all + // alphabetic characters. + {input: "1.13.1-beta-universal-darwin-8", trimmed: "1.13.1.pre.beta"}, + {input: "1.13.1-alpha-1-meta-arm-linux", trimmed: "1.13.1-alpha-1-meta"}, + {input: "1.13.1-alpha-1-build.12-arm-linux", trimmed: "1.13.1-alpha-1-build.12"}, + } + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + original := NewVersion(tt.input, GemFormat) + trimmed := NewVersion(tt.trimmed, GemFormat) + comp, err := original.Compare(trimmed) + require.NoError(t, err) + assert.Equal(t, 0, comp) + comp, err = trimmed.Compare(original) + require.NoError(t, err) + assert.Equal(t, 0, comp) + }) + } +} + +func TestNewGemVersion_ValidInputs(t *testing.T) { + tests := []struct { + input string + expectedOriginal string // What v.original should be + expectedSegments []any // What v.segments should be (after .pre. 
processing) + expectedPrerelease bool + }{ + {"1.0", "1.0", []any{1, 0}, false}, + {"1.0 ", "1.0 ", []any{1, 0}, false}, // original preserves space + {" 1.0 ", " 1.0 ", []any{1, 0}, false}, + {"1.2.3", "1.2.3", []any{1, 2, 3}, false}, + {"1.2.3.a", "1.2.3.a", []any{1, 2, 3, "a"}, true}, + {"1.2.3-b4", "1.2.3-b4", []any{1, 2, 3, "pre", "b", 4}, true}, + {"1", "1", []any{1}, false}, + {"0", "0", []any{0}, false}, + {"", "", []any{0}, false}, // Empty string becomes "0" effectively, original is "" + {" ", " ", []any{0}, false}, // Whitespace string becomes "0" effectively + {"1.0-alpha", "1.0-alpha", []any{1, 0, "pre", "alpha"}, true}, + {"1-1", "1-1", []any{1, "pre", 1}, true}, + } + + for _, tt := range tests { + t.Run(fmt.Sprintf("Input_%s", tt.input), func(t *testing.T) { + v, err := newGemVersion(tt.input) + require.NoError(t, err) + assert.Equal(t, tt.expectedOriginal, v.original, "Original string mismatch") + assert.Equal(t, tt.expectedSegments, v.segments, "Initial segments mismatch") + assert.Equal(t, tt.expectedPrerelease, v.isPrerelease, "Prerelease flag mismatch") + }) + } +} + +func TestNewGemVersion_InvalidInputs(t *testing.T) { + invalidVersions := []struct { + name string + input string + errorSubstring string + }{ + {"newline", "1.0\n2.0", "malformed version number string"}, + {"double_dot", "1..2", "malformed version number string"}, + {"space_separated", "1.2 3.4", "malformed version number string"}, + {"trailing_dot_long", "1.2.", "leading/trailing dot"}, + {"leading_dot_long", ".1.2", "leading/trailing dot"}, + {"just_dot", ".", "no valid segments"}, + {"double_hyphen", "--", "malformed version number string"}, + {"hyphen_dot", "1.-2", "malformed version number string"}, + {"dot_hyphen", "1.-pre", "malformed version number string"}, + {"underscore", "1_2", "malformed version number string"}, + {"empty_segments", "...", "malformed version number string"}, + {"invalid_segment_char", "1.2.a@b", "malformed version number string"}, + } + + for _, tt := range invalidVersions { + t.Run(tt.name, func(t *testing.T) { + _, err := newGemVersion(tt.input) + require.Error(t, err) + if tt.errorSubstring != "" { + assert.Contains(t, err.Error(), tt.errorSubstring, "Error message mismatch for input: %s", tt.input) + } + }) + } +} + +func TestGemVersion_Compare(t *testing.T) { + tests := []struct { + v1 string + v2 string + want int // expected result of v1.Compare(v2) + }{ + // Basic comparisons (from Ruby's test_spaceship) + {"1.0", "1.0.0", 0}, + {"1.0", "1.0.a", 1}, + {"1.8.2", "0.0.0", 1}, + {"1.8.2", "1.8.2.a", 1}, + {"1.8.2.b", "1.8.2.a", 1}, + {"1.8.2.a", "1.8.2", -1}, + {"1.8.2.a10", "1.8.2.a9", 1}, + {"", "0", 0}, // "" is treated as "0" + + // Canonicalization leading to equality + {"0.beta.1", "0.0.beta.1", 0}, // Ruby: 0.beta.1 <=> 0.0.beta.1 is 0. Canonical for both is ["beta", -1] + {"0.0.beta", "0.0.beta.1", -1}, // Ruby: 0.0.beta <=> 0.0.beta.1 is -1. Canonical ["beta"] vs ["beta", -1] + + // String segments comparison + {"5.a", "5.0.0.rc2", -1}, // "a" < "rc" + {"5.x", "5.0.0.rc2", 1}, // "x" > "rc" + + // Direct string comparison from Ruby test + {"1.9.3", "1.9.3", 0}, + {"1.9.3", "1.9.2.99", 1}, + {"1.9.3", "1.9.3.1", -1}, + + // Additional common cases + {"1.0", "1.1", -1}, + {"1.1", "1.0", 1}, + {"1", "1.0", 0}, + {"1.0.1", "1.0.0", 1}, + {"1.0.0", "1.0.1", -1}, + + // Prerelease vs Prerelease (length diff) + {"1.0.alpha.1", "1.0.alpha", 1}, + {"1.0.alpha", "1.0.alpha.1", -1}, + + // Hyphen handling (SemVer-like via .pre.) 
+ {"1.0.0-alpha", "1.0.0-alpha.1", -1}, + {"1.0.0-alpha.1", "1.0.0-beta.2", -1}, + {"1.0.0-beta.2", "1.0.0-beta.11", -1}, + {"1.0.0-beta.11", "1.0.0-rc.1", -1}, // beta < rc + {"1.0.0-rc1", "1.0.0", -1}, + {"1.0.0-1", "1", -1}, // 1.0.0.pre.1 vs 1 + {"1-1", "1", -1}, // 1.pre.1 vs 1 + + // From Ruby's test_semver (some overlap, ensure coverage) + {"1.0.0-alpha", "1.0.0-alpha.1", -1}, + {"1.0.0-alpha.1", "1.0.0-beta.2", -1}, // alpha < beta + {"1.0.0-beta.2", "1.0.0-beta.11", -1}, // 2 < 11 + {"1.0.0-beta.11", "1.0.0-rc.1", -1}, // beta < rc + {"1.0.0-rc1", "1.0.0", -1}, // 1.0.0.pre.rc.1 < 1.0.0 (release) + {"1.0.0-1", "1", -1}, // 1.0.0.pre.1 < 1 (release) + + // Edge cases with canonicalization + {"1.0", "1", 0}, + {"1.0.0", "1", 0}, + {"1.a", "1.0.0.a", 0}, // Canonical [1,"a"] for both + {"1.a.0", "1.a", 0}, // Canonical [1,"a"] for both + } + + for _, tt := range tests { + t.Run(fmt.Sprintf("%s_vs_%s", tt.v1, tt.v2), func(t *testing.T) { + ver1 := NewVersion(tt.v1, GemFormat) + ver2 := NewVersion(tt.v2, GemFormat) + + // Test v1 vs v2 + got1, err1 := ver1.Compare(ver2) + require.NoError(t, err1, "v1.Compare(v2) failed for %s vs %s", tt.v1, tt.v2) + assert.Equal(t, tt.want, got1, "Compare(%q, %q) == %d, want %d", tt.v1, tt.v2, got1, tt.want) + + // Test symmetry: v2 vs v1 + expectedSymmetric := 0 + if tt.want != 0 { + expectedSymmetric = -tt.want + } + got2, err2 := ver2.Compare(ver1) + require.NoError(t, err2, "v2.Compare(v1) failed for %s vs %s", tt.v2, tt.v1) + assert.Equal(t, expectedSymmetric, got2, "Compare(%q, %q) == %d, want %d (symmetric)", tt.v2, tt.v1, got2, expectedSymmetric) + + // Test reflexivity: v1 vs v1 + gotReflexive1, errReflexive1 := ver1.Compare(ver1) + require.NoError(t, errReflexive1, "v1.Compare(v1) failed for %s", tt.v1) + assert.Equal(t, 0, gotReflexive1, "Compare(%q, %q) == %d, want 0 (reflexive)", tt.v1, tt.v1, gotReflexive1) + }) + } +} + +func TestGemVersion_Compare_Errors(t *testing.T) { + vGem1_0, err := newGemVersion("1.0") + require.NoError(t, err) + + t.Run("CompareWithNil", func(t *testing.T) { + _, err := vGem1_0.Compare(nil) + assert.ErrorIs(t, err, ErrNoVersionProvided) + }) + + t.Run("CompareWithDifferentFormat", func(t *testing.T) { + // Assuming SemanticFormat is a distinct, incompatible format + // and that the Format type has a String() method for user-friendly error messages. 
+ vOther := &Version{Raw: "1.0.0", Format: SemanticFormat} + _, err := vGem1_0.Compare(vOther) + require.NoError(t, err) + }) + + t.Run("CompareWithUnknownFormat_ParsableAsGem", func(t *testing.T) { + vOther := &Version{Raw: "1.1", Format: UnknownFormat} // Parsable as Gem + res, err := vGem1_0.Compare(vOther) + assert.NoError(t, err) + assert.Equal(t, -1, res) // 1.0 < 1.1 + }) + + t.Run("CompareWithUnknownFormat_UnparsableAsGem", func(t *testing.T) { + vOther := &Version{Raw: "invalid..version", Format: UnknownFormat} + _, err := vGem1_0.Compare(vOther) + require.Error(t, err) + require.ErrorContains(t, err, "malformed version number string") + }) +} + +func TestGemVersion_canonical(t *testing.T) { + tests := []struct { + name string + version string + want []any + }{ + // obtained from a simple ruby program like this: + /* + require 'rubygems/version' + v = Gem::Version.new(input) + v.canonical_segments + */ + {"simple ints", "1.2.3", []any{1, 2, 3}}, + {"leading zeros preserved", "0.1.2", []any{0, 1, 2}}, + {"drop intermediate zeros in pre-release", "5.0.0.a1", []any{5, "a", 1}}, + {"preserve intermedia zeros in regular release", "1.0.0.1", []any{1, 0, 0, 1}}, + {"drop trailing zeros", "1.0.0", []any{1}}, + {"alpha version", "1.6.1.a", []any{1, 6, 1, "a"}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + v, err := newGemVersion(tt.version) + require.NoError(t, err) + + if d := cmp.Diff(v.canonical, tt.want); d != "" { + t.Errorf("canonical mismatch (-want +got):\n%s", d) + } + }) + } +} diff --git a/grype/version/gemfile_constraint_test.go b/grype/version/gemfile_constraint_test.go deleted file mode 100644 index ef2bfaa9870..00000000000 --- a/grype/version/gemfile_constraint_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package version - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGemfileConstraint(t *testing.T) { - tests := []testCase{ - // empty values - {version: "2.3.1", constraint: "", satisfied: true}, - // typical cases - {version: "0.9.9-r0", constraint: "< 0.9.12-r1", satisfied: true}, // regression case - {version: "1.5.0-arm-windows", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, - {version: "0.2.0-arm-windows", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, - {version: "0.0.1-armv5-window", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "0.0.1-armv7-linux", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "0.6.0-universal-darwin-9", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "0.6.0-universal-darwin-10", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "0.6.0-x86_64-darwin-10", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "2.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "1.2.0", constraint: ">1.0, <2.0", satisfied: true}, - {version: "1.2.0-x86", constraint: ">1.0, <2.0", satisfied: true}, - {version: "1.2.0-x86-linux", constraint: ">1.0, <2.0", satisfied: true}, - {version: "1.2.0-x86-linux", constraint: "= 1.2.0", satisfied: true}, - {version: "1.2.0-x86_64-linux", constraint: "= 1.2.0", satisfied: true}, - {version: "1.2.0-x86_64-linux", constraint: "< 1.2.1", satisfied: true}, - {version: "1.2.3----RC-SNAPSHOT.12.9.1--.12+788", constraint: "> 1.0.0", satisfied: true}, - {version: "1.2.3----RC-SNAPSHOT.12.9.1--.12+788-armv7-darwin", constraint: "< 
1.2.3", satisfied: true}, - {version: "1.2.3----rc-snapshot.12.9.1--.12+788-armv7-darwin", constraint: "< 1.2.3", satisfied: true}, - // https://semver.org/#spec-item-11 - {version: "1.2.0-alpha-x86-linux", constraint: "<1.2.0", satisfied: true}, - {version: "1.2.0-alpha-1-x86-linux", constraint: "<1.2.0", satisfied: true}, - // gem versions seem to respect the order: {sem-version}+{meta}-{arch}-{os} - // but let's check the extraction works even when the order of {meta}-{arch} varies. - {version: "1.2.0-alpha-1-x86-linux+meta", constraint: "<1.2.0", satisfied: true}, - {version: "1.2.0-alpha-1+meta-x86-linux", constraint: "<1.2.0", satisfied: true}, - {version: "1.2.0-alpha-1-x86-linux+meta", constraint: ">1.1.0", satisfied: true}, - {version: "1.2.0-alpha-1-arm-linux+meta", constraint: ">1.1.0", satisfied: true}, - {version: "1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay", constraint: "<1.0.0", satisfied: true}, - } - - for _, test := range tests { - t.Run(test.tName(), func(t *testing.T) { - constraint, err := newSemanticConstraint(test.constraint) - assert.NoError(t, err, "unexpected error from newSemanticConstraint: %v", err) - - test.assertVersionConstraint(t, GemFormat, constraint) - }) - } - -} diff --git a/grype/version/gemfile_version.go b/grype/version/gemfile_version.go deleted file mode 100644 index a26d67c3a25..00000000000 --- a/grype/version/gemfile_version.go +++ /dev/null @@ -1,39 +0,0 @@ -package version - -import ( - "strings" -) - -// Gemfile.lock doesn't follow a spec, the best documentation comes -// from `gem help platform`. Gemfile.lock versions may have "{cpu}-{os}" -// or "{cpu}-{os}-{version}" -// after the semvVer, for example, 12.2.1-alpha-x86_64-darwin-8, where `2.2.1-alpha` -// is a valid and comparable semVer, and `x86_64-darwin-8` is not a semVer due to -// the underscore. Also, we can't sort based on arch and OS in a way that make sense -// for versions. SemVer is a characteristic of the code, not which arch OS it runs on. -// -// Bunlder's code: https://github.com/rubygems/rubygems/blob/2070231bf0c7c4654bbc2e4c08882bf414840360/bundler/spec/install/gemfile/platform_spec.rb offers more info on possible architecture values, for example `mswin32` may appead without arch. -// -// Spec for pre-release info: https://github.com/rubygems/rubygems/blob/2070231bf0c7c4654bbc2e4c08882bf414840360/bundler/spec/install/gemfile/path_spec.rb#L186 -// -// CPU/arch is the most structured value present in gemfile.lock versions, we use it -// to split the version info in half, the first half has semVer, and -// the second half has arch and OS which we ignore. 
-// When there is no arch we split the version string with: {java, delvik, mswin} -func extractSemVer(raw string) string { - platforms := []string{"x86", "universal", "arm", "java", "dalvik", "x64", "powerpc", "sparc", "mswin"} - dash := "-" - for _, p := range platforms { - vals := strings.SplitN(raw, dash+p, 2) - if len(vals) == 2 { - return vals[0] - } - } - - return raw -} - -func newGemfileVersion(raw string) (*semanticVersion, error) { - cleaned := extractSemVer(raw) - return newSemanticVersion(cleaned) -} diff --git a/grype/version/gemfile_version_test.go b/grype/version/gemfile_version_test.go deleted file mode 100644 index 8456cbd6f2e..00000000000 --- a/grype/version/gemfile_version_test.go +++ /dev/null @@ -1,61 +0,0 @@ -package version - -import ( - "fmt" - "testing" - - "github.com/stretchr/testify/assert" -) - -func makeSemVer(t *testing.T, raw string) *semanticVersion { - semVer, err := newSemanticVersion(raw) - assert.NoError(t, err) - return semVer -} - -func Test_newGemfileVersion(t *testing.T) { - - tests := []struct { - input string - want *semanticVersion - }{ - {input: "1.13.1", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-arm-linux", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-armv6-linux", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-armv7-linux", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-java", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-dalvik", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-mswin32", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-x64-mswin64", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-sparc-unix", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-powerpc-darwin", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-x86-linux", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-x86_64-linux", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-x86-freebsd", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-x86-mswin32-80", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-universal-darwin-8", want: makeSemVer(t, "1.13.1")}, - {input: "1.13.1-beta-universal-darwin-8", want: makeSemVer(t, "1.13.1.beta")}, - {input: "1.13.1-alpha-1+meta-arm-linux", want: makeSemVer(t, "1.13.1.alpha-1+meta")}, - {input: "1.13.1-alpha-1+build.12-arm-linux", want: makeSemVer(t, "1.13.1.alpha-1+build.12")}, - {input: "1.2.3----RC-SNAPSHOT.12.9.1--.12+788-armv7-darwin", want: makeSemVer(t, "1.2.3----RC-SNAPSHOT.12.9.1--.12+788")}, - {input: "1.2.3----rc-snapshot.12.9.1--.12+788-armv7-darwin", want: makeSemVer(t, "1.2.3----rc-snapshot.12.9.1--.12+788")}, - } - for _, tt := range tests { - t.Run(tt.input, func(t *testing.T) { - got, err := newGemfileVersion(tt.input) - if !assert.NoError(t, err, fmt.Sprintf("newGemfileVersion(%v)", tt.input)) { - return - } - assert.Equalf(t, tt.want, got, "newGemfileVersion(%v)", tt.input) - - // check that semantic versions are comaprable to gemfile versions - other, err := NewVersion(tt.want.verObj.String(), SemanticFormat) - assert.NoError(t, err) - - v, err := got.Compare(other) - assert.NoError(t, err) - // zero here means `other` and `got` are the same version - assert.Equal(t, 0, v) - }) - } -} diff --git a/grype/version/generic_constraint.go b/grype/version/generic_constraint.go index ebd039e290d..01837cd0716 100644 --- a/grype/version/generic_constraint.go +++ b/grype/version/generic_constraint.go @@ -1,38 +1,60 @@ package version -import "fmt" +import ( + "fmt" + "strings" +) var _ Constraint = (*genericConstraint)(nil) type genericConstraint struct { - raw string - expression 
constraintExpression - name string + Raw string + Expression simpleRangeExpression + Fmt Format } -func newGenericConstraint(raw string, genFn comparatorGenerator, name string) (genericConstraint, error) { - constraints, err := newConstraintExpression(raw, genFn) +func newGenericConstraint(format Format, raw string) (genericConstraint, error) { + constraints, err := parseRangeExpression(raw) if err != nil { - return genericConstraint{}, err + return genericConstraint{}, invalidFormatError(format, raw, err) } return genericConstraint{ - expression: constraints, - raw: raw, - name: name, + Expression: constraints, + Raw: raw, + Fmt: format, }, nil } func (g genericConstraint) String() string { - value := "none" - if g.raw != "" { - value = g.raw + value := g.Value() + if g.Raw == "" { + value = "none" } - return fmt.Sprintf("%s (%s)", value, g.name) + return fmt.Sprintf("%s (%s)", value, strings.ToLower(g.Fmt.String())) +} + +func (g genericConstraint) Value() string { + return g.Raw +} + +func (g genericConstraint) Format() Format { + return g.Fmt } func (g genericConstraint) Satisfied(version *Version) (bool, error) { - if g.raw == "" { - return true, nil // the empty constraint is always satisfied + if g.Raw == "" && version != nil { + // empty constraints are always satisfied + return true, nil + } + if version == nil { + if g.Raw != "" { + // a non-empty constraint with no version given should always fail + return false, nil + } + return true, nil + } + if version.Format != g.Fmt { + return false, newUnsupportedFormatError(g.Fmt, version) } - return g.expression.satisfied(version) + return g.Expression.satisfied(g.Fmt, version) } diff --git a/grype/version/generic_constraint_test.go b/grype/version/generic_constraint_test.go new file mode 100644 index 00000000000..15c9706f055 --- /dev/null +++ b/grype/version/generic_constraint_test.go @@ -0,0 +1,177 @@ +package version + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGenericConstraint_String(t *testing.T) { + tests := []struct { + name string + constraint string + format Format + expected string + }{ + { + name: "empty constraint", + constraint: "", + format: SemanticFormat, + expected: "none (semantic)", + }, + { + name: "simple constraint", + constraint: "> 1.0.0", + format: SemanticFormat, + expected: "> 1.0.0 (semantic)", + }, + { + name: "complex constraint", + constraint: ">= 1.0.0, < 2.0.0", + format: MavenFormat, + expected: ">= 1.0.0, < 2.0.0 (maven)", + }, + { + name: "jvm format name", + constraint: "< 11", + format: JVMFormat, + expected: "< 11 (jvm)", + }, + { + name: "go format name", + constraint: "> v1.2.3", + format: GolangFormat, + expected: "> v1.2.3 (go)", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + constraint, err := newGenericConstraint(test.format, test.constraint) + require.NoError(t, err) + + result := constraint.String() + assert.Equal(t, test.expected, result) + }) + } +} + +func TestGenericConstraint_Satisfied_EmptyConstraint(t *testing.T) { + constraint, err := newGenericConstraint(SemanticFormat, "") + require.NoError(t, err) + + tests := []struct { + name string + version *Version + }{ + { + name: "with valid version", + version: NewVersion("1.2.3", SemanticFormat), + }, + { + name: "with nil version", + version: nil, + }, + { + name: "with different format version", + version: NewVersion("1.2.3-r1", ApkFormat), + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + 
satisfied, err := constraint.Satisfied(test.version) + assert.NoError(t, err) + assert.True(t, satisfied, "empty constraint should always be satisfied") + }) + } +} + +func TestGenericConstraint_Satisfied_WithConstraint(t *testing.T) { + tests := []struct { + name string + constraint string + version string + satisfied bool + shouldError bool + }{ + { + name: "simple greater than - satisfied", + constraint: "> 1.0.0", + version: "1.2.3", + satisfied: true, + }, + { + name: "simple greater than - not satisfied", + constraint: "> 2.0.0", + version: "1.2.3", + satisfied: false, + }, + { + name: "complex constraint - satisfied", + constraint: ">= 1.0.0, < 2.0.0", + version: "1.5.0", + satisfied: true, + }, + { + name: "complex constraint - not satisfied", + constraint: ">= 1.0.0, < 2.0.0", + version: "2.5.0", + satisfied: false, + }, + { + name: "equality constraint - satisfied", + constraint: "= 1.2.3", + version: "1.2.3", + satisfied: true, + }, + { + name: "equality constraint - not satisfied", + constraint: "= 1.2.3", + version: "1.2.4", + satisfied: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + constraint, err := newGenericConstraint(SemanticFormat, test.constraint) + require.NoError(t, err) + + version := NewVersion(test.version, SemanticFormat) + + satisfied, err := constraint.Satisfied(version) + if test.shouldError { + require.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, test.satisfied, satisfied) + } + }) + } +} + +func TestGenericConstraint_Invalid(t *testing.T) { + tests := []struct { + name string + constraint string + gen func(unit rangeUnit) (Comparator, error) + }{ + { + name: "invalid operator", + constraint: "~~ 1.0.0", + }, + { + name: "malformed constraint", + constraint: "> 1.0.0 < 2.0.0", // missing comma + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + _, err := newGenericConstraint(SemanticFormat, test.constraint) + require.Error(t, err) + }) + } +} diff --git a/grype/version/golang_constraint.go b/grype/version/golang_constraint.go deleted file mode 100644 index f9443311a3d..00000000000 --- a/grype/version/golang_constraint.go +++ /dev/null @@ -1,15 +0,0 @@ -package version - -import "fmt" - -func newGolangConstraint(raw string) (Constraint, error) { - return newGenericConstraint(raw, newGolangComparator, "go") -} - -func newGolangComparator(unit constraintUnit) (Comparator, error) { - ver, err := newGolangVersion(unit.version) - if err != nil { - return nil, fmt.Errorf("unable to parse Golang constraint version (%s): %w", unit.version, err) - } - return ver, nil -} diff --git a/grype/version/golang_constraint_test.go b/grype/version/golang_constraint_test.go deleted file mode 100644 index 4a73767af01..00000000000 --- a/grype/version/golang_constraint_test.go +++ /dev/null @@ -1,81 +0,0 @@ -package version - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestGolangConstraints(t *testing.T) { - tests := []struct { - name string - version string - constraint string - satisfied bool - }{ - { - name: "regular semantic version satisfied", - version: "v1.2.3", - constraint: "< 1.2.4", - satisfied: true, - }, - { - name: "regular semantic version unsatisfied", - version: "v1.2.3", - constraint: "> 1.2.4", - satisfied: false, - }, - { - name: "+incompatible added to version", // see grype#1581 - version: "v3.2.0+incompatible", - constraint: "<=3.2.0", - satisfied: true, - }, - { - name: "the empty constraint 
is always satisfied", - version: "v1.0.0", - constraint: "", - satisfied: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - c, err := newGolangConstraint(tc.constraint) - require.NoError(t, err) - v, err := NewVersion(tc.version, GolangFormat) - require.NoError(t, err) - sat, err := c.Satisfied(v) - require.NoError(t, err) - assert.Equal(t, tc.satisfied, sat) - }) - } -} - -func TestString(t *testing.T) { - tests := []struct { - name string - constraint string - expected string - }{ - { - name: "empty string", - constraint: "", - expected: "none (go)", - }, - { - name: "basic constraint", - constraint: "< 1.3.4", - expected: "< 1.3.4 (go)", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - c, err := newGolangConstraint(tc.constraint) - require.NoError(t, err) - assert.Equal(t, tc.expected, c.String()) - }) - } -} diff --git a/grype/version/golang_version.go b/grype/version/golang_version.go index 0be1aa92b25..0b81c2079d5 100644 --- a/grype/version/golang_version.go +++ b/grype/version/golang_version.go @@ -10,13 +10,13 @@ import ( var _ Comparator = (*golangVersion)(nil) type golangVersion struct { - raw string - semVer *hashiVer.Version + raw string + obj *hashiVer.Version } -func newGolangVersion(v string) (*golangVersion, error) { +func newGolangVersion(v string) (golangVersion, error) { if v == "(devel)" { - return nil, ErrUnsupportedVersion + return golangVersion{}, ErrUnsupportedVersion } // Invalid Semver fix ups @@ -34,42 +34,44 @@ func newGolangVersion(v string) (*golangVersion, error) { semver, err := hashiVer.NewSemver(fixedUp) if err != nil { - return nil, err + return golangVersion{}, err } - return &golangVersion{ - raw: v, - semVer: semver, + return golangVersion{ + raw: v, + obj: semver, }, nil } -func (g golangVersion) Compare(other *Version) (int, error) { - other, err := finalizeComparisonVersion(other, GolangFormat) - if err != nil { - return -1, err +func (v golangVersion) Compare(other *Version) (int, error) { + if other == nil { + return -1, ErrNoVersionProvided } - if other.rich.golangVersion == nil { - return -1, fmt.Errorf("cannot compare version with nil golang version to golang version") + o, err := newGolangVersion(other.Raw) + if err != nil { + return 0, err } - if other.rich.golangVersion.raw == g.raw { + + if o.raw == v.raw { return 0, nil } - if other.rich.golangVersion.raw == "(devel)" { - return -1, fmt.Errorf("cannot compare %s with %s", g.raw, other.rich.golangVersion.raw) + + if o.raw == "(devel)" { + return -1, fmt.Errorf("cannot compare a non-development version %q with a default development version of %q", v.raw, o.raw) } - return other.rich.golangVersion.compare(g), nil + return v.compare(o), nil } -func (g golangVersion) compare(o golangVersion) int { +func (v golangVersion) compare(o golangVersion) int { switch { - case g.semVer != nil && o.semVer != nil: - return g.semVer.Compare(o.semVer) - case g.semVer != nil && o.semVer == nil: + case v.obj != nil && o.obj != nil: + return v.obj.Compare(o.obj) + case v.obj != nil && o.obj == nil: return 1 - case g.semVer == nil && o.semVer != nil: + case v.obj == nil && o.obj != nil: return -1 default: - return strings.Compare(g.raw, o.raw) + return strings.Compare(v.raw, o.raw) } } diff --git a/grype/version/golang_version_test.go b/grype/version/golang_version_test.go index deab9ae711c..ae199e756bb 100644 --- a/grype/version/golang_version_test.go +++ b/grype/version/golang_version_test.go @@ -9,204 +9,310 @@ import ( hashiVer 
"github.com/anchore/go-version" ) -func TestNewGolangVersion(t *testing.T) { +func TestGolangVersion_Constraint(t *testing.T) { tests := []struct { - name string - input string - expected golangVersion - wantErr bool + name string + version string + constraint string + satisfied bool }{ { - name: "normal semantic version", - input: "v1.8.0", - expected: golangVersion{ - raw: "v1.8.0", - semVer: hashiVer.Must(hashiVer.NewSemver("v1.8.0")), - }, + name: "regular semantic version satisfied", + version: "v1.2.3", + constraint: "< 1.2.4", + satisfied: true, }, { - name: "v0.0.0 date and hash version", - input: "v0.0.0-20180116102854-5a71ef0e047d", - expected: golangVersion{ - raw: "v0.0.0-20180116102854-5a71ef0e047d", - semVer: hashiVer.Must(hashiVer.NewSemver("v0.0.0-20180116102854-5a71ef0e047d")), - }, - }, - { - name: "semver with +incompatible", - input: "v24.0.7+incompatible", - expected: golangVersion{ - raw: "v24.0.7+incompatible", - semVer: hashiVer.Must(hashiVer.NewSemver("v24.0.7+incompatible")), - }, + name: "regular semantic version unsatisfied", + version: "v1.2.3", + constraint: "> 1.2.4", + satisfied: false, }, { - name: "semver with +incompatible+dirty", - input: "v24.0.7+incompatible+dirty", - expected: golangVersion{ - raw: "v24.0.7+incompatible+dirty", - semVer: hashiVer.Must(hashiVer.NewSemver("v24.0.7+incompatible.dirty")), - }, + name: "+incompatible added to version", // see grype#1581 + version: "v3.2.0+incompatible", + constraint: "<=3.2.0", + satisfied: true, }, { - name: "standard library", - input: "go1.21.4", - expected: golangVersion{ - raw: "go1.21.4", - semVer: hashiVer.Must(hashiVer.NewSemver("1.21.4")), - }, + name: "the empty constraint is always satisfied", + version: "v1.0.0", + constraint: "", + satisfied: true, }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + c, err := GetConstraint(tc.constraint, GolangFormat) + require.NoError(t, err) + v := NewVersion(tc.version, GolangFormat) + sat, err := c.Satisfied(v) + require.NoError(t, err) + assert.Equal(t, tc.satisfied, sat) + }) + } +} + +func TestGolangVersion_String(t *testing.T) { + tests := []struct { + name string + constraint string + expected string + }{ { - // "(devel)" is the main module of a go program. - // If we get a package with this version, it means the SBOM - // doesn't have a real version number for the built package, so - // we can't compare it and should just return an error. 
- name: "devel", - input: "(devel)", - wantErr: true, + name: "empty string", + constraint: "", + expected: "none (go)", }, { - name: "invalid input", - input: "some nonsense", - wantErr: true, + name: "basic constraint", + constraint: "< 1.3.4", + expected: "< 1.3.4 (go)", }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - v, err := newGolangVersion(tc.input) - if tc.wantErr { - require.Error(t, err) - return - } - assert.Nil(t, err) - assert.Equal(t, tc.expected, *v) + c, err := GetConstraint(tc.constraint, GolangFormat) + require.NoError(t, err) + assert.Equal(t, tc.expected, c.String()) }) } } -func TestCompareGolangVersions(t *testing.T) { +func TestGolangVersion_Compare(t *testing.T) { tests := []struct { - name string - thisVersion string - otherVersion string - want int + name string + version1 string + version2 string + expected int }{ { - name: "semver this version less", - thisVersion: "v1.2.3", - otherVersion: "v1.2.4", - want: -1, + name: "same basic version", + version1: "v1.2.3", + version2: "v1.2.3", + expected: 0, + }, + { + name: "same version with incompatible", + version1: "v3.2.0+incompatible", + version2: "v3.2.0+incompatible", + expected: 0, + }, + { + name: "same go stdlib version", + version1: "go1.24.1", + version2: "go1.24.1", + expected: 0, + }, + { + name: "version1 less than version2", + version1: "v1.2.3", + version2: "v1.2.4", + expected: -1, }, { - name: "semver this version more", - thisVersion: "v1.3.4", - otherVersion: "v1.2.4", - want: 1, + name: "version1 greater than version2", + version1: "v1.2.4", + version2: "v1.2.3", + expected: 1, }, { - name: "semver equal", - thisVersion: "v1.2.4", - otherVersion: "v1.2.4", - want: 0, + name: "version1 equal to version2", + version1: "v1.2.3", + version2: "v1.2.3", + expected: 0, }, { - name: "commit-sha this version less", - thisVersion: "v0.0.0-20180116102854-5a71ef0e047d", - otherVersion: "v0.0.0-20190116102854-somehash", - want: -1, + name: "go stdlib versions", + version1: "go1.23.1", + version2: "go1.24.1", + expected: -1, }, { - name: "commit-sha this version more", - thisVersion: "v0.0.0-20180216102854-5a71ef0e047d", - otherVersion: "v0.0.0-20180116102854-somehash", - want: 1, + name: "incompatible versions", + version1: "v3.1.0+incompatible", + version2: "v3.2.0+incompatible", + expected: -1, }, { - name: "commit-sha this version equal", - thisVersion: "v0.0.0-20180116102854-5a71ef0e047d", - otherVersion: "v0.0.0-20180116102854-5a71ef0e047d", - want: 0, + name: "semver this version less", + version1: "v1.2.3", + version2: "v1.2.4", + expected: -1, }, { - name: "this pre-semver is less than any semver", - thisVersion: "v0.0.0-20180116102854-5a71ef0e047d", - otherVersion: "v0.0.1", - want: -1, + name: "semver this version more", + version1: "v1.3.4", + version2: "v1.2.4", + expected: 1, }, { - name: "semver is greater than timestamp", - thisVersion: "v2.1.0", - otherVersion: "v0.0.0-20180116102854-5a71ef0e047d", - want: 1, + name: "semver equal", + version1: "v1.2.4", + version2: "v1.2.4", + expected: 0, }, { - name: "pseudoversion less than other pseudoversion", - thisVersion: "v0.0.0-20170116102854-1ef0e047d5a7", - otherVersion: "v0.0.0-20180116102854-5a71ef0e047d", - want: -1, + name: "commit-sha this version less", + version1: "v0.0.0-20180116102854-5a71ef0e047d", + version2: "v0.0.0-20190116102854-somehash", + expected: -1, }, { - name: "pseudoversion greater than other pseudoversion", - thisVersion: "v0.0.0-20190116102854-8a3f0e047d5a", - otherVersion: 
"v0.0.0-20180116102854-5a71ef0e047d", - want: 1, + name: "commit-sha this version more", + version1: "v0.0.0-20180216102854-5a71ef0e047d", + version2: "v0.0.0-20180116102854-somehash", + expected: 1, }, { - name: "+incompatible doesn't break equality", - thisVersion: "v3.2.0", - otherVersion: "v3.2.0+incompatible", - want: 0, + name: "commit-sha this version equal", + version1: "v0.0.0-20180116102854-5a71ef0e047d", + version2: "v0.0.0-20180116102854-5a71ef0e047d", + expected: 0, + }, + { + name: "this pre-semver is less than any semver", + version1: "v0.0.0-20180116102854-5a71ef0e047d", + version2: "v0.0.1", + expected: -1, + }, + { + name: "semver is greater than timestamp", + version1: "v2.1.0", + version2: "v0.0.0-20180116102854-5a71ef0e047d", + expected: 1, + }, + { + name: "pseudoversion less than other pseudoversion", + version1: "v0.0.0-20170116102854-1ef0e047d5a7", + version2: "v0.0.0-20180116102854-5a71ef0e047d", + expected: -1, + }, + { + name: "pseudoversion greater than other pseudoversion", + version1: "v0.0.0-20190116102854-8a3f0e047d5a", + version2: "v0.0.0-20180116102854-5a71ef0e047d", + expected: 1, + }, + { + name: "+incompatible doesn't break equality", + version1: "v3.2.0", + version2: "v3.2.0+incompatible", + expected: 0, }, } - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - a, err := newGolangVersion(tc.thisVersion) - require.NoError(t, err) - other, err := newGolangVersion(tc.otherVersion) - require.NoError(t, err) - got := a.compare(*other) - assert.Equal(t, tc.want, got) + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + version1 := NewVersion(test.version1, GolangFormat) + + version2 := NewVersion(test.version2, GolangFormat) + + result, err := version1.Compare(version2) + assert.NoError(t, err) + assert.Equal(t, test.expected, result) }) } } -func Test_newGolangVersion_UnsupportedVersion(t *testing.T) { +func TestGolangVersion_Compare_NilVersion(t *testing.T) { + version := NewVersion("v1.2.3", GolangFormat) + + result, err := version.Compare(nil) + require.Error(t, err) + assert.Equal(t, ErrNoVersionProvided, err) + assert.Equal(t, -1, result) +} + +func TestGolangVersion_Compare_DifferentFormat(t *testing.T) { + golangVer, err := newGolangVersion("v1.2.3") + require.NoError(t, err) + + semanticVer := NewVersion("1.2.3", SemanticFormat) + + result, err := golangVer.Compare(semanticVer) + require.NoError(t, err) + assert.Equal(t, 0, result) +} + +func TestGolangVersion(t *testing.T) { tests := []struct { - name string - v string - want *golangVersion - wantErr assert.ErrorAssertionFunc + name string + input string + expected golangVersion + wantErr require.ErrorAssertionFunc }{ { - name: "devel", - v: "(devel)", - wantErr: func(t assert.TestingT, err error, msgAndArgs ...interface{}) bool { - return assert.ErrorIs(t, err, ErrUnsupportedVersion) + name: "normal semantic version", + input: "v1.8.0", + expected: golangVersion{ + raw: "v1.8.0", + obj: hashiVer.Must(hashiVer.NewSemver("v1.8.0")), + }, + }, + { + name: "v0.0.0 date and hash version", + input: "v0.0.0-20180116102854-5a71ef0e047d", + expected: golangVersion{ + raw: "v0.0.0-20180116102854-5a71ef0e047d", + obj: hashiVer.Must(hashiVer.NewSemver("v0.0.0-20180116102854-5a71ef0e047d")), }, }, { - name: "invalid", - v: "invalid", - wantErr: assert.Error, + name: "semver with +incompatible", + input: "v24.0.7+incompatible", + expected: golangVersion{ + raw: "v24.0.7+incompatible", + obj: hashiVer.Must(hashiVer.NewSemver("v24.0.7+incompatible")), + }, }, { - name: 
"valid", - v: "v1.2.3", - want: &golangVersion{ - raw: "v1.2.3", - semVer: hashiVer.Must(hashiVer.NewSemver("v1.2.3")), + name: "semver with +incompatible+dirty", + input: "v24.0.7+incompatible+dirty", + expected: golangVersion{ + raw: "v24.0.7+incompatible+dirty", + obj: hashiVer.Must(hashiVer.NewSemver("v24.0.7+incompatible.dirty")), }, }, + { + name: "standard library", + input: "go1.21.4", + expected: golangVersion{ + raw: "go1.21.4", + obj: hashiVer.Must(hashiVer.NewSemver("1.21.4")), + }, + }, + { + // "(devel)" is the main module of a go program. + // If we get a package with this version, it means the SBOM + // doesn't have a real version number for the built package, so + // we can't compare it and should just return an error. + name: "devel", + input: "(devel)", + wantErr: func(t require.TestingT, err error, msgAndArgs ...interface{}) { + require.ErrorIs(t, err, ErrUnsupportedVersion) + }, + }, + { + name: "invalid", + input: "invalid", + wantErr: require.Error, + }, } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := newGolangVersion(tt.v) - if tt.wantErr != nil { - tt.wantErr(t, err) + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + if tc.wantErr == nil { + tc.wantErr = require.NoError + } + v, err := newGolangVersion(tc.input) + tc.wantErr(t, err) + if err != nil { + return } - assert.Equal(t, tt.want, got) + assert.Equal(t, tc.expected, v) }) } } diff --git a/grype/version/helper_test.go b/grype/version/helper_test.go index c59b77ec1ba..55bcbb6a8b3 100644 --- a/grype/version/helper_test.go +++ b/grype/version/helper_test.go @@ -6,15 +6,15 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) type testCase struct { - name string - version string - constraint string - satisfied bool - shouldErr bool - errorAssertion func(t *testing.T, err error) + name string + version string + constraint string + satisfied bool + wantError require.ErrorAssertionFunc } func (c *testCase) tName() string { @@ -27,19 +27,16 @@ func (c *testCase) tName() string { func (c *testCase) assertVersionConstraint(t *testing.T, format Format, constraint Constraint) { t.Helper() + if c.wantError == nil { + c.wantError = require.NoError + } - version, err := NewVersion(c.version, format) - assert.NoError(t, err, "unexpected error from NewVersion: %v", err) + version := NewVersion(c.version, format) isSatisfied, err := constraint.Satisfied(version) - if c.shouldErr { - if c.errorAssertion != nil { - c.errorAssertion(t, err) - } else { - assert.Error(t, err) - } - } else { - assert.NoError(t, err, "unexpected error from constraint.Satisfied: %v", err) + c.wantError(t, err) + if err != nil { + return } assert.Equal(t, c.satisfied, isSatisfied, "unexpected constraint check result") } diff --git a/grype/version/jvm_constraint.go b/grype/version/jvm_constraint.go deleted file mode 100644 index 1f7930f4d5e..00000000000 --- a/grype/version/jvm_constraint.go +++ /dev/null @@ -1,15 +0,0 @@ -package version - -import "fmt" - -func newJvmConstraint(raw string) (Constraint, error) { - return newGenericConstraint(raw, newJvmComparator, "jvm") -} - -func newJvmComparator(unit constraintUnit) (Comparator, error) { - ver, err := newJvmVersion(unit.version) - if err != nil { - return nil, fmt.Errorf("unable to parse JVM constraint version (%s): %w", unit.version, err) - } - return ver, nil -} diff --git a/grype/version/jvm_constraint_test.go b/grype/version/jvm_constraint_test.go deleted file mode 100644 index 
b587662826c..00000000000 --- a/grype/version/jvm_constraint_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package version - -import ( - "testing" - - "github.com/stretchr/testify/require" -) - -func TestVersionConstraintJVM(t *testing.T) { - tests := []testCase{ - // pre jep 223 versions - {version: "1.7.0_80", constraint: "< 1.8.0", satisfied: true}, - {version: "1.8.0_131", constraint: "> 1.8.0", satisfied: true}, - {version: "1.8.0_131", constraint: "< 1.8.0_132", satisfied: true}, - {version: "1.8.0_131-b11", constraint: "< 1.8.0_132", satisfied: true}, - - {version: "1.7.0_80", constraint: "> 1.8.0", satisfied: false}, - {version: "1.8.0_131", constraint: "< 1.8.0", satisfied: false}, - {version: "1.8.0_131", constraint: "> 1.8.0_132", satisfied: false}, - {version: "1.8.0_131-b11", constraint: "> 1.8.0_132", satisfied: false}, - - {version: "1.7.0_80", constraint: "= 1.8.0", satisfied: false}, - {version: "1.8.0_131", constraint: "= 1.8.0", satisfied: false}, - {version: "1.8.0_131", constraint: "= 1.8.0_132", satisfied: false}, - {version: "1.8.0_131-b11", constraint: "= 1.8.0_132", satisfied: false}, - - {version: "1.8.0_80", constraint: "= 1.8.0_80", satisfied: true}, - {version: "1.8.0_131", constraint: ">= 1.8.0_131", satisfied: true}, - {version: "1.8.0_131", constraint: "= 1.8.0_131-b001", satisfied: true}, // builds should not matter - {version: "1.8.0_131-ea-b11", constraint: "= 1.8.0_131-ea", satisfied: true}, - - // jep 223 versions - {version: "8.0.4", constraint: "> 8.0.3", satisfied: true}, - {version: "8.0.4", constraint: "< 8.0.5", satisfied: true}, - {version: "9.0.0", constraint: "> 8.0.5", satisfied: true}, - {version: "9.0.0", constraint: "< 9.1.0", satisfied: true}, - {version: "11.0.4", constraint: "<= 11.0.4", satisfied: true}, - {version: "11.0.5", constraint: "> 11.0.4", satisfied: true}, - - {version: "8.0.4", constraint: "< 8.0.3", satisfied: false}, - {version: "8.0.4", constraint: "> 8.0.5", satisfied: false}, - {version: "9.0.0", constraint: "< 8.0.5", satisfied: false}, - {version: "9.0.0", constraint: "> 9.1.0", satisfied: false}, - {version: "11.0.4", constraint: "> 11.0.4", satisfied: false}, - {version: "11.0.5", constraint: "< 11.0.4", satisfied: false}, - - // mixed versions - {version: "1.8.0_131", constraint: "< 9.0.0", satisfied: true}, // 1.8.0_131 -> 8.0.131 - {version: "9.0.0", constraint: "> 1.8.0_131", satisfied: true}, // 1.8.0_131 -> 8.0.131 - {version: "1.8.0_131", constraint: "<= 8.0.131", satisfied: true}, - {version: "1.8.0_131", constraint: "> 7.0.79", satisfied: true}, - {version: "1.8.0_131", constraint: "= 8.0.131", satisfied: true}, - {version: "1.8.0_131", constraint: ">= 9.0.0", satisfied: false}, - {version: "9.0.1", constraint: "< 8.0.131", satisfied: false}, - - // pre-release versions - {version: "1.8.0_131-ea", constraint: "< 1.8.0_131", satisfied: true}, - {version: "1.8.0_131", constraint: "> 1.8.0_131-ea", satisfied: true}, - {version: "9.0.0-ea", constraint: "< 9.0.0", satisfied: true}, - {version: "9.0.0-ea", constraint: "> 1.8.0_131", satisfied: true}, - } - - for _, test := range tests { - t.Run(test.version+"_constraint_"+test.constraint, func(t *testing.T) { - constraint, err := newJvmConstraint(test.constraint) - require.NoError(t, err) - test.assertVersionConstraint(t, JVMFormat, constraint) - }) - } -} diff --git a/grype/version/jvm_version.go b/grype/version/jvm_version.go index 2d43cb35ffc..a67dc2807b7 100644 --- a/grype/version/jvm_version.go +++ b/grype/version/jvm_version.go @@ -10,7 +10,9 @@ import ( 
"github.com/anchore/grype/internal/log" ) -var _ Comparator = (*jvmVersion)(nil) +var _ interface { + Comparator +} = (*jvmVersion)(nil) var ( preJep223VersionPattern = regexp.MustCompile(`^1\.(?P\d+)(\.(?P\d+)([_-](update)?(_)?(?P\d+))?(-(?P[^b][^-]+))?(-b(?P\d+))?)?`) @@ -22,7 +24,7 @@ type jvmVersion struct { semVer *hashiVer.Version } -func newJvmVersion(raw string) (*jvmVersion, error) { +func newJvmVersion(raw string) (jvmVersion, error) { isPreJep233 := strings.HasPrefix(raw, "1.") if isPreJep233 { @@ -33,51 +35,25 @@ func newJvmVersion(raw string) (*jvmVersion, error) { } verObj, err := hashiVer.NewVersion(raw) if err != nil { - return nil, fmt.Errorf("unable to create semver obj for JVM version: %w", err) + return jvmVersion{}, invalidFormatError(JVMFormat, raw, err) } - return &jvmVersion{ + return jvmVersion{ isPreJep223: isPreJep233, semVer: verObj, }, nil } -func (v *jvmVersion) Compare(other *Version) (int, error) { +func (v jvmVersion) Compare(other *Version) (int, error) { if other == nil { return -1, ErrNoVersionProvided } - if other.Format == JVMFormat { - if other.rich.jvmVersion == nil { - return -1, fmt.Errorf("given empty jvmVersion object") - } - return other.rich.jvmVersion.compare(*v), nil - } - - if other.Format == SemanticFormat { - if other.rich.semVer == nil { - return -1, fmt.Errorf("given empty semVer object") - } - return other.rich.semVer.verObj.Compare(v.semVer), nil - } - - jvmUpgrade, err := finalizeComparisonVersion(other, JVMFormat) - if err == nil { - if jvmUpgrade.rich.jvmVersion == nil { - return -1, fmt.Errorf("given empty jvmVersion object") - } - return jvmUpgrade.rich.jvmVersion.compare(*v), nil - } - - semUpgrade, err := finalizeComparisonVersion(other, SemanticFormat) - if err == nil { - if semUpgrade.rich.semVer == nil { - return -1, fmt.Errorf("given empty semVer object") - } - return semUpgrade.rich.semVer.verObj.Compare(v.semVer), nil + o, err := newJvmVersion(other.Raw) + if err != nil { + return 0, err } - - return -1, NewUnsupportedFormatError(JVMFormat, other.Format) + return v.compare(o), nil } func (v jvmVersion) compare(other jvmVersion) int { diff --git a/grype/version/jvm_version_test.go b/grype/version/jvm_version_test.go index a1e0df8e952..330e0182b2b 100644 --- a/grype/version/jvm_version_test.go +++ b/grype/version/jvm_version_test.go @@ -4,11 +4,73 @@ import ( "strings" "testing" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestVersionJVM(t *testing.T) { +func TestJVMVersion_Constraint(t *testing.T) { + tests := []testCase{ + // pre jep 223 versions + {version: "1.7.0_80", constraint: "< 1.8.0", satisfied: true}, + {version: "1.8.0_131", constraint: "> 1.8.0", satisfied: true}, + {version: "1.8.0_131", constraint: "< 1.8.0_132", satisfied: true}, + {version: "1.8.0_131-b11", constraint: "< 1.8.0_132", satisfied: true}, + + {version: "1.7.0_80", constraint: "> 1.8.0", satisfied: false}, + {version: "1.8.0_131", constraint: "< 1.8.0", satisfied: false}, + {version: "1.8.0_131", constraint: "> 1.8.0_132", satisfied: false}, + {version: "1.8.0_131-b11", constraint: "> 1.8.0_132", satisfied: false}, + + {version: "1.7.0_80", constraint: "= 1.8.0", satisfied: false}, + {version: "1.8.0_131", constraint: "= 1.8.0", satisfied: false}, + {version: "1.8.0_131", constraint: "= 1.8.0_132", satisfied: false}, + {version: "1.8.0_131-b11", constraint: "= 1.8.0_132", satisfied: false}, + + {version: "1.8.0_80", constraint: "= 1.8.0_80", satisfied: true}, + {version: "1.8.0_131", constraint: ">= 
1.8.0_131", satisfied: true}, + {version: "1.8.0_131", constraint: "= 1.8.0_131-b001", satisfied: true}, // builds should not matter + {version: "1.8.0_131-ea-b11", constraint: "= 1.8.0_131-ea", satisfied: true}, + + // jep 223 versions + {version: "8.0.4", constraint: "> 8.0.3", satisfied: true}, + {version: "8.0.4", constraint: "< 8.0.5", satisfied: true}, + {version: "9.0.0", constraint: "> 8.0.5", satisfied: true}, + {version: "9.0.0", constraint: "< 9.1.0", satisfied: true}, + {version: "11.0.4", constraint: "<= 11.0.4", satisfied: true}, + {version: "11.0.5", constraint: "> 11.0.4", satisfied: true}, + + {version: "8.0.4", constraint: "< 8.0.3", satisfied: false}, + {version: "8.0.4", constraint: "> 8.0.5", satisfied: false}, + {version: "9.0.0", constraint: "< 8.0.5", satisfied: false}, + {version: "9.0.0", constraint: "> 9.1.0", satisfied: false}, + {version: "11.0.4", constraint: "> 11.0.4", satisfied: false}, + {version: "11.0.5", constraint: "< 11.0.4", satisfied: false}, + + // mixed versions + {version: "1.8.0_131", constraint: "< 9.0.0", satisfied: true}, // 1.8.0_131 -> 8.0.131 + {version: "9.0.0", constraint: "> 1.8.0_131", satisfied: true}, // 1.8.0_131 -> 8.0.131 + {version: "1.8.0_131", constraint: "<= 8.0.131", satisfied: true}, + {version: "1.8.0_131", constraint: "> 7.0.79", satisfied: true}, + {version: "1.8.0_131", constraint: "= 8.0.131", satisfied: true}, + {version: "1.8.0_131", constraint: ">= 9.0.0", satisfied: false}, + {version: "9.0.1", constraint: "< 8.0.131", satisfied: false}, + + // pre-release versions + {version: "1.8.0_131-ea", constraint: "< 1.8.0_131", satisfied: true}, + {version: "1.8.0_131", constraint: "> 1.8.0_131-ea", satisfied: true}, + {version: "9.0.0-ea", constraint: "< 9.0.0", satisfied: true}, + {version: "9.0.0-ea", constraint: "> 1.8.0_131", satisfied: true}, + } + + for _, test := range tests { + t.Run(test.version+"_constraint_"+test.constraint, func(t *testing.T) { + constraint, err := GetConstraint(test.constraint, JVMFormat) + require.NoError(t, err) + test.assertVersionConstraint(t, JVMFormat, constraint) + }) + } +} + +func TestJVMVersion_Compare(t *testing.T) { tests := []struct { v1 string v2 string @@ -62,21 +124,20 @@ func TestVersionJVM(t *testing.T) { for _, test := range tests { name := test.v1 + "_vs_" + test.v2 t.Run(name, func(t *testing.T) { - v1, err := newJvmVersion(test.v1) + v1 := NewVersion(test.v1, JVMFormat) require.NotNil(t, v1) - require.NoError(t, err) - v2, err := newJvmVersion(test.v2) + v2 := NewVersion(test.v2, JVMFormat) require.NotNil(t, v2) - require.NoError(t, err) - actual := v1.compare(*v2) - assert.Equal(t, test.expected, actual) + actual, err := v1.Compare(v2) + require.NoError(t, err) + require.Equal(t, test.expected, actual) }) } } -func TestConvertNonCompliantSemver(t *testing.T) { +func TestJVMVersion_ConvertNonCompliantSemver(t *testing.T) { tests := []struct { name string input string @@ -122,12 +183,12 @@ func TestConvertNonCompliantSemver(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { result := convertNonCompliantSemver(tt.input) - assert.Equal(t, tt.expected, result) + require.Equal(t, tt.expected, result) }) } } -func TestVersionJVM_invalid(t *testing.T) { +func TestJVMVersion_Invalid(t *testing.T) { tests := []struct { name string version string @@ -145,14 +206,13 @@ func TestVersionJVM_invalid(t *testing.T) { if tt.wantErr == nil { tt.wantErr = require.NoError } - v, err := newJvmVersion(tt.version) - assert.Nil(t, v) + _, err := 
newJvmVersion(tt.version) tt.wantErr(t, err) }) } } -func TestJvmVersionCompare_Formats(t *testing.T) { +func TestJvmVersion_Compare_Formats(t *testing.T) { tests := []struct { name string thisVersion string @@ -195,23 +255,7 @@ func TestJvmVersionCompare_Formats(t *testing.T) { otherVersion: "not-valid-jvm-or-semver", otherFormat: UnknownFormat, expectError: true, - errorSubstring: "unsupported version format for comparison", - }, - { - name: "different format returns error - apk", - thisVersion: "1.8.0_275", - otherVersion: "1.8.0-r1", - otherFormat: ApkFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, - { - name: "different format returns error - deb", - thisVersion: "1.8.0_275", - otherVersion: "1.8.0-1", - otherFormat: DebFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", + errorSubstring: "invalid", }, } @@ -220,84 +264,50 @@ func TestJvmVersionCompare_Formats(t *testing.T) { thisVer, err := newJvmVersion(test.thisVersion) require.NoError(t, err) - otherVer, err := NewVersion(test.otherVersion, test.otherFormat) - require.NoError(t, err) + otherVer := NewVersion(test.otherVersion, test.otherFormat) result, err := thisVer.Compare(otherVer) if test.expectError { - assert.Error(t, err) + require.Error(t, err) if test.errorSubstring != "" { - assert.True(t, strings.Contains(err.Error(), test.errorSubstring), - "Expected error to contain '%s', got: %v", test.errorSubstring, err) + require.ErrorContains(t, err, test.errorSubstring) } } else { - assert.NoError(t, err) - assert.Contains(t, []int{-1, 0, 1}, result, "Expected comparison result to be -1, 0, or 1") + require.NoError(t, err) + require.Contains(t, []int{-1, 0, 1}, result, "Expected comparison result to be -1, 0, or 1") } }) } } -func TestJvmVersionCompareEdgeCases(t *testing.T) { +func TestJvmVersion_Compare_EdgeCases(t *testing.T) { tests := []struct { name string - setupFunc func() (*jvmVersion, *Version) + setupFunc func(testing.TB) (*Version, *Version) expectError bool errorSubstring string }{ { name: "nil version object", - setupFunc: func() (*jvmVersion, *Version) { - thisVer, _ := newJvmVersion("1.8.0_275") + setupFunc: func(t testing.TB) (*Version, *Version) { + thisVer := NewVersion("1.8.0_275", JVMFormat) return thisVer, nil }, expectError: true, errorSubstring: "no version provided for comparison", }, - { - name: "jvm format but empty jvmVersion object", - setupFunc: func() (*jvmVersion, *Version) { - thisVer, _ := newJvmVersion("1.8.0_275") - - otherVer := &Version{ - Raw: "1.8.0_281", - Format: JVMFormat, - rich: rich{}, // jvmVersion will be nil - } - - return thisVer, otherVer - }, - expectError: true, - errorSubstring: "given empty jvmVersion object", - }, - { - name: "semantic format but empty semVer object", - setupFunc: func() (*jvmVersion, *Version) { - thisVer, _ := newJvmVersion("1.8.0_275") - - otherVer := &Version{ - Raw: "1.8.1", - Format: SemanticFormat, - rich: rich{}, // semVer will be nil - } - - return thisVer, otherVer - }, - expectError: true, - errorSubstring: "given empty semVer object", - }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer, otherVer := test.setupFunc() + thisVer, otherVer := test.setupFunc(t) _, err := thisVer.Compare(otherVer) - assert.Error(t, err) + require.Error(t, err) if test.errorSubstring != "" { - assert.True(t, strings.Contains(err.Error(), test.errorSubstring), + require.True(t, strings.Contains(err.Error(), test.errorSubstring), "Expected error to 
contain '%s', got: %v", test.errorSubstring, err) } }) diff --git a/grype/version/kb_constraint.go b/grype/version/kb_constraint.go new file mode 100644 index 00000000000..9c9aabc65e1 --- /dev/null +++ b/grype/version/kb_constraint.go @@ -0,0 +1,61 @@ +package version + +import "fmt" + +type kbConstraint struct { + Raw string + Expression simpleRangeExpression +} + +func newKBConstraint(raw string) (kbConstraint, error) { + if raw == "" { + // an empty constraint is always satisfied + return kbConstraint{}, nil + } + + constraints, err := parseRangeExpression(raw) + if err != nil { + return kbConstraint{}, fmt.Errorf("unable to parse kb constraint phrase: %w", err) + } + + return kbConstraint{ + Raw: raw, + Expression: constraints, + }, nil +} + +func (c kbConstraint) Satisfied(version *Version) (bool, error) { + if c.Raw == "" { + // an empty constraint is never satisfied + return false, &NonFatalConstraintError{ + constraint: c, + version: version, + message: "unexpected data in DB: empty raw version constraint", + } + } + + if version == nil { + return true, nil + } + + if version.Format != KBFormat { + return false, newUnsupportedFormatError(KBFormat, version) + } + + return c.Expression.satisfied(KBFormat, version) +} + +func (c kbConstraint) Format() Format { + return KBFormat +} + +func (c kbConstraint) String() string { + if c.Raw == "" { + return fmt.Sprintf("%q (kb)", c.Raw) // with quotes + } + return fmt.Sprintf("%s (kb)", c.Raw) // no quotes +} + +func (c kbConstraint) Value() string { + return c.Raw +} diff --git a/grype/version/kb_constraint_test.go b/grype/version/kb_constraint_test.go new file mode 100644 index 00000000000..c04c8465d2a --- /dev/null +++ b/grype/version/kb_constraint_test.go @@ -0,0 +1,46 @@ +package version + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestKbVersion_Constraint(t *testing.T) { + tests := []testCase{ + { + name: "no constraint no version raises error", + version: "", constraint: "", + satisfied: false, + wantError: func(t require.TestingT, err error, msgAndArgs ...interface{}) { + var expectedError *NonFatalConstraintError + assert.ErrorAs(t, err, &expectedError, "Unexpected error type from kbConstraint.Satisfied: %v", err) + }, + }, + { + name: "no constraint with version raises error", + version: "878787", constraint: "", + satisfied: false, + wantError: func(t require.TestingT, err error, msgAndArgs ...interface{}) { + var expectedError *NonFatalConstraintError + assert.ErrorAs(t, err, &expectedError, "Unexpected error type from kbConstraint.Satisfied: %v", err) + }, + }, + {name: "no version is unsatisfied", version: "", constraint: "foo", satisfied: false}, + {name: "version constraint mismatch", version: "1", constraint: "foo", satisfied: false}, + {name: "matching version and constraint", version: "1", constraint: "1", satisfied: true}, + {name: "base keyword matching version and constraint", version: "base", constraint: "base", satisfied: true}, + {name: "version and OR constraint match", version: "878787", constraint: "979797 || 101010 || 878787", satisfied: true}, + {name: "version and OR constraint mismatch", version: "478787", constraint: "979797 || 101010 || 878787", satisfied: false}, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + constraint, err := GetConstraint(test.constraint, KBFormat) + assert.NoError(t, err, "unexpected error from newKBConstraint: %v", err) + + test.assertVersionConstraint(t, KBFormat, constraint) + }) + 
} +} diff --git a/grype/version/kb_contraint.go b/grype/version/kb_contraint.go deleted file mode 100644 index c2367d7e95a..00000000000 --- a/grype/version/kb_contraint.go +++ /dev/null @@ -1,65 +0,0 @@ -package version - -import ( - "fmt" -) - -type kbConstraint struct { - raw string - expression constraintExpression -} - -func newKBConstraint(raw string) (kbConstraint, error) { - if raw == "" { - // an empty constraint is always satisfied - return kbConstraint{}, nil - } - - constraints, err := newConstraintExpression(raw, newKBComparator) - if err != nil { - return kbConstraint{}, fmt.Errorf("unable to parse kb constraint phrase: %w", err) - } - - return kbConstraint{ - raw: raw, - expression: constraints, - }, nil -} - -func newKBComparator(unit constraintUnit) (Comparator, error) { - // XXX unit.version is probably not needed because newKBVersion doesn't do anything - ver := newKBVersion(unit.version) - return &ver, nil -} - -func (c kbConstraint) supported(format Format) bool { - return format == KBFormat -} - -func (c kbConstraint) Satisfied(version *Version) (bool, error) { - if c.raw == "" { - // an empty constraint is never satisfied - return false, &NonFatalConstraintError{ - constraint: c, - version: version, - message: "Unexpected data in DB: Empty raw version constraint.", - } - } - - if version == nil { - return true, nil - } - - if !c.supported(version.Format) { - return false, NewUnsupportedFormatError(KBFormat, version.Format) - } - - return c.expression.satisfied(version) -} - -func (c kbConstraint) String() string { - if c.raw == "" { - return fmt.Sprintf("%q (kb)", c.raw) - } - return fmt.Sprintf("%s (kb)", c.raw) -} diff --git a/grype/version/kb_contraint_test.go b/grype/version/kb_contraint_test.go deleted file mode 100644 index ba0fbcce902..00000000000 --- a/grype/version/kb_contraint_test.go +++ /dev/null @@ -1,35 +0,0 @@ -package version - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestVersionKbConstraint(t *testing.T) { - tests := []testCase{ - {name: "no constraint no version raises error", version: "", constraint: "", satisfied: false, shouldErr: true, errorAssertion: func(t *testing.T, err error) { - var expectedError *NonFatalConstraintError - assert.ErrorAs(t, err, &expectedError, "Unexpected error type from kbConstraint.Satisfied: %v", err) - }}, - {name: "no constraint with version raises error", version: "878787", constraint: "", satisfied: false, shouldErr: true, errorAssertion: func(t *testing.T, err error) { - var expectedError *NonFatalConstraintError - assert.ErrorAs(t, err, &expectedError, "Unexpected error type from kbConstraint.Satisfied: %v", err) - }}, - {name: "no version is unsatisifed", version: "", constraint: "foo", satisfied: false}, - {name: "version constraint mismatch", version: "1", constraint: "foo", satisfied: false}, - {name: "matching version and constraint", version: "1", constraint: "1", satisfied: true}, - {name: "base keyword matching version and constraint", version: "base", constraint: "base", satisfied: true}, - {name: "version and OR constraint match", version: "878787", constraint: "979797 || 101010 || 878787", satisfied: true}, - {name: "version and OR constraint mismatch", version: "478787", constraint: "979797 || 101010 || 878787", satisfied: false}, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - constraint, err := newKBConstraint(test.constraint) - assert.NoError(t, err, "unexpected error from newKBConstraint: %v", err) - - test.assertVersionConstraint(t, 
KBFormat, constraint) - }) - } -} diff --git a/grype/version/kb_version.go b/grype/version/kb_version.go index 37074c115bf..77a92617933 100644 --- a/grype/version/kb_version.go +++ b/grype/version/kb_version.go @@ -1,37 +1,32 @@ package version import ( - "fmt" "reflect" ) +var _ Comparator = (*kbVersion)(nil) + type kbVersion struct { version string } func newKBVersion(raw string) kbVersion { - // XXX Is this even useful/necessary? return kbVersion{ version: raw, } } -func (v *kbVersion) Compare(other *Version) (int, error) { - other, err := finalizeComparisonVersion(other, KBFormat) - if err != nil { - return -1, err - } - - if other.rich.kbVer == nil { - return -1, fmt.Errorf("given empty kbVersion object") +func (v kbVersion) Compare(other *Version) (int, error) { + if other == nil { + return -1, ErrNoVersionProvided } - return other.rich.kbVer.compare(*v), nil + return v.compare(newKBVersion(other.Raw)), nil } -// Compare returns 0 if v == v2, 1 otherwise -func (v kbVersion) compare(v2 kbVersion) int { - if reflect.DeepEqual(v, v2) { +// compare returns 0 if v == v2, 1 otherwise +func (v kbVersion) compare(other kbVersion) int { + if reflect.DeepEqual(v, other) { return 0 } diff --git a/grype/version/kb_version_test.go b/grype/version/kb_version_test.go index b379c999614..17a55b6de07 100644 --- a/grype/version/kb_version_test.go +++ b/grype/version/kb_version_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestKbVersionCompare(t *testing.T) { +func TestKbVersion_Compare(t *testing.T) { tests := []struct { name string thisVersion string @@ -25,12 +25,11 @@ func TestKbVersionCompare(t *testing.T) { expectError: false, }, { - name: "different format returns error", - thisVersion: "KB4562562", - otherVersion: "1.2.3", - otherFormat: SemanticFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", + name: "different format does not return error", + thisVersion: "KB4562562", + otherVersion: "1.2.3", + otherFormat: SemanticFormat, + expectError: false, }, { name: "unknown format attempts upgrade - valid kb format", @@ -45,8 +44,7 @@ func TestKbVersionCompare(t *testing.T) { t.Run(test.name, func(t *testing.T) { thisVer := newKBVersion(test.thisVersion) - otherVer, err := NewVersion(test.otherVersion, test.otherFormat) - require.NoError(t, err) + otherVer := NewVersion(test.otherVersion, test.otherFormat) result, err := thisVer.Compare(otherVer) @@ -64,47 +62,31 @@ func TestKbVersionCompare(t *testing.T) { } } -func TestKbVersionCompareEdgeCases(t *testing.T) { +func TestKbVersion_Compare_EdgeCases(t *testing.T) { tests := []struct { name string - setupFunc func() (*kbVersion, *Version) + setupFunc func(testing.TB) (*Version, *Version) expectError bool errorSubstring string }{ { name: "nil version object", - setupFunc: func() (*kbVersion, *Version) { - thisVer := newKBVersion("KB4562562") - return &thisVer, nil + setupFunc: func(t testing.TB) (*Version, *Version) { + v := NewVersion("KB4562562", KBFormat) + return v, nil }, expectError: true, errorSubstring: "no version provided for comparison", }, - { - name: "empty kbVersion in other object", - setupFunc: func() (*kbVersion, *Version) { - thisVer := newKBVersion("KB4562562") - - otherVer := &Version{ - Raw: "KB4562563", - Format: KBFormat, - rich: rich{}, - } - - return &thisVer, otherVer - }, - expectError: true, - errorSubstring: "given empty kbVersion object", - }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer, otherVer := test.setupFunc() 
+ thisVer, otherVer := test.setupFunc(t) _, err := thisVer.Compare(otherVer) - assert.Error(t, err) + require.Error(t, err) if test.errorSubstring != "" { assert.True(t, strings.Contains(err.Error(), test.errorSubstring), "Expected error to contain '%s', got: %v", test.errorSubstring, err) diff --git a/grype/version/maven_constraint.go b/grype/version/maven_constraint.go deleted file mode 100644 index 77c4e95ec2f..00000000000 --- a/grype/version/maven_constraint.go +++ /dev/null @@ -1,72 +0,0 @@ -package version - -import "fmt" - -type mavenConstraint struct { - raw string - expression constraintExpression -} - -func newMavenConstraint(raw string) (mavenConstraint, error) { - if raw == "" { - // empty constraints are always satisfied - return mavenConstraint{}, nil - } - - constraints, err := newConstraintExpression(raw, newMavenComparator) - if err != nil { - return mavenConstraint{}, fmt.Errorf("unable to parse maven constraint phrase: %w", err) - } - - return mavenConstraint{ - raw: raw, - expression: constraints, - }, nil -} - -func newMavenComparator(unit constraintUnit) (Comparator, error) { - ver, err := newMavenVersion(unit.version) - if err != nil { - return nil, fmt.Errorf("unable to parse constraint version (%s): %w", unit.version, err) - } - - return ver, nil -} - -func (c mavenConstraint) supported(format Format) bool { - return format == MavenFormat -} - -func (c mavenConstraint) Satisfied(version *Version) (satisfied bool, err error) { - if c.raw == "" && version != nil { - // empty constraints are always satisfied - return true, nil - } - - if version == nil { - if c.raw != "" { - // a non-empty constraint with no version given should always fail - return false, nil - } - - return true, nil - } - - if !c.supported(version.Format) { - return false, NewUnsupportedFormatError(MavenFormat, version.Format) - } - - if version.rich.mavenVer == nil { - return false, fmt.Errorf("no rich apk version given: %+v", version) - } - - return c.expression.satisfied(version) -} - -func (c mavenConstraint) String() string { - if c.raw == "" { - return "none (maven)" - } - - return fmt.Sprintf("%s (maven)", c.raw) -} diff --git a/grype/version/maven_constraint_test.go b/grype/version/maven_constraint_test.go deleted file mode 100644 index ff5a9888997..00000000000 --- a/grype/version/maven_constraint_test.go +++ /dev/null @@ -1,106 +0,0 @@ -package version - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestVersionConstraintJava(t *testing.T) { - tests := []testCase{ - {version: "1", constraint: "< 2.5", satisfied: true}, - {version: "1.0", constraint: "< 1.1", satisfied: true}, - {version: "1.1", constraint: "< 1.2", satisfied: true}, - {version: "1.0.0", constraint: "< 1.1", satisfied: true}, - {version: "1.0.1", constraint: "< 1.1", satisfied: true}, - {version: "1.1", constraint: "> 1.2.0", satisfied: false}, - {version: "1.0-alpha-1", constraint: "> 1.0", satisfied: false}, - {version: "1.0-alpha-1", constraint: "> 1.0-alpha-2", satisfied: false}, - {version: "1.0-alpha-1", constraint: "< 1.0-beta-1", satisfied: true}, - {version: "1.0-beta-1", constraint: "< 1.0-SNAPSHOT", satisfied: true}, - {version: "1.0-SNAPSHOT", constraint: "< 1.0", satisfied: true}, - {version: "1.0-alpha-1-SNAPSHOT", constraint: "> 1.0-alpha-1", satisfied: false}, - {version: "1.0", constraint: "< 1.0-1", satisfied: true}, - {version: "1.0-1", constraint: "< 1.0-2", satisfied: true}, - {version: "1.0.0", constraint: "< 1.0-1", satisfied: true}, - {version: "2.0-1", constraint: "> 
2.0.1", satisfied: false}, - {version: "2.0.1-klm", constraint: "> 2.0.1-lmn", satisfied: false}, - {version: "2.0.1", constraint: "< 2.0.1-xyz", satisfied: true}, - {version: "2.0.1", constraint: "< 2.0.1-123", satisfied: true}, - {version: "2.0.1-xyz", constraint: "< 2.0.1-123", satisfied: true}, - {version: "2.414.2-cb-5", constraint: "> 2.414.2", satisfied: true}, - {version: "5.2.25.RELEASE", constraint: "< 5.2.25", satisfied: false}, - {version: "5.2.25.RELEASE", constraint: "<= 5.2.25", satisfied: true}, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - constraint, err := newMavenConstraint(test.constraint) - - assert.NoError(t, err, "unexpected error from newMavenConstraint %s: %v", test.version, err) - test.assertVersionConstraint(t, MavenFormat, constraint) - - }) - } -} - -func TestVersionEqualityJava(t *testing.T) { - tests := []testCase{ - {version: "1", constraint: "1", satisfied: true}, - {version: "1", constraint: "1.0", satisfied: true}, - {version: "1", constraint: "1.0.0", satisfied: true}, - {version: "1.0", constraint: "1.0.0", satisfied: true}, - {version: "1", constraint: "1-0", satisfied: true}, - {version: "1", constraint: "1.0-0", satisfied: true}, - {version: "1.0", constraint: "1.0-0", satisfied: true}, - {version: "1a", constraint: "1-a", satisfied: true}, - {version: "1a", constraint: "1.0-a", satisfied: true}, - {version: "1a", constraint: "1.0.0-a", satisfied: true}, - {version: "1.0a", constraint: "1-a", satisfied: true}, - {version: "1.0.0a", constraint: "1-a", satisfied: true}, - {version: "1x", constraint: "1-x", satisfied: true}, - {version: "1x", constraint: "1.0-x", satisfied: true}, - {version: "1x", constraint: "1.0.0-x", satisfied: true}, - {version: "1.0x", constraint: "1-x", satisfied: true}, - {version: "1.0.0x", constraint: "1-x", satisfied: true}, - {version: "1ga", constraint: "1", satisfied: true}, - {version: "1release", constraint: "1", satisfied: true}, - {version: "1final", constraint: "1", satisfied: true}, - {version: "1cr", constraint: "1rc", satisfied: true}, - {version: "1a1", constraint: "1-alpha-1", satisfied: true}, - {version: "1b2", constraint: "1-beta-2", satisfied: true}, - {version: "1m3", constraint: "1-milestone-3", satisfied: true}, - {version: "1X", constraint: "1x", satisfied: true}, - {version: "1A", constraint: "1a", satisfied: true}, - {version: "1B", constraint: "1b", satisfied: true}, - {version: "1M", constraint: "1m", satisfied: true}, - {version: "1Ga", constraint: "1", satisfied: true}, - {version: "1GA", constraint: "1", satisfied: true}, - {version: "1RELEASE", constraint: "1", satisfied: true}, - {version: "1release", constraint: "1", satisfied: true}, - {version: "1RELeaSE", constraint: "1", satisfied: true}, - {version: "1Final", constraint: "1", satisfied: true}, - {version: "1FinaL", constraint: "1", satisfied: true}, - {version: "1FINAL", constraint: "1", satisfied: true}, - {version: "1Cr", constraint: "1Rc", satisfied: true}, - {version: "1cR", constraint: "1rC", satisfied: true}, - {version: "1m3", constraint: "1Milestone3", satisfied: true}, - {version: "1m3", constraint: "1MileStone3", satisfied: true}, - {version: "1m3", constraint: "1MILESTONE3", satisfied: true}, - {version: "1", constraint: "01", satisfied: true}, - {version: "1", constraint: "001", satisfied: true}, - {version: "1.1", constraint: "1.01", satisfied: true}, - {version: "1.1", constraint: "1.001", satisfied: true}, - {version: "1-1", constraint: "1-01", satisfied: true}, - {version: "1-1", constraint: 
"1-001", satisfied: true}, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - constraint, err := newMavenConstraint(test.constraint) - - assert.NoError(t, err, "unexpected error from newMavenConstraint %s: %v", test.version, err) - test.assertVersionConstraint(t, MavenFormat, constraint) - }) - } -} diff --git a/grype/version/maven_version.go b/grype/version/maven_version.go index 916cf1fe363..44b9a613b3c 100644 --- a/grype/version/maven_version.go +++ b/grype/version/maven_version.go @@ -6,47 +6,52 @@ import ( mvnv "github.com/masahiro331/go-mvn-version" ) +var _ Comparator = (*mavenVersion)(nil) + type mavenVersion struct { - raw string - version mvnv.Version + raw string + obj mvnv.Version } -func newMavenVersion(raw string) (*mavenVersion, error) { +func newMavenVersion(raw string) (mavenVersion, error) { ver, err := mvnv.NewVersion(raw) if err != nil { - return nil, fmt.Errorf("could not generate new java version from: %s; %w", raw, err) + return mavenVersion{}, fmt.Errorf("could not generate new java version from: %s; %w", raw, err) } - return &mavenVersion{ - raw: raw, - version: ver, + return mavenVersion{ + raw: raw, + obj: ver, }, nil } // Compare returns 0 if other == j, 1 if other > j, and -1 if other < j. // If an error is returned, the int value is -1 -func (j *mavenVersion) Compare(other *Version) (int, error) { - other, err := finalizeComparisonVersion(other, MavenFormat) - if err != nil { - return -1, err +func (v mavenVersion) Compare(other *Version) (int, error) { + if other == nil { + return -1, fmt.Errorf("cannot compare nil version with %v", other) } - if other.rich.mavenVer == nil { - return -1, fmt.Errorf("given empty mavenVersion object") + + o, err := newMavenVersion(other.Raw) + if err != nil { + return 0, err } + return v.compare(o.obj) +} - submittedVersion := other.rich.mavenVer.version - if submittedVersion.Equal(j.version) { +func (v mavenVersion) compare(other mvnv.Version) (int, error) { + if v.obj.Equal(other) { return 0, nil } - if submittedVersion.LessThan(j.version) { + if v.obj.LessThan(other) { return -1, nil } - if submittedVersion.GreaterThan(j.version) { + if v.obj.GreaterThan(other) { return 1, nil } return -1, fmt.Errorf( "could not compare java versions: %v with %v", - submittedVersion.String(), - j.version.String()) + other.String(), + v.obj.String()) } diff --git a/grype/version/maven_version_test.go b/grype/version/maven_version_test.go index 633cea0de59..37bc1bb9c14 100644 --- a/grype/version/maven_version_test.go +++ b/grype/version/maven_version_test.go @@ -8,79 +8,164 @@ import ( "github.com/stretchr/testify/require" ) -func Test_javaVersion_Compare(t *testing.T) { +func TestMavenVersion_Constraint(t *testing.T) { + tests := []testCase{ + // range expressions + {version: "1", constraint: "< 2.5", satisfied: true}, + {version: "1.0", constraint: "< 1.1", satisfied: true}, + {version: "1.1", constraint: "< 1.2", satisfied: true}, + {version: "1.0.0", constraint: "< 1.1", satisfied: true}, + {version: "1.0.1", constraint: "< 1.1", satisfied: true}, + {version: "1.1", constraint: "> 1.2.0", satisfied: false}, + {version: "1.0-alpha-1", constraint: "> 1.0", satisfied: false}, + {version: "1.0-alpha-1", constraint: "> 1.0-alpha-2", satisfied: false}, + {version: "1.0-alpha-1", constraint: "< 1.0-beta-1", satisfied: true}, + {version: "1.0-beta-1", constraint: "< 1.0-SNAPSHOT", satisfied: true}, + {version: "1.0-SNAPSHOT", constraint: "< 1.0", satisfied: true}, + {version: "1.0-alpha-1-SNAPSHOT", constraint: "> 
1.0-alpha-1", satisfied: false}, + {version: "1.0", constraint: "< 1.0-1", satisfied: true}, + {version: "1.0-1", constraint: "< 1.0-2", satisfied: true}, + {version: "1.0.0", constraint: "< 1.0-1", satisfied: true}, + {version: "2.0-1", constraint: "> 2.0.1", satisfied: false}, + {version: "2.0.1-klm", constraint: "> 2.0.1-lmn", satisfied: false}, + {version: "2.0.1", constraint: "< 2.0.1-xyz", satisfied: true}, + {version: "2.0.1", constraint: "< 2.0.1-123", satisfied: true}, + {version: "2.0.1-xyz", constraint: "< 2.0.1-123", satisfied: true}, + {version: "2.414.2-cb-5", constraint: "> 2.414.2", satisfied: true}, + {version: "5.2.25.RELEASE", constraint: "< 5.2.25", satisfied: false}, + {version: "5.2.25.RELEASE", constraint: "<= 5.2.25", satisfied: true}, + + // equality expressions + {version: "1", constraint: "1", satisfied: true}, + {version: "1", constraint: "1.0", satisfied: true}, + {version: "1", constraint: "1.0.0", satisfied: true}, + {version: "1.0", constraint: "1.0.0", satisfied: true}, + {version: "1", constraint: "1-0", satisfied: true}, + {version: "1", constraint: "1.0-0", satisfied: true}, + {version: "1.0", constraint: "1.0-0", satisfied: true}, + {version: "1a", constraint: "1-a", satisfied: true}, + {version: "1a", constraint: "1.0-a", satisfied: true}, + {version: "1a", constraint: "1.0.0-a", satisfied: true}, + {version: "1.0a", constraint: "1-a", satisfied: true}, + {version: "1.0.0a", constraint: "1-a", satisfied: true}, + {version: "1x", constraint: "1-x", satisfied: true}, + {version: "1x", constraint: "1.0-x", satisfied: true}, + {version: "1x", constraint: "1.0.0-x", satisfied: true}, + {version: "1.0x", constraint: "1-x", satisfied: true}, + {version: "1.0.0x", constraint: "1-x", satisfied: true}, + {version: "1ga", constraint: "1", satisfied: true}, + {version: "1release", constraint: "1", satisfied: true}, + {version: "1final", constraint: "1", satisfied: true}, + {version: "1cr", constraint: "1rc", satisfied: true}, + {version: "1a1", constraint: "1-alpha-1", satisfied: true}, + {version: "1b2", constraint: "1-beta-2", satisfied: true}, + {version: "1m3", constraint: "1-milestone-3", satisfied: true}, + {version: "1X", constraint: "1x", satisfied: true}, + {version: "1A", constraint: "1a", satisfied: true}, + {version: "1B", constraint: "1b", satisfied: true}, + {version: "1M", constraint: "1m", satisfied: true}, + {version: "1Ga", constraint: "1", satisfied: true}, + {version: "1GA", constraint: "1", satisfied: true}, + {version: "1RELEASE", constraint: "1", satisfied: true}, + {version: "1release", constraint: "1", satisfied: true}, + {version: "1RELeaSE", constraint: "1", satisfied: true}, + {version: "1Final", constraint: "1", satisfied: true}, + {version: "1FinaL", constraint: "1", satisfied: true}, + {version: "1FINAL", constraint: "1", satisfied: true}, + {version: "1Cr", constraint: "1Rc", satisfied: true}, + {version: "1cR", constraint: "1rC", satisfied: true}, + {version: "1m3", constraint: "1Milestone3", satisfied: true}, + {version: "1m3", constraint: "1MileStone3", satisfied: true}, + {version: "1m3", constraint: "1MILESTONE3", satisfied: true}, + {version: "1", constraint: "01", satisfied: true}, + {version: "1", constraint: "001", satisfied: true}, + {version: "1.1", constraint: "1.01", satisfied: true}, + {version: "1.1", constraint: "1.001", satisfied: true}, + {version: "1-1", constraint: "1-01", satisfied: true}, + {version: "1-1", constraint: "1-001", satisfied: true}, + } + + for _, test := range tests { + t.Run(test.name, func(t 
*testing.T) { + constraint, err := GetConstraint(test.constraint, MavenFormat) + + assert.NoError(t, err, "unexpected error from newMavenConstraint %s: %v", test.version, err) + test.assertVersionConstraint(t, MavenFormat, constraint) + + }) + } +} + +func TestMavenVersion_Compare(t *testing.T) { tests := []struct { - name string - compare string - want int + v1 string + v2 string + want int }{ { - name: "1", - compare: "2", - want: -1, + v1: "1", + v2: "2", + want: -1, }, { - name: "1.8.0_282", - compare: "1.8.0_282", - want: 0, + v1: "1.8.0_282", + v2: "1.8.0_282", + want: 0, }, { - name: "2.5", - compare: "2.0", - want: 1, + v1: "2.5", + v2: "2.0", + want: 1, }, { - name: "2.414.2-cb-5", - compare: "2.414.2", - want: 1, + v1: "2.414.2-cb-5", + v2: "2.414.2", + want: 1, }, { - name: "5.2.25.RELEASE", // see https://mvnrepository.com/artifact/org.springframework/spring-web - compare: "5.2.25", - want: 0, + v1: "5.2.25.RELEASE", // see https://mvnrepository.com/artifact/org.springframework/spring-web + v2: "5.2.25", + want: 0, }, { - name: "5.2.25.release", - compare: "5.2.25", - want: 0, + v1: "5.2.25.release", + v2: "5.2.25", + want: 0, }, { - name: "5.2.25.FINAL", - compare: "5.2.25", - want: 0, + v1: "5.2.25.FINAL", + v2: "5.2.25", + want: 0, }, { - name: "5.2.25.final", - compare: "5.2.25", - want: 0, + v1: "5.2.25.final", + v2: "5.2.25", + want: 0, }, { - name: "5.2.25.GA", - compare: "5.2.25", - want: 0, + v1: "5.2.25.GA", + v2: "5.2.25", + want: 0, }, { - name: "5.2.25.ga", - compare: "5.2.25", - want: 0, + v1: "5.2.25.ga", + v2: "5.2.25", + want: 0, }, } for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - j, err := NewVersion(tt.name, MavenFormat) - assert.NoError(t, err) + t.Run(tt.v1+" vs "+tt.v2, func(t *testing.T) { + v1 := NewVersion(tt.v1, MavenFormat) + v2 := NewVersion(tt.v2, MavenFormat) - j2, err := NewVersion(tt.compare, MavenFormat) - assert.NoError(t, err) - - if got, _ := j2.rich.mavenVer.Compare(j); got != tt.want { + if got, _ := v1.Compare(v2); got != tt.want { t.Errorf("Compare() = %v, want %v", got, tt.want) } }) } } -func TestMavenVersionCompare_Format(t *testing.T) { +func TestMavenVersion_Compare_Format(t *testing.T) { tests := []struct { name string thisVersion string @@ -103,22 +188,6 @@ func TestMavenVersionCompare_Format(t *testing.T) { otherFormat: MavenFormat, expectError: false, }, - { - name: "different format returns error", - thisVersion: "1.2.3", - otherVersion: "1.2.3", - otherFormat: SemanticFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, - { - name: "different format returns error - apk", - thisVersion: "1.2.3", - otherVersion: "1.2.3-r4", - otherFormat: ApkFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, { name: "unknown format attempts upgrade - valid maven format", thisVersion: "1.2.3", @@ -130,11 +199,8 @@ func TestMavenVersionCompare_Format(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer, err := newMavenVersion(test.thisVersion) - require.NoError(t, err) - - otherVer, err := NewVersion(test.otherVersion, test.otherFormat) - require.NoError(t, err) + thisVer := NewVersion(test.thisVersion, MavenFormat) + otherVer := NewVersion(test.otherVersion, test.otherFormat) result, err := thisVer.Compare(otherVer) @@ -152,51 +218,35 @@ func TestMavenVersionCompare_Format(t *testing.T) { } } -func TestMavenVersionCompareEdgeCases(t *testing.T) { +func TestMavenVersion_Compare_EdgeCases(t *testing.T) 
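For orientation, a minimal sketch of the refactored Maven comparison path, assuming the GetConstraint and NewVersion signatures used in the tests above; the expected results mirror the 5.2.25.RELEASE rows in the tables, and the function name is purely illustrative:

package version

import "fmt"

// illustrative sketch only; errors are elided for brevity.
func exampleMavenComparison() {
	c, _ := GetConstraint("<= 5.2.25", MavenFormat)
	v := NewVersion("5.2.25.RELEASE", MavenFormat) // the RELEASE qualifier is treated as equal to the plain release

	satisfied, _ := c.Satisfied(v)
	fmt.Println(satisfied) // true: 5.2.25.RELEASE compares equal to 5.2.25

	result, _ := v.Compare(NewVersion("5.2.25", MavenFormat))
	fmt.Println(result) // 0
}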
{ tests := []struct { name string - setupFunc func() (*mavenVersion, *Version) + setupFunc func(testing.TB) (*Version, *Version) expectError bool errorSubstring string }{ { name: "nil version object", - setupFunc: func() (*mavenVersion, *Version) { - thisVer, _ := newMavenVersion("1.2.3") + setupFunc: func(t testing.TB) (*Version, *Version) { + thisVer := NewVersion("1.2.3", MavenFormat) return thisVer, nil }, expectError: true, errorSubstring: "no version provided for comparison", }, - { - name: "empty mavenVersion in other object", - setupFunc: func() (*mavenVersion, *Version) { - thisVer, _ := newMavenVersion("1.2.3") - - otherVer := &Version{ - Raw: "1.2.4", - Format: MavenFormat, - rich: rich{}, - } - - return thisVer, otherVer - }, - expectError: true, - errorSubstring: "given empty mavenVersion object", - }, { name: "incomparable maven versions", - setupFunc: func() (*mavenVersion, *Version) { + setupFunc: func(t testing.TB) (*Version, *Version) { // This test would be hard to construct in practice since the Maven // version library handles most comparisons, but we can simulate the // error condition by creating a mock that would trigger the last // error condition in the Compare function - thisVer, _ := newMavenVersion("1.2.3") + thisVer := NewVersion("1.2.3", MavenFormat) // We'd need to modify the otherVer manually to create a scenario // where none of the comparison methods return true, which is unlikely // in real usage but could be simulated for test coverage - otherVer, _ := NewVersion("1.2.4", MavenFormat) + otherVer := NewVersion("1.2.4", MavenFormat) return thisVer, otherVer }, @@ -207,12 +257,12 @@ func TestMavenVersionCompareEdgeCases(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer, otherVer := test.setupFunc() + thisVer, otherVer := test.setupFunc(t) _, err := thisVer.Compare(otherVer) if test.expectError { - assert.Error(t, err) + require.Error(t, err) if test.errorSubstring != "" { assert.True(t, strings.Contains(err.Error(), test.errorSubstring), "Expected error to contain '%s', got: %v", test.errorSubstring, err) diff --git a/grype/version/operator.go b/grype/version/operator.go index 17d4d498ead..9f3577d09c3 100644 --- a/grype/version/operator.go +++ b/grype/version/operator.go @@ -3,18 +3,16 @@ package version import "fmt" const ( - EQ operator = "=" - GT operator = ">" - LT operator = "<" - GTE operator = ">=" - LTE operator = "<=" - OR operator = "||" - AND operator = "," + EQ Operator = "=" + GT Operator = ">" + LT Operator = "<" + GTE Operator = ">=" + LTE Operator = "<=" ) -type operator string +type Operator string -func parseOperator(op string) (operator, error) { +func parseOperator(op string) (Operator, error) { switch op { case string(EQ), "": return EQ, nil @@ -26,10 +24,6 @@ func parseOperator(op string) (operator, error) { return LT, nil case string(LTE): return LTE, nil - case string(OR): - return OR, nil - case string(AND): - return AND, nil } return "", fmt.Errorf("unknown operator: '%s'", op) } diff --git a/grype/version/pep440_constraint.go b/grype/version/pep440_constraint.go deleted file mode 100644 index cc0c0b319ea..00000000000 --- a/grype/version/pep440_constraint.go +++ /dev/null @@ -1,62 +0,0 @@ -package version - -import "fmt" - -type pep440Constraint struct { - raw string - expression constraintExpression -} - -func (p pep440Constraint) String() string { - if p.raw == "" { - return "none (python)" - } - return fmt.Sprintf("%s (python)", p.raw) -} - -func (p pep440Constraint) Satisfied(version 
*Version) (bool, error) { - if p.raw == "" && version != nil { - // an empty constraint is always satisfied - return true, nil - } else if version == nil { - if p.raw != "" { - // a non-empty constraint with no version given should always fail - return false, nil - } - return true, nil - } - if version.Format != PythonFormat { - return false, NewUnsupportedFormatError(PythonFormat, version.Format) - } - - if version.rich.pep440version == nil { - return false, fmt.Errorf("no rich PEP440 version given: %+v", version) - } - return p.expression.satisfied(version) -} - -var _ Constraint = (*pep440Constraint)(nil) - -func newPep440Constraint(raw string) (pep440Constraint, error) { - if raw == "" { - return pep440Constraint{}, nil - } - - constraints, err := newConstraintExpression(raw, newPep440Comparator) - if err != nil { - return pep440Constraint{}, fmt.Errorf("unable to parse pep440 constrain phrase %w", err) - } - - return pep440Constraint{ - expression: constraints, - raw: raw, - }, nil -} - -func newPep440Comparator(unit constraintUnit) (Comparator, error) { - ver, err := newPep440Version(unit.version) - if err != nil { - return nil, fmt.Errorf("unable to parse constraint version (%s): %w", unit.version, err) - } - return ver, nil -} diff --git a/grype/version/pep440_constraint_test.go b/grype/version/pep440_constraint_test.go deleted file mode 100644 index 718145bebdf..00000000000 --- a/grype/version/pep440_constraint_test.go +++ /dev/null @@ -1,221 +0,0 @@ -package version - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestItWorks(t *testing.T) { - tests := []testCase{ - { - name: "empty constraint", - version: "2.3.1", - constraint: "", - satisfied: true, - }, - { - name: "version range within", - constraint: ">1.0, <2.0", - version: "1.2+beta-3", - satisfied: true, - }, - { - name: "version within compound range", - constraint: ">1.0, <2.0 || > 3.0", - version: "3.2+beta-3", - satisfied: true, - }, - { - name: "version within compound range (2)", - constraint: ">1.0, <2.0 || > 3.0", - version: "1.2+beta-3", - satisfied: true, - }, - { - name: "version not within compound range", - constraint: ">1.0, <2.0 || > 3.0", - version: "2.2+beta-3", - satisfied: false, - }, - { - name: "version range outside (right)", - constraint: ">1.0, <2.0", - version: "2.1-beta-3", - satisfied: false, - }, - { - name: "version range outside (left)", - constraint: ">1.0, <2.0", - version: "0.9-beta-2", - satisfied: false, - }, - { - name: "version range within (excluding left, prerelease)", - constraint: ">=1.0, <2.0", - version: "1.0-beta-3", - satisfied: false, - }, - { - name: "version range within (including left)", - constraint: ">=1.1, <2.0", - version: "1.1", - satisfied: true, - }, - { - name: "version range within (excluding right, 1)", - constraint: ">1.0, <=2.0", - version: "2.0-beta-3", - satisfied: true, - }, - { - name: "version range within (excluding right, 2)", - constraint: ">1.0, <2.0", - version: "2.0-beta-3", - satisfied: true, - }, - { - name: "version range within (including right)", - constraint: ">1.0, <=2.0", - version: "2.0", - satisfied: true, - }, - { - name: "version range within (including right, longer version [valid semver, bad fuzzy])", - constraint: ">1.0, <=2.0", - version: "2.0.0", - satisfied: true, - }, - { - name: "bad semver (eq)", - version: "5a2", - constraint: "=5a2", - satisfied: true, - }, - { - name: "bad semver (gt)", - version: "5a2", - constraint: ">5a1", - satisfied: true, - }, - { - name: 
"bad semver (lt)", - version: "5a2", - constraint: "<6a1", - satisfied: true, - }, - { - name: "bad semver (lte)", - version: "5a2", - constraint: "<=5a2", - satisfied: true, - }, - { - name: "bad semver (gte)", - version: "5a2", - constraint: ">=5a2", - satisfied: true, - }, - { - name: "bad semver (lt boundary)", - version: "5a2", - constraint: "<5a2", - satisfied: false, - }, - // regression for https://github.com/anchore/go-version/pull/2 - { - name: "indirect package match", - version: "1.3.2-r0", - constraint: "<= 1.3.3-r0", - satisfied: true, - }, - { - name: "indirect package no match", - version: "1.3.4-r0", - constraint: "<= 1.3.3-r0", - satisfied: false, - }, - { - name: "vulndb fuzzy constraint single quoted", - version: "4.5.2", - constraint: "'4.5.1' || '4.5.2'", - satisfied: true, - }, - { - name: "vulndb fuzzy constraint double quoted", - version: "4.5.2", - constraint: "\"4.5.1\" || \"4.5.2\"", - satisfied: true, - }, - { - name: "rc candidates with no '-' can match semver pattern", - version: "1.20rc1", - constraint: " = 1.20.0-rc1", - satisfied: true, - }, - { - name: "candidates ahead of alpha", - version: "3.11.0", - constraint: "> 3.11.0-alpha1", - satisfied: true, - }, - { - name: "candidates ahead of beta", - version: "3.11.0", - constraint: "> 3.11.0-beta1", - satisfied: true, - }, - { - name: "candidates ahead of same alpha versions", - version: "3.11.0-alpha5", - constraint: "> 3.11.0-alpha1", - satisfied: true, - }, - { - name: "candidates are placed correctly between alpha and release", - version: "3.11.0-beta5", - constraint: "3.11.0 || = 3.11.0-alpha1", - satisfied: false, - }, - { - name: "candidates with pre suffix are sorted numerically", - version: "1.0.2pre1", - constraint: " < 1.0.2pre2", - satisfied: true, - }, - { - name: "openssl pre2 is still considered less than release", - version: "1.1.1-pre2", - constraint: "> 1.1.1-pre1, < 1.1.1", - satisfied: true, - }, - { - name: "major version releases are less than their subsequent patch releases with letter suffixes", - version: "1.1.1", - constraint: "> 1.1.1-a", - satisfied: true, - }, - { - name: "date based pep440 version string boundary condition", - version: "2022.12.7", - constraint: ">=2017.11.05,<2022.12.07", - }, - { - name: "certifi false positive is fixed", - version: "2022.12.7", - constraint: ">=2017.11.05,<2022.12.07", - }, - } - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - c, err := newPep440Constraint(tc.constraint) - require.NoError(t, err) - v, err := NewVersion(tc.version, PythonFormat) - require.NoError(t, err) - sat, err := c.Satisfied(v) - require.NoError(t, err) - assert.Equal(t, tc.satisfied, sat) - }) - } -} diff --git a/grype/version/pep440_version.go b/grype/version/pep440_version.go index 2c9ec696d9d..8c472e1651e 100644 --- a/grype/version/pep440_version.go +++ b/grype/version/pep440_version.go @@ -1,8 +1,6 @@ package version import ( - "fmt" - goPepVersion "github.com/aquasecurity/go-pep440-version" ) @@ -12,25 +10,25 @@ type pep440Version struct { obj goPepVersion.Version } -func (p pep440Version) Compare(other *Version) (int, error) { - other, err := finalizeComparisonVersion(other, PythonFormat) - if err != nil { - return -1, err - } - - if other.rich.pep440version == nil { - return -1, fmt.Errorf("given empty pep440 object") - } - - return other.rich.pep440version.obj.Compare(p.obj), nil -} - func newPep440Version(raw string) (pep440Version, error) { parsed, err := goPepVersion.Parse(raw) if err != nil { - return pep440Version{}, fmt.Errorf("could 
not parse pep440 version: %w", err) + return pep440Version{}, invalidFormatError(SemanticFormat, raw, err) } return pep440Version{ obj: parsed, }, nil } + +func (v pep440Version) Compare(other *Version) (int, error) { + if other == nil { + return -1, ErrNoVersionProvided + } + + o, err := newPep440Version(other.Raw) + if err != nil { + return 0, err + } + + return v.obj.Compare(o.obj), nil +} diff --git a/grype/version/pep440_version_test.go b/grype/version/pep440_version_test.go index 3cb69f85d06..6685df7f8ca 100644 --- a/grype/version/pep440_version_test.go +++ b/grype/version/pep440_version_test.go @@ -8,7 +8,219 @@ import ( "github.com/stretchr/testify/require" ) -func TestPep440VersionCompare(t *testing.T) { +func TestPep440Version_Constraint(t *testing.T) { + tests := []testCase{ + { + name: "empty constraint", + version: "2.3.1", + constraint: "", + satisfied: true, + }, + { + name: "version range within", + constraint: ">1.0, <2.0", + version: "1.2+beta-3", + satisfied: true, + }, + { + name: "version within compound range", + constraint: ">1.0, <2.0 || > 3.0", + version: "3.2+beta-3", + satisfied: true, + }, + { + name: "version within compound range (2)", + constraint: ">1.0, <2.0 || > 3.0", + version: "1.2+beta-3", + satisfied: true, + }, + { + name: "version not within compound range", + constraint: ">1.0, <2.0 || > 3.0", + version: "2.2+beta-3", + satisfied: false, + }, + { + name: "version range outside (right)", + constraint: ">1.0, <2.0", + version: "2.1-beta-3", + satisfied: false, + }, + { + name: "version range outside (left)", + constraint: ">1.0, <2.0", + version: "0.9-beta-2", + satisfied: false, + }, + { + name: "version range within (excluding left, prerelease)", + constraint: ">=1.0, <2.0", + version: "1.0-beta-3", + satisfied: false, + }, + { + name: "version range within (including left)", + constraint: ">=1.1, <2.0", + version: "1.1", + satisfied: true, + }, + { + name: "version range within (excluding right, 1)", + constraint: ">1.0, <=2.0", + version: "2.0-beta-3", + satisfied: true, + }, + { + name: "version range within (excluding right, 2)", + constraint: ">1.0, <2.0", + version: "2.0-beta-3", + satisfied: true, + }, + { + name: "version range within (including right)", + constraint: ">1.0, <=2.0", + version: "2.0", + satisfied: true, + }, + { + name: "version range within (including right, longer version [valid semver, bad fuzzy])", + constraint: ">1.0, <=2.0", + version: "2.0.0", + satisfied: true, + }, + { + name: "bad semver (eq)", + version: "5a2", + constraint: "=5a2", + satisfied: true, + }, + { + name: "bad semver (gt)", + version: "5a2", + constraint: ">5a1", + satisfied: true, + }, + { + name: "bad semver (lt)", + version: "5a2", + constraint: "<6a1", + satisfied: true, + }, + { + name: "bad semver (lte)", + version: "5a2", + constraint: "<=5a2", + satisfied: true, + }, + { + name: "bad semver (gte)", + version: "5a2", + constraint: ">=5a2", + satisfied: true, + }, + { + name: "bad semver (lt boundary)", + version: "5a2", + constraint: "<5a2", + satisfied: false, + }, + // regression for https://github.com/anchore/go-version/pull/2 + { + name: "indirect package match", + version: "1.3.2-r0", + constraint: "<= 1.3.3-r0", + satisfied: true, + }, + { + name: "indirect package no match", + version: "1.3.4-r0", + constraint: "<= 1.3.3-r0", + satisfied: false, + }, + { + name: "vulndb fuzzy constraint single quoted", + version: "4.5.2", + constraint: "'4.5.1' || '4.5.2'", + satisfied: true, + }, + { + name: "vulndb fuzzy constraint double quoted", + 
version: "4.5.2", + constraint: "\"4.5.1\" || \"4.5.2\"", + satisfied: true, + }, + { + name: "rc candidates with no '-' can match semver pattern", + version: "1.20rc1", + constraint: " = 1.20.0-rc1", + satisfied: true, + }, + { + name: "candidates ahead of alpha", + version: "3.11.0", + constraint: "> 3.11.0-alpha1", + satisfied: true, + }, + { + name: "candidates ahead of beta", + version: "3.11.0", + constraint: "> 3.11.0-beta1", + satisfied: true, + }, + { + name: "candidates ahead of same alpha versions", + version: "3.11.0-alpha5", + constraint: "> 3.11.0-alpha1", + satisfied: true, + }, + { + name: "candidates are placed correctly between alpha and release", + version: "3.11.0-beta5", + constraint: "3.11.0 || = 3.11.0-alpha1", + satisfied: false, + }, + { + name: "candidates with pre suffix are sorted numerically", + version: "1.0.2pre1", + constraint: " < 1.0.2pre2", + satisfied: true, + }, + { + name: "openssl pre2 is still considered less than release", + version: "1.1.1-pre2", + constraint: "> 1.1.1-pre1, < 1.1.1", + satisfied: true, + }, + { + name: "major version releases are less than their subsequent patch releases with letter suffixes", + version: "1.1.1", + constraint: "> 1.1.1-a", + satisfied: true, + }, + { + name: "date based pep440 version string boundary condition", + version: "2022.12.7", + constraint: ">=2017.11.05,<2022.12.07", + }, + { + name: "certifi false positive is fixed", + version: "2022.12.7", + constraint: ">=2017.11.05,<2022.12.07", + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + c, err := GetConstraint(tc.constraint, PythonFormat) + require.NoError(t, err) + v := NewVersion(tc.version, PythonFormat) + sat, err := c.Satisfied(v) + require.NoError(t, err) + assert.Equal(t, tc.satisfied, sat) + }) + } +} + +func TestPep440Version_Compare(t *testing.T) { tests := []struct { name string thisVersion string @@ -31,22 +243,6 @@ func TestPep440VersionCompare(t *testing.T) { otherFormat: PythonFormat, expectError: false, }, - { - name: "different format returns error", - thisVersion: "1.2.3", - otherVersion: "1.2.3", - otherFormat: SemanticFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, - { - name: "different format returns error - apk", - thisVersion: "1.2.3", - otherVersion: "1.2.3-r4", - otherFormat: ApkFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, { name: "unknown format attempts upgrade - valid python format", thisVersion: "1.2.3", @@ -60,7 +256,7 @@ func TestPep440VersionCompare(t *testing.T) { otherVersion: "not/valid/python-format", otherFormat: UnknownFormat, expectError: true, - errorSubstring: "unsupported version format for comparison", + errorSubstring: "invalid", }, } @@ -69,8 +265,7 @@ func TestPep440VersionCompare(t *testing.T) { thisVer, err := newPep440Version(test.thisVersion) require.NoError(t, err) - otherVer, err := NewVersion(test.otherVersion, test.otherFormat) - require.NoError(t, err) + otherVer := NewVersion(test.otherVersion, test.otherFormat) result, err := thisVer.Compare(otherVer) @@ -88,47 +283,31 @@ func TestPep440VersionCompare(t *testing.T) { } } -func TestPep440VersionCompareEdgeCases(t *testing.T) { +func TestPep440Version_Compare_EdgeCases(t *testing.T) { tests := []struct { name string - setupFunc func() (pep440Version, *Version) + setupFunc func(testing.TB) (*Version, *Version) expectError bool errorSubstring string }{ { name: "nil version object", - setupFunc: func() (pep440Version, *Version) { 
- thisVer, _ := newPep440Version("1.2.3") + setupFunc: func(t testing.TB) (*Version, *Version) { + thisVer := NewVersion("1.2.3", PythonFormat) return thisVer, nil }, expectError: true, errorSubstring: "no version provided for comparison", }, - { - name: "empty pep440version in other object", - setupFunc: func() (pep440Version, *Version) { - thisVer, _ := newPep440Version("1.2.3") - - otherVer := &Version{ - Raw: "1.2.4", - Format: PythonFormat, - rich: rich{}, - } - - return thisVer, otherVer - }, - expectError: true, - errorSubstring: "given empty pep440 object", - }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer, otherVer := test.setupFunc() + thisVer, otherVer := test.setupFunc(t) _, err := thisVer.Compare(otherVer) - assert.Error(t, err) + require.Error(t, err) if test.errorSubstring != "" { assert.True(t, strings.Contains(err.Error(), test.errorSubstring), "Expected error to contain '%s', got: %v", test.errorSubstring, err) diff --git a/grype/version/portage_constraint.go b/grype/version/portage_constraint.go deleted file mode 100644 index d04dc56b056..00000000000 --- a/grype/version/portage_constraint.go +++ /dev/null @@ -1,66 +0,0 @@ -package version - -import ( - "fmt" -) - -type portageConstraint struct { - raw string - expression constraintExpression -} - -func newPortageConstraint(raw string) (portageConstraint, error) { - if raw == "" { - // an empty constraint is always satisfied - return portageConstraint{}, nil - } - - constraints, err := newConstraintExpression(raw, newPortageComparator) - if err != nil { - return portageConstraint{}, fmt.Errorf("unable to parse portage constraint phrase: %w", err) - } - - return portageConstraint{ - raw: raw, - expression: constraints, - }, nil -} - -func newPortageComparator(unit constraintUnit) (Comparator, error) { - ver := newPortageVersion(unit.version) - return &ver, nil -} - -func (c portageConstraint) supported(format Format) bool { - return format == PortageFormat -} - -func (c portageConstraint) Satisfied(version *Version) (bool, error) { - if c.raw == "" && version != nil { - // an empty constraint is always satisfied - return true, nil - } else if version == nil { - if c.raw != "" { - // a non-empty constraint with no version given should always fail - return false, nil - } - return true, nil - } - - if !c.supported(version.Format) { - return false, NewUnsupportedFormatError(PortageFormat, version.Format) - } - - if version.rich.portVer == nil { - return false, fmt.Errorf("no rich portage version given: %+v", version) - } - - return c.expression.satisfied(version) -} - -func (c portageConstraint) String() string { - if c.raw == "" { - return "none (portage)" - } - return fmt.Sprintf("%s (portage)", c.raw) -} diff --git a/grype/version/portage_version.go b/grype/version/portage_version.go index 067b4756586..c34fe776900 100644 --- a/grype/version/portage_version.go +++ b/grype/version/portage_version.go @@ -1,12 +1,21 @@ package version import ( - "fmt" "math/big" "regexp" "strings" ) +var _ Comparator = (*portageVersion)(nil) + +// for the original python implementation, see: +// https://github.com/gentoo/portage/blob/master/lib/portage/versions.py +var ( + versionRegexp = regexp.MustCompile(`(\d+)((\.\d+)*)([a-z]?)((_(pre|p|beta|alpha|rc)\d*)*)(-r(\d+))?`) + suffixRegexp = regexp.MustCompile(`^(alpha|beta|rc|pre|p)(\d*)$`) + suffixValue = map[string]int{"pre": -2, "p": 0, "alpha": -4, "beta": -3, "rc": -1} +) + type portageVersion struct { version string } @@ -17,17 +26,12 @@ func 
newPortageVersion(raw string) portageVersion { } } -func (v *portageVersion) Compare(other *Version) (int, error) { - other, err := finalizeComparisonVersion(other, PortageFormat) - if err != nil { - return -1, err +func (v portageVersion) Compare(other *Version) (int, error) { + if other == nil { + return -1, ErrNoVersionProvided } - if other.rich.portVer == nil { - return -1, fmt.Errorf("given empty portageVersion object") - } - - return other.rich.portVer.compare(*v), nil + return v.compare(newPortageVersion(other.Raw)), nil } // Compare returns 0 if v == v2, -1 if v < v2, and +1 if v > v2. @@ -38,14 +42,6 @@ func (v portageVersion) compare(v2 portageVersion) int { return comparePortageVersions(v.version, v2.version) } -// For the original python implementation, see: -// https://github.com/gentoo/portage/blob/master/lib/portage/versions.py -var ( - versionRegexp = regexp.MustCompile(`(\d+)((\.\d+)*)([a-z]?)((_(pre|p|beta|alpha|rc)\d*)*)(-r(\d+))?`) - suffixRegexp = regexp.MustCompile(`^(alpha|beta|rc|pre|p)(\d*)$`) - suffixValue = map[string]int{"pre": -2, "p": 0, "alpha": -4, "beta": -3, "rc": -1} -) - //nolint:funlen,gocognit func comparePortageVersions(a, b string) int { match1 := versionRegexp.FindStringSubmatch(a) diff --git a/grype/version/portage_version_test.go b/grype/version/portage_version_test.go index 1659d2bce41..9f2a1bb5a5f 100644 --- a/grype/version/portage_version_test.go +++ b/grype/version/portage_version_test.go @@ -8,7 +8,192 @@ import ( "github.com/stretchr/testify/require" ) -func TestVersionPortage(t *testing.T) { +func TestPortageVersion_Constraint(t *testing.T) { + tests := []testCase{ + // empty constraint is always satisfied + {version: "1.2.3", constraint: "", satisfied: true}, + {version: "1.2.3-r1", constraint: "", satisfied: true}, + {version: "1.2.3_alpha1", constraint: "", satisfied: true}, + + // simple equality + {version: "1.2.3", constraint: "= 1.2.3", satisfied: true}, + {version: "1.2.3-r1", constraint: "= 1.2.3-r1", satisfied: true}, + {version: "1.2.3", constraint: "= 1.2.4", satisfied: false}, + + // less than + {version: "1.2.3", constraint: "< 1.2.4", satisfied: true}, + {version: "1.2.3", constraint: "< 1.2.3", satisfied: false}, + {version: "1.2.3", constraint: "< 1.2.2", satisfied: false}, + {version: "1.2.3-r1", constraint: "< 1.2.3-r2", satisfied: true}, + {version: "1.2.3-r2", constraint: "< 1.2.3-r1", satisfied: false}, + + // less than or equal + {version: "1.2.3", constraint: "<= 1.2.3", satisfied: true}, + {version: "1.2.3", constraint: "<= 1.2.4", satisfied: true}, + {version: "1.2.3", constraint: "<= 1.2.2", satisfied: false}, + {version: "1.2.3-r1", constraint: "<= 1.2.3-r1", satisfied: true}, + + // greater than + {version: "1.2.4", constraint: "> 1.2.3", satisfied: true}, + {version: "1.2.3", constraint: "> 1.2.3", satisfied: false}, + {version: "1.2.2", constraint: "> 1.2.3", satisfied: false}, + {version: "1.2.3-r2", constraint: "> 1.2.3-r1", satisfied: true}, + {version: "1.2.3-r1", constraint: "> 1.2.3-r2", satisfied: false}, + + // greater than or equal + {version: "1.2.3", constraint: ">= 1.2.3", satisfied: true}, + {version: "1.2.4", constraint: ">= 1.2.3", satisfied: true}, + {version: "1.2.2", constraint: ">= 1.2.3", satisfied: false}, + {version: "1.2.3-r1", constraint: ">= 1.2.3-r1", satisfied: true}, + + // compound conditions with AND (comma) + {version: "1.5.0", constraint: "> 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.5.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "2.5.0", constraint: 
"> 1.0.0, < 2.0.0", satisfied: false}, + {version: "1.2.3-r5", constraint: ">= 1.2.3-r1, <= 1.2.3-r10", satisfied: true}, + + // compound conditions with OR + {version: "0.5.0", constraint: "< 1.0.0 || > 2.0.0", satisfied: true}, + {version: "3.0.0", constraint: "< 1.0.0 || > 2.0.0", satisfied: true}, + {version: "1.5.0", constraint: "< 1.0.0 || > 2.0.0", satisfied: false}, + + // complex compound conditions + {version: "1.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.3.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.8.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + + // portage-specific version features + // letter suffixes (a, b, c etc.) + {version: "1.2a", constraint: "< 1.2b", satisfied: true}, + {version: "1.2b", constraint: "< 1.2a", satisfied: false}, + {version: "12.2.5", constraint: "> 12.2b", satisfied: true}, + + // revision numbers (-r suffix) + {version: "1.0.0-r1", constraint: "> 1.0.0", satisfied: true}, + {version: "1.0.0", constraint: "> 1.0.0-r1", satisfied: false}, + {version: "1.2.3-r2", constraint: "> 1.2.3-r1", satisfied: true}, + {version: "1.2.3-r1", constraint: "< 1.2.3-r2", satisfied: true}, + + // version suffixes (alpha, beta, pre, rc, p) + {version: "1.0.0_alpha1", constraint: "< 1.0.0_beta1", satisfied: true}, + {version: "1.0.0_beta1", constraint: "< 1.0.0_rc1", satisfied: true}, + {version: "1.0.0_rc1", constraint: "< 1.0.0", satisfied: true}, + {version: "1.0.0", constraint: "< 1.0.0_p1", satisfied: true}, + {version: "1.0.0_pre1", constraint: "> 1.0.0_alpha1", satisfied: true}, + + // patch level suffixes + {version: "1_p1", constraint: "> 1_p0", satisfied: true}, + {version: "1_p0", constraint: "> 1", satisfied: true}, + + // decimal versions with leading zeros + {version: "1.01", constraint: "< 1.1", satisfied: true}, + {version: "1.1", constraint: "> 1.01", satisfied: true}, + + // version with missing patch components + {version: "12.2", constraint: "< 12.2.0", satisfied: true}, // 12.2 < 12.2.0 is true in portage 🤯 + {version: "12.2.0", constraint: "> 12.2", satisfied: true}, + + // edge cases - versions that should not match + {version: "1.2.3", constraint: "= 1.2.4", satisfied: false}, + {version: "1.2.3", constraint: "> 1.2.3", satisfied: false}, + {version: "1.2.3", constraint: "< 1.2.3", satisfied: false}, + } + + for _, test := range tests { + t.Run(test.tName(), func(t *testing.T) { + constraint, err := GetConstraint(test.constraint, PortageFormat) + assert.NoError(t, err) + + test.assertVersionConstraint(t, PortageFormat, constraint) + }) + } +} + +func TestPortageConstraint_Constraint_NilVersion(t *testing.T) { + tests := []struct { + name string + constraint string + expected bool + shouldError bool + }{ + { + name: "empty constraint with nil version", + constraint: "", + expected: true, + shouldError: false, + }, + { + name: "non-empty constraint with nil version", + constraint: "> 1.0.0", + expected: false, + shouldError: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + c, err := GetConstraint(test.constraint, PortageFormat) + assert.NoError(t, err) + + satisfied, err := c.Satisfied(nil) + if test.shouldError { + require.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, test.expected, satisfied) + } + }) + } +} + +func TestPortageVersion_Constraint_UnsupportedFormat(t *testing.T) { + c, err := GetConstraint("> 1.0.0", PortageFormat) + assert.NoError(t, err) + + // 
test with a semantic version (wrong format) + version := NewVersion("1.2.3", SemanticFormat) + + satisfied, err := c.Satisfied(version) + require.Error(t, err) + assert.False(t, satisfied) + assert.Contains(t, err.Error(), "unsupported version comparison") +} + +func TestPortageConstraint_String(t *testing.T) { + tests := []struct { + name string + constraint string + expected string + }{ + { + name: "empty constraint", + constraint: "", + expected: "none (portage)", + }, + { + name: "simple constraint", + constraint: "> 1.0.0", + expected: "> 1.0.0 (portage)", + }, + { + name: "complex constraint", + constraint: "> 1.0.0, < 2.0.0", + expected: "> 1.0.0, < 2.0.0 (portage)", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + constraint, err := GetConstraint(test.constraint, PortageFormat) + assert.NoError(t, err) + + result := constraint.String() + assert.Equal(t, test.expected, result) + }) + } +} + +func TestPortageVersion_Compare(t *testing.T) { tests := []struct { v1 string v2 string @@ -29,19 +214,17 @@ func TestVersionPortage(t *testing.T) { for _, test := range tests { name := test.v1 + "_vs_" + test.v2 t.Run(name, func(t *testing.T) { - v1 := newPortageVersion(test.v1) - v2 := newPortageVersion(test.v2) + v1 := NewVersion(test.v1, PortageFormat) + v2 := NewVersion(test.v2, PortageFormat) - actual := v1.compare(v2) - - if actual != test.result { - t.Errorf("bad result: %+v (expected: %+v)", actual, test.result) - } + actual, err := v1.Compare(v2) + require.NoError(t, err) + assert.Equal(t, test.result, actual, "expected comparison result to match") }) } } -func TestPortageVersionCompare_Format(t *testing.T) { +func TestPortageVersion_Compare_Format(t *testing.T) { tests := []struct { name string thisVersion string @@ -64,22 +247,6 @@ func TestPortageVersionCompare_Format(t *testing.T) { otherFormat: PortageFormat, expectError: false, }, - { - name: "different format returns error", - thisVersion: "1.2.3", - otherVersion: "1.2.3", - otherFormat: SemanticFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, - { - name: "different format returns error - apk", - thisVersion: "1.2.3", - otherVersion: "1.2.3-r4", - otherFormat: ApkFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, { name: "unknown format attempts upgrade - valid portage format", thisVersion: "1.2.3", @@ -91,10 +258,8 @@ func TestPortageVersionCompare_Format(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer := newPortageVersion(test.thisVersion) - - otherVer, err := NewVersion(test.otherVersion, test.otherFormat) - require.NoError(t, err) + thisVer := NewVersion(test.thisVersion, PortageFormat) + otherVer := NewVersion(test.otherVersion, test.otherFormat) result, err := thisVer.Compare(otherVer) @@ -112,47 +277,31 @@ func TestPortageVersionCompare_Format(t *testing.T) { } } -func TestPortageVersionCompareEdgeCases(t *testing.T) { +func TestPortageVersion_Compare_EdgeCases(t *testing.T) { tests := []struct { name string - setupFunc func() (*portageVersion, *Version) + setupFunc func(testing.TB) (*Version, *Version) expectError bool errorSubstring string }{ { name: "nil version object", - setupFunc: func() (*portageVersion, *Version) { - thisVer := newPortageVersion("1.2.3") - return &thisVer, nil + setupFunc: func(t testing.TB) (*Version, *Version) { + thisVer := NewVersion("1.2.3", PortageFormat) + return thisVer, nil }, expectError: true, errorSubstring: "no 
version provided for comparison", }, - { - name: "empty portageVersion in other object", - setupFunc: func() (*portageVersion, *Version) { - thisVer := newPortageVersion("1.2.3") - - otherVer := &Version{ - Raw: "1.2.4", - Format: PortageFormat, - rich: rich{}, - } - - return &thisVer, otherVer - }, - expectError: true, - errorSubstring: "given empty portageVersion object", - }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer, otherVer := test.setupFunc() + thisVer, otherVer := test.setupFunc(t) _, err := thisVer.Compare(otherVer) - assert.Error(t, err) + require.Error(t, err) if test.errorSubstring != "" { assert.True(t, strings.Contains(err.Error(), test.errorSubstring), "Expected error to contain '%s', got: %v", test.errorSubstring, err) diff --git a/grype/version/range.go b/grype/version/range.go new file mode 100644 index 00000000000..6abb8cec47b --- /dev/null +++ b/grype/version/range.go @@ -0,0 +1,117 @@ +package version + +import ( + "fmt" + "regexp" + "strconv" + "strings" + + "github.com/anchore/grype/internal/stringutil" +) + +// Operator group only matches on range operators (GT, LT, GTE, LTE, E) +// version group matches on everything except for whitespace and operators (range or boolean) +var constraintPartPattern = regexp.MustCompile(`\s*(?P[^><=a-zA-Z0-9().'"]*)(?P[><=]*)\s*(?P.+)`) + +type rangeUnit struct { + Operator Operator + Version string +} + +func parseRange(phrase string) (*rangeUnit, error) { + match := stringutil.MatchCaptureGroups(constraintPartPattern, phrase) + version, exists := match["version"] + if !exists { + return nil, nil + } + + opStr := match["operator"] + + prefix := match["prefix"] + + if prefix != "" && opStr == "" { + return nil, fmt.Errorf("constraint has an unprocessable prefix %q", prefix) + } + + version = strings.Trim(version, " ") + + if err := validateVersion(version); err != nil { + return nil, err + } + + // version may have quotes, attempt to unquote it (ignore errors) + unquoted, err := trimQuotes(version) + if err == nil { + version = unquoted + } + + op, err := parseOperator(opStr) + if err != nil { + return nil, fmt.Errorf("unable to parse constraint operator=%q: %+v", opStr, err) + } + return &rangeUnit{ + Operator: op, + Version: version, + }, nil +} + +// trimQuotes will attempt to remove double quotes. +// If removing double quotes is unsuccessful, it will attempt to remove single quotes. +// If neither operation is successful, it will return an error. +func trimQuotes(s string) (string, error) { + unquoted, err := strconv.Unquote(s) + switch { + case err == nil: + return unquoted, nil + case strings.HasPrefix(s, "'") && strings.HasSuffix(s, "'"): + return strings.Trim(s, "'"), nil + default: + return s, fmt.Errorf("string %s is not single or double quoted", s) + } +} + +func (c *rangeUnit) Satisfied(comparison int) bool { + switch c.Operator { + case EQ: + return comparison == 0 + case GT: + return comparison > 0 + case GTE: + return comparison >= 0 + case LT: + return comparison < 0 + case LTE: + return comparison <= 0 + default: + panic(fmt.Errorf("unknown operator: %s", c.Operator)) + } +} + +// validateVersion scans the version string and validates characters outside of quotes. +// invalid characters within quotes are allowed, but unbalanced quotes are not allowed. 
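Taken together, parseRange, trimQuotes, and validateVersion give the behavior below; a minimal in-package sketch (these helpers are unexported, and the function name is illustrative), with inputs drawn from the range_test.go cases further on:

package version

import "fmt"

// illustrative sketch only: inputs mirror the range_test.go cases below.
func exampleRangeParsing() {
	unit, _ := parseRange(`>= 2.3`)
	fmt.Println(unit.Operator, unit.Version) // >= 2.3

	// quoted versions may contain operator characters; the quotes are trimmed afterwards
	unit, _ = parseRange(`="something"`)
	fmt.Println(unit.Operator, unit.Version) // = something

	// '><=' outside of quotes suggests the value is itself a constraint expression
	fmt.Println(validateVersion(`1.0 >= 2.0`) != nil) // true

	// unbalanced quotes are rejected
	fmt.Println(validateVersion(`"unbalanced`) != nil) // true
}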
+func validateVersion(version string) error { + var inQuotes bool + var quoteChar rune + + for _, r := range version { + switch { + case !inQuotes && (r == '"' || r == '\''): + // start of quoted section + inQuotes = true + quoteChar = r + case inQuotes && r == quoteChar: + // end of quoted section + inQuotes = false + quoteChar = 0 + case !inQuotes && strings.ContainsRune("><=", r): + // invalid character outside of quotes + return fmt.Errorf("version %q potentially is a version constraint expression (should not contain '><=' outside of quotes)", version) + } + } + + if inQuotes { + return fmt.Errorf("version %q has unbalanced quotes", version) + } + + return nil +} diff --git a/grype/version/constraint_expression.go b/grype/version/range_expression.go similarity index 66% rename from grype/version/constraint_expression.go rename to grype/version/range_expression.go index 9870863b0f6..dbbe912a8d1 100644 --- a/grype/version/constraint_expression.go +++ b/grype/version/range_expression.go @@ -7,60 +7,52 @@ import ( "text/scanner" ) -type constraintExpression struct { - units [][]constraintUnit // only supports or'ing a group of and'ed groups - comparators [][]Comparator // only supports or'ing a group of and'ed groups +type simpleRangeExpression struct { + Units [][]rangeUnit // only supports or'ing a group of and'ed groups } -func newConstraintExpression(phrase string, genFn comparatorGenerator) (constraintExpression, error) { +func parseRangeExpression(phrase string) (simpleRangeExpression, error) { orParts, err := scanExpression(phrase) if err != nil { - return constraintExpression{}, fmt.Errorf("unable to create constraint expression from=%q : %w", phrase, err) + return simpleRangeExpression{}, fmt.Errorf("unable to create constraint expression from=%q : %w", phrase, err) } - orUnits := make([][]constraintUnit, len(orParts)) - orComparators := make([][]Comparator, len(orParts)) - + orUnits := make([][]rangeUnit, len(orParts)) + var fuzzyErr error for orIdx, andParts := range orParts { - andUnits := make([]constraintUnit, len(andParts)) - andComparators := make([]Comparator, len(andParts)) + andUnits := make([]rangeUnit, len(andParts)) for andIdx, part := range andParts { - unit, err := parseUnit(part) + unit, err := parseRange(part) if err != nil { - return constraintExpression{}, err + return simpleRangeExpression{}, err } if unit == nil { - return constraintExpression{}, fmt.Errorf("unable to parse unit: %q", part) + return simpleRangeExpression{}, fmt.Errorf("unable to parse unit: %q", part) } andUnits[andIdx] = *unit - - comparator, err := genFn(*unit) - if err != nil { - return constraintExpression{}, fmt.Errorf("failed to create comparator for '%s': %w", unit, err) - } - andComparators[andIdx] = comparator } orUnits[orIdx] = andUnits - orComparators[orIdx] = andComparators } - return constraintExpression{ - units: orUnits, - comparators: orComparators, - }, nil + return simpleRangeExpression{ + Units: orUnits, + }, fuzzyErr } -func (c *constraintExpression) satisfied(other *Version) (bool, error) { +func (c *simpleRangeExpression) satisfied(format Format, version *Version) (bool, error) { oneSatisfied := false - for i, andOperand := range c.comparators { + for i, andOperand := range c.Units { allSatisfied := true for j, andUnit := range andOperand { - result, err := andUnit.Compare(other) + result, err := version.Compare(&Version{ + Format: format, + Raw: andUnit.Version, + }) if err != nil { - return false, fmt.Errorf("uncomparable %#v vs %q: %w", andUnit, other.String(), err) + 
return false, fmt.Errorf("uncomparable %T vs %q: %w", andUnit, version.String(), err) } - unit := c.units[i][j] + unit := c.Units[i][j] if !unit.Satisfied(result) { allSatisfied = false diff --git a/grype/version/range_expression_test.go b/grype/version/range_expression_test.go new file mode 100644 index 00000000000..d5d84477a63 --- /dev/null +++ b/grype/version/range_expression_test.go @@ -0,0 +1,208 @@ +package version + +import ( + "testing" + + "github.com/go-test/deep" + "github.com/stretchr/testify/require" +) + +func TestScanExpression(t *testing.T) { + tests := []struct { + name string + phrase string + expected [][]string + wantErr require.ErrorAssertionFunc + }{ + { + name: "simple AND and OR expression", + phrase: "x,y||z", + expected: [][]string{ + { + "x", + "y", + }, + { + "z", + }, + }, + }, + { + name: "complex version constraints with operators", + phrase: "<1.0, >=2.0|| 3.0 || =4.0", + expected: [][]string{ + { + "<1.0", + ">=2.0", + }, + { + "3.0", + }, + { + "=4.0", + }, + }, + }, + { + name: "parenthetical expression not supported", + phrase: "(<1.0, >=2.0|| 3.0) || =4.0", + wantErr: require.Error, + }, + { + name: "whitespace handling", + phrase: ` > 1.0, <= 2.0,,, || = 3.0 `, + expected: [][]string{ + { + ">1.0", + "<=2.0", + }, + { + "=3.0", + }, + }, + }, + { + name: "quoted version with special characters", + phrase: ` > 1.0, <= " (2.0||),,, ", || = 3.0 `, + expected: [][]string{ + { + ">1.0", + `<=" (2.0||),,, "`, + }, + { + "=3.0", + }, + }, + }, + { + name: "empty string", + phrase: "", + expected: nil, + }, + { + name: "single version", + phrase: "1.0", + expected: [][]string{ + { + "1.0", + }, + }, + }, + { + name: "only AND operators", + phrase: ">=1.0, <2.0, !=1.5", + expected: [][]string{ + { + ">=1.0", + "<2.0", + "!=1.5", + }, + }, + }, + { + name: "only OR operators", + phrase: "1.0 || 2.0 || 3.0", + expected: [][]string{ + { + "1.0", + }, + { + "2.0", + }, + { + "3.0", + }, + }, + }, + { + name: "single pipe character should be treated as version", + phrase: "1.0|2.0", + expected: [][]string{ + { + "1.02.0", + }, + }, + }, + { + name: "multiple consecutive commas", + phrase: "1.0,,,2.0", + expected: [][]string{ + { + "1.0", + "2.0", + }, + }, + }, + { + name: "trailing comma", + phrase: "1.0,2.0,", + expected: [][]string{ + { + "1.0", + "2.0", + }, + }, + }, + { + name: "leading comma", + phrase: ",1.0,2.0", + expected: [][]string{ + { + "1.0", + "2.0", + }, + }, + }, + { + name: "complex version numbers", + phrase: "1.0.0-alpha+build.1,2.0.0-beta.2||3.0.0-rc.1", + expected: [][]string{ + { + "1.0.0-alpha+build.1", + "2.0.0-beta.2", + }, + { + "3.0.0-rc.1", + }, + }, + }, + { + name: "parentheses at start", + phrase: "(1.0", + wantErr: require.Error, + }, + { + name: "parentheses at end", + phrase: "1.0)", + wantErr: require.Error, + }, + { + name: "special characters in version", + phrase: "1.0.0+build.123-abc_def", + expected: [][]string{ + { + "1.0.0+build.123-abc_def", + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.wantErr == nil { + tt.wantErr = require.NoError + } + + actual, err := scanExpression(tt.phrase) + tt.wantErr(t, err) + + if err != nil { + return + } + + for _, d := range deep.Equal(tt.expected, actual) { + t.Errorf("difference: %+v", d) + } + }) + } +} diff --git a/grype/version/constraint_unit_test.go b/grype/version/range_test.go similarity index 65% rename from grype/version/constraint_unit_test.go rename to grype/version/range_test.go index a06ff255f16..0caf9fd495d 100644 --- 
a/grype/version/constraint_unit_test.go +++ b/grype/version/range_test.go @@ -5,117 +5,124 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -func TestSplitFuzzyPhrase(t *testing.T) { +func TestParseRangeUnit(t *testing.T) { tests := []struct { - phrase string - expected *constraintUnit - err bool + phrase string + expected *rangeUnit + wantError require.ErrorAssertionFunc }{ { phrase: "", }, { phrase: `="in<(b e t w e e n)>quotes<=||>=not!="`, - expected: &constraintUnit{ - rangeOperator: EQ, - version: "in<(b e t w e e n)>quotes<=||>=not!=", + expected: &rangeUnit{ + Operator: EQ, + Version: "in<(b e t w e e n)>quotes<=||>=not!=", }, }, { phrase: ` >= "in<(b e t w e e n)>quotes<=||>=not!=" `, - expected: &constraintUnit{ - rangeOperator: GTE, - version: "in<(b e t w e e n)>quotes<=||>=not!=", + expected: &rangeUnit{ + Operator: GTE, + Version: "in<(b e t w e e n)>quotes<=||>=not!=", }, }, { // to cover a version that has quotes within it, but not necessarily surrounding the entire version phrase: ` >= inbet"ween)>quotes" with trailing words `, - expected: &constraintUnit{ - rangeOperator: GTE, - version: `inbet"ween)>quotes" with trailing words`, + expected: &rangeUnit{ + Operator: GTE, + Version: `inbet"ween)>quotes" with trailing words`, }, }, + { + phrase: `="unbalandedquotes`, + wantError: require.Error, + }, { phrase: `="something"`, - expected: &constraintUnit{ - rangeOperator: EQ, - version: "something", + expected: &rangeUnit{ + Operator: EQ, + Version: "something", }, }, { phrase: "=something", - expected: &constraintUnit{ - rangeOperator: EQ, - version: "something", + expected: &rangeUnit{ + Operator: EQ, + Version: "something", }, }, { phrase: "= something", - expected: &constraintUnit{ - rangeOperator: EQ, - version: "something", + expected: &rangeUnit{ + Operator: EQ, + Version: "something", }, }, { phrase: "something", - expected: &constraintUnit{ + expected: &rangeUnit{ - rangeOperator: EQ, - version: "something", + Operator: EQ, + Version: "something", }, }, { phrase: "> something", - expected: &constraintUnit{ + expected: &rangeUnit{ - rangeOperator: GT, - version: "something", + Operator: GT, + Version: "something", }, }, { phrase: ">= 2.3", - expected: &constraintUnit{ + expected: &rangeUnit{ - rangeOperator: GTE, - version: "2.3", + Operator: GTE, + Version: "2.3", }, }, { phrase: "< 2.3", - expected: &constraintUnit{ + expected: &rangeUnit{ - rangeOperator: LT, - version: "2.3", + Operator: LT, + Version: "2.3", }, }, { phrase: "<=2.3", - expected: &constraintUnit{ + expected: &rangeUnit{ - rangeOperator: LTE, - version: "2.3", + Operator: LTE, + Version: "2.3", }, }, { phrase: " >= 1.0 ", - expected: &constraintUnit{ + expected: &rangeUnit{ - rangeOperator: GTE, - version: "1.0", + Operator: GTE, + Version: "1.0", }, }, } for _, test := range tests { t.Run(test.phrase, func(t *testing.T) { - actual, err := parseUnit(test.phrase) - if err != nil && test.err == false { - t.Fatalf("expected no error, got %+v", err) - } else if err == nil && test.err { - t.Fatalf("expected an error but did not get one") + if test.wantError == nil { + test.wantError = require.NoError + } + actual, err := parseRange(test.phrase) + test.wantError(t, err) + if err != nil { + return } if !reflect.DeepEqual(test.expected, actual) { @@ -176,7 +183,7 @@ func TestTrimQuotes(t *testing.T) { err: true, }, { - // This raises an error, but I do not believe that this is a scenario that we need to account for, so should be ok. 
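Since simpleRangeExpression only supports OR-ing groups of AND-ed ranges, compound constraints split exactly as the scanExpression cases above show; a hedged sketch of how that surfaces through GetConstraint, with expectations taken from the rpm test table below (function name illustrative):

package version

import "fmt"

// illustrative sketch only: the compound constraint below appears in the rpm test table.
func exampleCompoundConstraint() {
	c, _ := GetConstraint("> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", RpmFormat)

	// satisfied when either AND-ed group holds
	inFirstRange, _ := c.Satisfied(NewVersion("0.2.0", RpmFormat))
	inSecondRange, _ := c.Satisfied(NewVersion("1.5.0", RpmFormat))
	inNeither, _ := c.Satisfied(NewVersion("0.6.0", RpmFormat))
	fmt.Println(inFirstRange, inSecondRange, inNeither) // true true false
}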
+ // this raises an error, but I do not believe that this is a scenario that we need to account for, so should be ok. name: "nested double/double quotes", input: "\"t\"es\"t\"", expected: "\"t\"es\"t\"", diff --git a/grype/version/rpm_constraint.go b/grype/version/rpm_constraint.go deleted file mode 100644 index 4973139ba20..00000000000 --- a/grype/version/rpm_constraint.go +++ /dev/null @@ -1,69 +0,0 @@ -package version - -import ( - "fmt" -) - -type rpmConstraint struct { - raw string - expression constraintExpression -} - -func newRpmConstraint(raw string) (rpmConstraint, error) { - if raw == "" { - // an empty constraint is always satisfied - return rpmConstraint{}, nil - } - - constraints, err := newConstraintExpression(raw, newRpmComparator) - if err != nil { - return rpmConstraint{}, fmt.Errorf("unable to parse rpm constraint phrase: %w", err) - } - - return rpmConstraint{ - raw: raw, - expression: constraints, - }, nil -} - -func newRpmComparator(unit constraintUnit) (Comparator, error) { - ver, err := newRpmVersion(unit.version) - if err != nil { - return nil, fmt.Errorf("unable to parse constraint version (%s): %w", unit.version, err) - } - return &ver, nil -} - -func (c rpmConstraint) supported(format Format) bool { - return format == RpmFormat -} - -func (c rpmConstraint) Satisfied(version *Version) (bool, error) { - if c.raw == "" && version != nil { - // an empty constraint is always satisfied - return true, nil - } else if version == nil { - if c.raw != "" { - // a non-empty constraint with no version given should always fail - return false, nil - } - return true, nil - } - - if !c.supported(version.Format) { - return false, NewUnsupportedFormatError(RpmFormat, version.Format) - } - - if version.rich.rpmVer == nil { - return false, fmt.Errorf("no rich rpm version given: %+v", version) - } - - return c.expression.satisfied(version) -} - -func (c rpmConstraint) String() string { - if c.raw == "" { - return "none (rpm)" - } - return fmt.Sprintf("%s (rpm)", c.raw) -} diff --git a/grype/version/rpm_constraint_test.go b/grype/version/rpm_constraint_test.go deleted file mode 100644 index 77139361d44..00000000000 --- a/grype/version/rpm_constraint_test.go +++ /dev/null @@ -1,80 +0,0 @@ -package version - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestVersionRpmConstraint(t *testing.T) { - tests := []testCase{ - // empty values - {version: "2.3.1", constraint: "", satisfied: true}, - // trivial compound conditions - {version: "2.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "1.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: true}, - {version: "2.0.0", constraint: "> 1.0.0, <= 2.0.0", satisfied: true}, - {version: "2.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "1.0.0", constraint: ">= 1.0.0, < 2.0.0", satisfied: true}, - {version: "1.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "0.9.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, - {version: "1.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, - {version: "0.2.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, - {version: "0.0.1", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "0.6.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "2.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - // trivial scenarios - {version: "2.3.1", constraint: "< 2.0.0", satisfied: false}, - {version: 
"2.3.1", constraint: "< 2.0", satisfied: false}, - {version: "2.3.1", constraint: "< 2", satisfied: false}, - {version: "2.3.1", constraint: "< 2.3", satisfied: false}, - {version: "2.3.1", constraint: "< 2.3.1", satisfied: false}, - {version: "2.3.1", constraint: "< 2.3.2", satisfied: true}, - {version: "2.3.1", constraint: "< 2.4", satisfied: true}, - {version: "2.3.1", constraint: "< 3", satisfied: true}, - {version: "2.3.1", constraint: "< 3.0", satisfied: true}, - {version: "2.3.1", constraint: "< 3.0.0", satisfied: true}, - // epoch - {version: "1:0", constraint: "< 0:1", satisfied: false}, - {version: "2:4.19.01-1.el7_5", constraint: "< 2:4.19.1-1.el7_5", satisfied: false}, - {version: "2:4.19.01-1.el7_5", constraint: "<= 2:4.19.1-1.el7_5", satisfied: true}, - {version: "0:4.19.1-1.el7_5", constraint: "< 2:4.19.1-1.el7_5", satisfied: true}, - {version: "11:4.19.0-1.el7_5", constraint: "< 12:4.19.0-1.el7", satisfied: true}, - {version: "13:4.19.0-1.el7_5", constraint: "< 12:4.19.0-1.el7", satisfied: false}, - // regression: https://github.com/anchore/grype/issues/316 - {version: "1.5.4-2.el7_9", constraint: "< 0:1.5.4-2.el7_9", satisfied: false}, - {version: "1.5.4-2.el7", constraint: "< 0:1.5.4-2.el7_9", satisfied: true}, - // Non-standard epoch handling. In comparisons with epoch on only one side, they are both ignored - {version: "1:0", constraint: "< 1", satisfied: true}, - {version: "0:0", constraint: "< 0", satisfied: false}, - {version: "0:0", constraint: "= 0", satisfied: true}, - {version: "0", constraint: "= 0:0", satisfied: true}, - {version: "1.0", constraint: "< 2:1.0", satisfied: false}, - {version: "1.0", constraint: "<= 2:1.0", satisfied: true}, - {version: "1:2", constraint: "< 1", satisfied: false}, - {version: "1:2", constraint: "> 1", satisfied: true}, - {version: "2:4.19.01-1.el7_5", constraint: "< 4.19.1-1.el7_5", satisfied: false}, - {version: "2:4.19.01-1.el7_5", constraint: "<= 4.19.1-1.el7_5", satisfied: true}, - {version: "4.19.01-1.el7_5", constraint: "< 2:4.19.1-1.el7_5", satisfied: false}, - {version: "4.19.0-1.el7_5", constraint: "< 12:4.19.0-1.el7", satisfied: false}, - {version: "4.19.0-1.el7_5", constraint: "<= 12:4.19.0-1.el7", satisfied: false}, - {version: "3:4.19.0-1.el7_5", constraint: "< 4.21.0-1.el7", satisfied: true}, - {version: "4:1.2.3-3-el7_5", constraint: "< 1.2.3-el7_5~snapshot1", satisfied: false}, - // regression https://github.com/anchore/grype/issues/398 - {version: "8.3.1-5.el8.4", constraint: "< 0:8.3.1-5.el8.5", satisfied: true}, - {version: "8.3.1-5.el8.40", constraint: "< 0:8.3.1-5.el8.5", satisfied: false}, - {version: "8.3.1-5.el8", constraint: "< 0:8.3.1-5.el8.0.0", satisfied: false}, - {version: "8.3.1-5.el8", constraint: "<= 0:8.3.1-5.el8.0.0", satisfied: true}, - {version: "8.3.1-5.el8.0.0", constraint: "> 0:8.3.1-5.el8", satisfied: false}, - {version: "8.3.1-5.el8.0.0", constraint: ">= 0:8.3.1-5.el8", satisfied: true}, - } - - for _, test := range tests { - t.Run(test.tName(), func(t *testing.T) { - constraint, err := newRpmConstraint(test.constraint) - assert.NoError(t, err, "unexpected error from newRpmConstraint: %v", err) - - test.assertVersionConstraint(t, RpmFormat, constraint) - }) - } -} diff --git a/grype/version/rpm_version.go b/grype/version/rpm_version.go index ecbedfe5638..6992b950f10 100644 --- a/grype/version/rpm_version.go +++ b/grype/version/rpm_version.go @@ -9,6 +9,8 @@ import ( "unicode" ) +var _ Comparator = (*rpmVersion)(nil) + type rpmVersion struct { epoch *int version string @@ -37,43 +39,17 @@ 
func newRpmVersion(raw string) (rpmVersion, error) { }, nil } -func splitEpochFromVersion(rawVersion string) (*int, string, error) { - fields := strings.SplitN(rawVersion, ":", 2) - - // When the epoch is not included, should be considered to be 0 during - // comparisons (see https://github.com/rpm-software-management/rpm/issues/450). - // But, often the inclusion of the epoch in vuln databases or source RPM - // filenames is not consistent so, represent a missing epoch as nil. This allows - // the comparison logic itself to determine if it should use a zero or another - // value which supports more flexible comparison options because the version - // creation is not lossy - - if len(fields) == 1 { - return nil, rawVersion, nil +func (v rpmVersion) Compare(other *Version) (int, error) { + if other == nil { + return -1, ErrNoVersionProvided } - // there is an epoch - epochStr := strings.TrimLeft(fields[0], " ") - - epoch, err := strconv.Atoi(epochStr) + o, err := newRpmVersion(other.Raw) if err != nil { - return nil, "", fmt.Errorf("unable to parse epoch (%s): %w", epochStr, err) - } - - return &epoch, fields[1], nil -} - -func (v *rpmVersion) Compare(other *Version) (int, error) { - other, err := finalizeComparisonVersion(other, RpmFormat) - if err != nil { - return -1, err - } - - if other.rich.rpmVer == nil { - return -1, fmt.Errorf("given empty rpmVersion object") + return 0, err } - return other.rich.rpmVer.compare(*v), nil + return v.compare(o), nil } // Compare returns 0 if v == v2, -1 if v < v2, and +1 if v > v2. @@ -135,6 +111,32 @@ func (v rpmVersion) String() string { return version } +func splitEpochFromVersion(rawVersion string) (*int, string, error) { + fields := strings.SplitN(rawVersion, ":", 2) + + // When the epoch is not included, should be considered to be 0 during + // comparisons (see https://github.com/rpm-software-management/rpm/issues/450). + // But, often the inclusion of the epoch in vuln databases or source RPM + // filenames is not consistent so, represent a missing epoch as nil. This allows + // the comparison logic itself to determine if it should use a zero or another + // value which supports more flexible comparison options because the version + // creation is not lossy + + if len(fields) == 1 { + return nil, rawVersion, nil + } + + // there is an epoch + epochStr := strings.TrimLeft(fields[0], " ") + + epoch, err := strconv.Atoi(epochStr) + if err != nil { + return nil, "", fmt.Errorf("unable to parse epoch (%s): %w", epochStr, err) + } + + return &epoch, fields[1], nil +} + // compareRpmVersions compares two version or release strings without the epoch. 
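(Illustration, not part of the patch.) The epoch policy documented in splitEpochFromVersion above, keeping a missing epoch as nil rather than coercing it to 0, is what lets the comparison step ignore epochs entirely when only one side declares one, matching the "non-standard epoch handling" cases in the tests. A minimal self-contained sketch of that policy, using hypothetical helper names:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// splitEpoch mirrors the idea above: a missing epoch is reported as nil
// instead of being forced to 0, so the caller decides how to treat it.
func splitEpoch(raw string) (*int, string) {
	fields := strings.SplitN(raw, ":", 2)
	if len(fields) == 1 {
		return nil, raw
	}
	epoch, err := strconv.Atoi(strings.TrimSpace(fields[0]))
	if err != nil {
		return nil, raw
	}
	return &epoch, fields[1]
}

// epochVerdict applies the policy exercised by the test cases: epochs only
// decide the comparison when both sides declare one.
func epochVerdict(a, b *int) (int, bool) {
	if a == nil || b == nil {
		return 0, false // only one (or neither) side has an epoch: ignore them
	}
	switch {
	case *a < *b:
		return -1, true
	case *a > *b:
		return 1, true
	}
	return 0, true
}

func main() {
	ea, va := splitEpoch("2:4.19.01-1.el7_5")
	eb, vb := splitEpoch("4.19.1-1.el7_5")
	if cmp, decisive := epochVerdict(ea, eb); decisive {
		fmt.Println("epochs decide the comparison:", cmp)
	} else {
		fmt.Println("epochs ignored; fall through to comparing", va, "and", vb)
	}
}
```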
// Source: https://github.com/cavaliercoder/go-rpm/blob/master/version.go // diff --git a/grype/version/rpm_version_test.go b/grype/version/rpm_version_test.go index 32eef13ab07..df4892d2c8e 100644 --- a/grype/version/rpm_version_test.go +++ b/grype/version/rpm_version_test.go @@ -8,7 +8,80 @@ import ( "github.com/stretchr/testify/require" ) -func TestVersionRpm(t *testing.T) { +func TestRpmVersion_Constraint(t *testing.T) { + tests := []testCase{ + // empty values + {version: "2.3.1", constraint: "", satisfied: true}, + // trivial compound conditions + {version: "2.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "1.3.1", constraint: "> 1.0.0, < 2.0.0", satisfied: true}, + {version: "2.0.0", constraint: "> 1.0.0, <= 2.0.0", satisfied: true}, + {version: "2.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "1.0.0", constraint: ">= 1.0.0, < 2.0.0", satisfied: true}, + {version: "1.0.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.9.0", constraint: "> 1.0.0, < 2.0.0", satisfied: false}, + {version: "1.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.2.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.0.1", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.6.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "2.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + // trivial scenarios + {version: "2.3.1", constraint: "< 2.0.0", satisfied: false}, + {version: "2.3.1", constraint: "< 2.0", satisfied: false}, + {version: "2.3.1", constraint: "< 2", satisfied: false}, + {version: "2.3.1", constraint: "< 2.3", satisfied: false}, + {version: "2.3.1", constraint: "< 2.3.1", satisfied: false}, + {version: "2.3.1", constraint: "< 2.3.2", satisfied: true}, + {version: "2.3.1", constraint: "< 2.4", satisfied: true}, + {version: "2.3.1", constraint: "< 3", satisfied: true}, + {version: "2.3.1", constraint: "< 3.0", satisfied: true}, + {version: "2.3.1", constraint: "< 3.0.0", satisfied: true}, + // epoch + {version: "1:0", constraint: "< 0:1", satisfied: false}, + {version: "2:4.19.01-1.el7_5", constraint: "< 2:4.19.1-1.el7_5", satisfied: false}, + {version: "2:4.19.01-1.el7_5", constraint: "<= 2:4.19.1-1.el7_5", satisfied: true}, + {version: "0:4.19.1-1.el7_5", constraint: "< 2:4.19.1-1.el7_5", satisfied: true}, + {version: "11:4.19.0-1.el7_5", constraint: "< 12:4.19.0-1.el7", satisfied: true}, + {version: "13:4.19.0-1.el7_5", constraint: "< 12:4.19.0-1.el7", satisfied: false}, + // regression: https://github.com/anchore/grype/issues/316 + {version: "1.5.4-2.el7_9", constraint: "< 0:1.5.4-2.el7_9", satisfied: false}, + {version: "1.5.4-2.el7", constraint: "< 0:1.5.4-2.el7_9", satisfied: true}, + // Non-standard epoch handling. 
In comparisons with epoch on only one side, they are both ignored + {version: "1:0", constraint: "< 1", satisfied: true}, + {version: "0:0", constraint: "< 0", satisfied: false}, + {version: "0:0", constraint: "= 0", satisfied: true}, + {version: "0", constraint: "= 0:0", satisfied: true}, + {version: "1.0", constraint: "< 2:1.0", satisfied: false}, + {version: "1.0", constraint: "<= 2:1.0", satisfied: true}, + {version: "1:2", constraint: "< 1", satisfied: false}, + {version: "1:2", constraint: "> 1", satisfied: true}, + {version: "2:4.19.01-1.el7_5", constraint: "< 4.19.1-1.el7_5", satisfied: false}, + {version: "2:4.19.01-1.el7_5", constraint: "<= 4.19.1-1.el7_5", satisfied: true}, + {version: "4.19.01-1.el7_5", constraint: "< 2:4.19.1-1.el7_5", satisfied: false}, + {version: "4.19.0-1.el7_5", constraint: "< 12:4.19.0-1.el7", satisfied: false}, + {version: "4.19.0-1.el7_5", constraint: "<= 12:4.19.0-1.el7", satisfied: false}, + {version: "3:4.19.0-1.el7_5", constraint: "< 4.21.0-1.el7", satisfied: true}, + {version: "4:1.2.3-3-el7_5", constraint: "< 1.2.3-el7_5~snapshot1", satisfied: false}, + // regression https://github.com/anchore/grype/issues/398 + {version: "8.3.1-5.el8.4", constraint: "< 0:8.3.1-5.el8.5", satisfied: true}, + {version: "8.3.1-5.el8.40", constraint: "< 0:8.3.1-5.el8.5", satisfied: false}, + {version: "8.3.1-5.el8", constraint: "< 0:8.3.1-5.el8.0.0", satisfied: false}, + {version: "8.3.1-5.el8", constraint: "<= 0:8.3.1-5.el8.0.0", satisfied: true}, + {version: "8.3.1-5.el8.0.0", constraint: "> 0:8.3.1-5.el8", satisfied: false}, + {version: "8.3.1-5.el8.0.0", constraint: ">= 0:8.3.1-5.el8", satisfied: true}, + } + + for _, test := range tests { + t.Run(test.tName(), func(t *testing.T) { + constraint, err := GetConstraint(test.constraint, RpmFormat) + assert.NoError(t, err, "unexpected error from newRpmConstraint: %v", err) + + test.assertVersionConstraint(t, RpmFormat, constraint) + }) + } +} + +func TestRpmVersion_Compare(t *testing.T) { tests := []struct { v1 string v2 string @@ -29,7 +102,8 @@ func TestVersionRpm(t *testing.T) { {"1:2", "1", 1}, {"0:4.19.1-1.el7_5", "2:4.19.1-1.el7_5", -1}, {"4:1.2.3-3-el7_5", "1.2.3-el7_5~snapshot1", 1}, - //Non-standard comparisons that ignore epochs due to only one being available + + // non-standard comparisons that ignore epochs due to only one being available {"1:0", "1", -1}, {"2:4.19.01-1.el7_5", "4.19.1-1.el7_5", 0}, {"4.19.01-1.el7_5", "2:4.19.1-1.el7_5", 0}, @@ -40,26 +114,17 @@ func TestVersionRpm(t *testing.T) { for _, test := range tests { name := test.v1 + "_vs_" + test.v2 t.Run(name, func(t *testing.T) { - v1, err := newRpmVersion(test.v1) - if err != nil { - t.Fatalf("failed to create v1: %+v", err) - } - - v2, err := newRpmVersion(test.v2) - if err != nil { - t.Fatalf("failed to create v2: %+v", err) - } - - actual := v1.compare(v2) + v1 := NewVersion(test.v1, RpmFormat) + v2 := NewVersion(test.v2, RpmFormat) - if actual != test.result { - t.Errorf("bad result: %+v (expected: %+v)", actual, test.result) - } + actual, err := v1.Compare(v2) + require.NoError(t, err, "unexpected error comparing versions: %s vs %s", test.v1, test.v2) + assert.Equal(t, test.result, actual, "expected comparison result to match for %s vs %s", test.v1, test.v2) }) } } -func TestRpmVersionCompare_Format(t *testing.T) { +func TestRpmVersion_Compare_Format(t *testing.T) { tests := []struct { name string thisVersion string @@ -82,22 +147,6 @@ func TestRpmVersionCompare_Format(t *testing.T) { otherFormat: RpmFormat, expectError: false, }, - { 
- name: "different format returns error", - thisVersion: "1.2.3-1", - otherVersion: "1.2.3", - otherFormat: SemanticFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, - { - name: "different format returns error - apk", - thisVersion: "1.2.3-1", - otherVersion: "1.2.3-r4", - otherFormat: ApkFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, { name: "unknown format attempts upgrade - valid rpm format", thisVersion: "1.2.3-1", @@ -109,11 +158,9 @@ func TestRpmVersionCompare_Format(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer, err := newRpmVersion(test.thisVersion) - require.NoError(t, err) + thisVer := NewVersion(test.thisVersion, RpmFormat) - otherVer, err := NewVersion(test.otherVersion, test.otherFormat) - require.NoError(t, err) + otherVer := NewVersion(test.otherVersion, test.otherFormat) result, err := thisVer.Compare(otherVer) @@ -131,47 +178,31 @@ func TestRpmVersionCompare_Format(t *testing.T) { } } -func TestRpmVersionCompareEdgeCases(t *testing.T) { +func TestRpmVersion_Compare_EdgeCases(t *testing.T) { tests := []struct { name string - setupFunc func() (*rpmVersion, *Version) + setupFunc func(testing.TB) (*Version, *Version) expectError bool errorSubstring string }{ { name: "nil version object", - setupFunc: func() (*rpmVersion, *Version) { - thisVer, _ := newRpmVersion("1.2.3-1") - return &thisVer, nil + setupFunc: func(t testing.TB) (*Version, *Version) { + thisVer := NewVersion("1.2.3-1", RpmFormat) + return thisVer, nil }, expectError: true, errorSubstring: "no version provided for comparison", }, - { - name: "empty rpmVersion in other object", - setupFunc: func() (*rpmVersion, *Version) { - thisVer, _ := newRpmVersion("1.2.3-1") - - otherVer := &Version{ - Raw: "1.2.3-2", - Format: RpmFormat, - rich: rich{}, - } - - return &thisVer, otherVer - }, - expectError: true, - errorSubstring: "given empty rpmVersion object", - }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer, otherVer := test.setupFunc() + thisVer, otherVer := test.setupFunc(t) _, err := thisVer.Compare(otherVer) - assert.Error(t, err) + require.Error(t, err) if test.errorSubstring != "" { assert.True(t, strings.Contains(err.Error(), test.errorSubstring), "Expected error to contain '%s', got: %v", test.errorSubstring, err) diff --git a/grype/version/semantic_constraint.go b/grype/version/semantic_constraint.go deleted file mode 100644 index 69192dff630..00000000000 --- a/grype/version/semantic_constraint.go +++ /dev/null @@ -1,74 +0,0 @@ -package version - -import ( - "fmt" - "strings" - - hashiVer "github.com/anchore/go-version" -) - -// ruby packages such as activerecord and sprockets don't strictly follow semver -// note: this may result in missed matches for versioned betas -var normalizer = strings.NewReplacer(".alpha", "-alpha", ".beta", "-beta", ".rc", "-rc") - -type semanticConstraint struct { - raw string - constraint hashiVer.Constraints -} - -func newSemanticConstraint(constStr string) (semanticConstraint, error) { - if constStr == "" { - // an empty constraint is always satisfied - return semanticConstraint{}, nil - } - - normalized := normalizer.Replace(constStr) - - constraints, err := hashiVer.NewConstraint(normalized) - if err != nil { - return semanticConstraint{}, err - } - return semanticConstraint{ - raw: normalized, - constraint: constraints, - }, nil -} - -func (c semanticConstraint) supported(format Format) bool { - // 
gemfiles are a case of semantic version combined with non-semver - // and that doesn't work well. Gemfile_version.go extracts the semVer - // portion and makes a semVer object that is compatible with - // these constraints. In practice two formats (semVer, gem version) follow semVer, - // but one of them needs extra cleanup to function (gem). - return format == SemanticFormat || format == GemFormat -} - -func (c semanticConstraint) Satisfied(version *Version) (bool, error) { - if c.raw == "" && version != nil { - // an empty constraint is always satisfied - return true, nil - } else if version == nil { - if c.raw != "" { - // a non-empty constraint with no version given should always fail - return false, nil - } - return true, nil - } - - if !c.supported(version.Format) { - return false, NewUnsupportedFormatError(SemanticFormat, version.Format) - } - - if version.rich.semVer == nil { - return false, fmt.Errorf("no rich semantic version given: %+v", version) - } - - return c.constraint.Check(version.rich.semVer.verObj), nil -} - -func (c semanticConstraint) String() string { - if c.raw == "" { - return "none (semver)" - } - return fmt.Sprintf("%s (semver)", c.raw) -} diff --git a/grype/version/semantic_constraint_test.go b/grype/version/semantic_constraint_test.go deleted file mode 100644 index 43dae2f2c42..00000000000 --- a/grype/version/semantic_constraint_test.go +++ /dev/null @@ -1,95 +0,0 @@ -package version - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestVersionSemantic(t *testing.T) { - tests := []testCase{ - // empty values - {version: "2.3.1", constraint: "", satisfied: true}, - // typical cases - {version: "0.9.9-r0", constraint: "< 0.9.12-r1", satisfied: true}, // regression case - {version: "1.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, - {version: "0.2.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, - {version: "0.0.1", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "0.6.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "2.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, - {version: "2.3.1", constraint: "2.3.1", satisfied: true}, - {version: "2.3.1", constraint: "= 2.3.1", satisfied: true}, - {version: "2.3.1", constraint: " = 2.3.1", satisfied: true}, - {version: "2.3.1", constraint: ">= 2.3.1", satisfied: true}, - {version: "2.3.1", constraint: "> 2.0.0", satisfied: true}, - {version: "2.3.1", constraint: "> 2.0", satisfied: true}, - {version: "2.3.1", constraint: "> 2", satisfied: true}, - {version: "2.3.1", constraint: "> 2, < 3", satisfied: true}, - {version: "2.3.1", constraint: "> 2.3, < 3.1", satisfied: true}, - {version: "2.3.1", constraint: "> 2.3.0, < 3.1", satisfied: true}, - {version: "2.3.1", constraint: ">= 2.3.1, < 3.1", satisfied: true}, - {version: "2.3.1", constraint: " = 2.3.2", satisfied: false}, - {version: "2.3.1", constraint: ">= 2.3.2", satisfied: false}, - {version: "2.3.1", constraint: "> 2.3.1", satisfied: false}, - {version: "2.3.1", constraint: "< 2.0.0", satisfied: false}, - {version: "2.3.1", constraint: "< 2.0", satisfied: false}, - {version: "2.3.1", constraint: "< 2", satisfied: false}, - {version: "2.3.1", constraint: "< 2, > 3", satisfied: false}, - {version: "2.3.1+meta", constraint: "2.3.1", satisfied: true}, - {version: "2.3.1+meta", constraint: "= 2.3.1", satisfied: true}, - {version: "2.3.1+meta", constraint: " = 2.3.1", satisfied: true}, - 
{version: "2.3.1+meta", constraint: ">= 2.3.1", satisfied: true}, - {version: "2.3.1+meta", constraint: "> 2.0.0", satisfied: true}, - {version: "2.3.1+meta", constraint: "> 2.0", satisfied: true}, - {version: "2.3.1+meta", constraint: "> 2", satisfied: true}, - {version: "2.3.1+meta", constraint: "> 2, < 3", satisfied: true}, - {version: "2.3.1+meta", constraint: "> 2.3, < 3.1", satisfied: true}, - {version: "2.3.1+meta", constraint: "> 2.3.0, < 3.1", satisfied: true}, - {version: "2.3.1+meta", constraint: ">= 2.3.1, < 3.1", satisfied: true}, - {version: "2.3.1+meta", constraint: " = 2.3.2", satisfied: false}, - {version: "2.3.1+meta", constraint: ">= 2.3.2", satisfied: false}, - {version: "2.3.1+meta", constraint: "> 2.3.1", satisfied: false}, - {version: "2.3.1+meta", constraint: "< 2.0.0", satisfied: false}, - {version: "2.3.1+meta", constraint: "< 2.0", satisfied: false}, - {version: "2.3.1+meta", constraint: "< 2", satisfied: false}, - {version: "2.3.1+meta", constraint: "< 2, > 3", satisfied: false}, - // from https://github.com/hashicorp/go-version/issues/61 - // and https://semver.org/#spec-item-11 - // A larger set of pre-release fields has a higher precedence than a smaller set, if all of the preceding identifiers are equal. - {version: "1.0.0-alpha", constraint: "> 1.0.0-alpha.1", satisfied: false}, - {version: "1.0.0-alpha", constraint: "< 1.0.0-alpha.1", satisfied: true}, - {version: "1.0.0-alpha.1", constraint: "> 1.0.0-alpha.beta", satisfied: false}, - {version: "1.0.0-alpha.1", constraint: "< 1.0.0-alpha.beta", satisfied: true}, - {version: "1.0.0-alpha.beta", constraint: "> 1.0.0-beta", satisfied: false}, - {version: "1.0.0-alpha.beta", constraint: "< 1.0.0-beta", satisfied: true}, - {version: "1.0.0-beta", constraint: "> 1.0.0-beta.2", satisfied: false}, - {version: "1.0.0-beta", constraint: "< 1.0.0-beta.2", satisfied: true}, - {version: "1.0.0-beta.2", constraint: "> 1.0.0-beta.11", satisfied: false}, - {version: "1.0.0-beta.2", constraint: "< 1.0.0-beta.11", satisfied: true}, - {version: "1.0.0-beta.11", constraint: "> 1.0.0-rc.1", satisfied: false}, - {version: "1.0.0-beta.11", constraint: "< 1.0.0-rc.1", satisfied: true}, - {version: "1.0.0-rc.1", constraint: "> 1.0.0", satisfied: false}, - {version: "1.0.0-rc.1", constraint: "< 1.0.0", satisfied: true}, - {version: "1.20rc1", constraint: " = 1.20.0-rc1", satisfied: true}, - {version: "1.21rc2", constraint: " = 1.21.1", satisfied: false}, - {version: "1.21rc2", constraint: " = 1.21", satisfied: false}, - {version: "1.21rc2", constraint: " = 1.21-rc2", satisfied: true}, - {version: "1.21rc2", constraint: " = 1.21.0-rc2", satisfied: true}, - {version: "1.21rc2", constraint: " = 1.21.0rc2", satisfied: true}, - {version: "1.0.0-alpha.1", constraint: "> 1.0.0-alpha.1", satisfied: false}, - {version: "1.0.0-alpha.2", constraint: "> 1.0.0-alpha.1", satisfied: true}, - {version: "1.2.0-beta", constraint: ">1.0, <2.0", satisfied: true}, - {version: "1.2.0-beta", constraint: ">1.0", satisfied: true}, - {version: "1.2.0-beta", constraint: "<2.0", satisfied: true}, - {version: "1.2.0", constraint: ">1.0, <2.0", satisfied: true}, - } - - for _, test := range tests { - t.Run(test.tName(), func(t *testing.T) { - constraint, err := newSemanticConstraint(test.constraint) - assert.NoError(t, err, "unexpected error from newSemanticConstraint: %v", err) - - test.assertVersionConstraint(t, SemanticFormat, constraint) - }) - } -} diff --git a/grype/version/semantic_version.go b/grype/version/semantic_version.go index 
d9d2383afe3..b9c0439e46c 100644 --- a/grype/version/semantic_version.go +++ b/grype/version/semantic_version.go @@ -1,34 +1,56 @@ package version import ( - "fmt" + "regexp" + "strings" hashiVer "github.com/anchore/go-version" ) +var _ Comparator = (*semanticVersion)(nil) + +// semverPrereleaseNormalizer are meant to replace common pre-release suffixes with standard semver pre-release suffixes. +// this is primarily intended for to cover ruby packages such as activerecord and sprockets, which don't strictly +// follow semver, however, this can generally be applied to other cases using semver as well. +// note: this may result in missed matches for versioned betas +var semverPrereleaseNormalizer = strings.NewReplacer(".alpha", "-alpha", ".beta", "-beta", ".rc", "-rc") + type semanticVersion struct { - verObj *hashiVer.Version + obj *hashiVer.Version } -func newSemanticVersion(raw string) (*semanticVersion, error) { - verObj, err := hashiVer.NewVersion(normalizer.Replace(raw)) +var versionStartsWithV = regexp.MustCompile(`^v\d+`) + +func newSemanticVersion(raw string, strict bool) (semanticVersion, error) { + clean := semverPrereleaseNormalizer.Replace(raw) + + var verObj *hashiVer.Version + var err error + if strict { + // we still want v-prefix processing + if versionStartsWithV.MatchString(clean) { + clean = strings.TrimPrefix(clean, "v") + } + verObj, err = hashiVer.NewSemver(clean) + } else { + verObj, err = hashiVer.NewVersion(clean) + } if err != nil { - return nil, fmt.Errorf("unable to create semver obj: %w", err) + return semanticVersion{}, invalidFormatError(SemanticFormat, raw, err) } - return &semanticVersion{ - verObj: verObj, + return semanticVersion{ + obj: verObj, }, nil } -func (v *semanticVersion) Compare(other *Version) (int, error) { - other, err := finalizeComparisonVersion(other, SemanticFormat) - if err != nil { - return -1, err +func (v semanticVersion) Compare(other *Version) (int, error) { + if other == nil { + return -1, ErrNoVersionProvided } - if other.rich.semVer == nil { - return -1, fmt.Errorf("given empty semanticVersion object") + o, err := newSemanticVersion(other.Raw, false) + if err != nil { + return 0, err } - - return other.rich.semVer.verObj.Compare(v.verObj), nil + return v.obj.Compare(o.obj), nil } diff --git a/grype/version/semantic_version_test.go b/grype/version/semantic_version_test.go index 81d518dcf6f..ca4deef0d4e 100644 --- a/grype/version/semantic_version_test.go +++ b/grype/version/semantic_version_test.go @@ -8,7 +8,210 @@ import ( "github.com/stretchr/testify/require" ) -func TestSemanticVersionCompare_Format(t *testing.T) { +func TestSemanticVersion_Constraint(t *testing.T) { + tests := []testCase{ + // empty values + {version: "2.3.1", constraint: "", satisfied: true}, + // typical cases + {version: "0.9.9-r0", constraint: "< 0.9.12-r1", satisfied: true}, // regression case + {version: "1.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.2.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: true}, + {version: "0.0.1", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "0.6.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "2.5.0", constraint: "> 0.1.0, < 0.5.0 || > 1.0.0, < 2.0.0", satisfied: false}, + {version: "2.3.1", constraint: "2.3.1", satisfied: true}, + {version: "2.3.1", constraint: "= 2.3.1", satisfied: true}, + {version: "2.3.1", constraint: " = 2.3.1", satisfied: true}, + {version: "2.3.1", constraint: 
">= 2.3.1", satisfied: true}, + {version: "2.3.1", constraint: "> 2.0.0", satisfied: true}, + {version: "2.3.1", constraint: "> 2.0", satisfied: true}, + {version: "2.3.1", constraint: "> 2", satisfied: true}, + {version: "2.3.1", constraint: "> 2, < 3", satisfied: true}, + {version: "2.3.1", constraint: "> 2.3, < 3.1", satisfied: true}, + {version: "2.3.1", constraint: "> 2.3.0, < 3.1", satisfied: true}, + {version: "2.3.1", constraint: ">= 2.3.1, < 3.1", satisfied: true}, + {version: "2.3.1", constraint: " = 2.3.2", satisfied: false}, + {version: "2.3.1", constraint: ">= 2.3.2", satisfied: false}, + {version: "2.3.1", constraint: "> 2.3.1", satisfied: false}, + {version: "2.3.1", constraint: "< 2.0.0", satisfied: false}, + {version: "2.3.1", constraint: "< 2.0", satisfied: false}, + {version: "2.3.1", constraint: "< 2", satisfied: false}, + {version: "2.3.1", constraint: "< 2, > 3", satisfied: false}, + {version: "2.3.1+meta", constraint: "2.3.1", satisfied: true}, + {version: "2.3.1+meta", constraint: "= 2.3.1", satisfied: true}, + {version: "2.3.1+meta", constraint: " = 2.3.1", satisfied: true}, + {version: "2.3.1+meta", constraint: ">= 2.3.1", satisfied: true}, + {version: "2.3.1+meta", constraint: "> 2.0.0", satisfied: true}, + {version: "2.3.1+meta", constraint: "> 2.0", satisfied: true}, + {version: "2.3.1+meta", constraint: "> 2", satisfied: true}, + {version: "2.3.1+meta", constraint: "> 2, < 3", satisfied: true}, + {version: "2.3.1+meta", constraint: "> 2.3, < 3.1", satisfied: true}, + {version: "2.3.1+meta", constraint: "> 2.3.0, < 3.1", satisfied: true}, + {version: "2.3.1+meta", constraint: ">= 2.3.1, < 3.1", satisfied: true}, + {version: "2.3.1+meta", constraint: " = 2.3.2", satisfied: false}, + {version: "2.3.1+meta", constraint: ">= 2.3.2", satisfied: false}, + {version: "2.3.1+meta", constraint: "> 2.3.1", satisfied: false}, + {version: "2.3.1+meta", constraint: "< 2.0.0", satisfied: false}, + {version: "2.3.1+meta", constraint: "< 2.0", satisfied: false}, + {version: "2.3.1+meta", constraint: "< 2", satisfied: false}, + {version: "2.3.1+meta", constraint: "< 2, > 3", satisfied: false}, + // from https://github.com/hashicorp/go-version/issues/61 + // and https://semver.org/#spec-item-11 + // A larger set of pre-release fields has a higher precedence than a smaller set, if all of the preceding identifiers are equal. 
+ {version: "1.0.0-alpha", constraint: "> 1.0.0-alpha.1", satisfied: false}, + {version: "1.0.0-alpha", constraint: "< 1.0.0-alpha.1", satisfied: true}, + {version: "1.0.0-alpha.1", constraint: "> 1.0.0-alpha.beta", satisfied: false}, + {version: "1.0.0-alpha.1", constraint: "< 1.0.0-alpha.beta", satisfied: true}, + {version: "1.0.0-alpha.beta", constraint: "> 1.0.0-beta", satisfied: false}, + {version: "1.0.0-alpha.beta", constraint: "< 1.0.0-beta", satisfied: true}, + {version: "1.0.0-beta", constraint: "> 1.0.0-beta.2", satisfied: false}, + {version: "1.0.0-beta", constraint: "< 1.0.0-beta.2", satisfied: true}, + {version: "1.0.0-beta.2", constraint: "> 1.0.0-beta.11", satisfied: false}, + {version: "1.0.0-beta.2", constraint: "< 1.0.0-beta.11", satisfied: true}, + {version: "1.0.0-beta.11", constraint: "> 1.0.0-rc.1", satisfied: false}, + {version: "1.0.0-beta.11", constraint: "< 1.0.0-rc.1", satisfied: true}, + {version: "1.0.0-rc.1", constraint: "> 1.0.0", satisfied: false}, + {version: "1.0.0-rc.1", constraint: "< 1.0.0", satisfied: true}, + {version: "1.20rc1", constraint: " = 1.20.0-rc1", satisfied: true}, + {version: "1.21rc2", constraint: " = 1.21.1", satisfied: false}, + {version: "1.21rc2", constraint: " = 1.21", satisfied: false}, + {version: "1.21rc2", constraint: " = 1.21-rc2", satisfied: true}, + {version: "1.21rc2", constraint: " = 1.21.0-rc2", satisfied: true}, + {version: "1.21rc2", constraint: " = 1.21.0rc2", satisfied: true}, + {version: "1.0.0-alpha.1", constraint: "> 1.0.0-alpha.1", satisfied: false}, + {version: "1.0.0-alpha.2", constraint: "> 1.0.0-alpha.1", satisfied: true}, + {version: "1.2.0-beta", constraint: ">1.0, <2.0", satisfied: true}, + {version: "1.2.0-beta", constraint: ">1.0", satisfied: true}, + {version: "1.2.0-beta", constraint: "<2.0", satisfied: true}, + {version: "1.2.0", constraint: ">1.0, <2.0", satisfied: true}, + + // below are test cases for the ruby version normalizer that converts .alpha, .beta, .rc to -alpha, -beta, -rc + + // prerelease normalizer - alpha versions + {version: "1.0.0.alpha", constraint: "< 1.0.0", satisfied: true}, + {version: "1.0.0.alpha", constraint: "> 1.0.0-alpha", satisfied: false}, // should be equal after normalization + {version: "1.0.0.alpha", constraint: "= 1.0.0-alpha", satisfied: true}, + {version: "1.0.0.alpha1", constraint: "= 1.0.0-alpha1", satisfied: true}, + {version: "1.0.0.alpha.1", constraint: "= 1.0.0-alpha.1", satisfied: true}, + + // prerelease normalizer - beta versions + {version: "1.0.0.beta", constraint: "< 1.0.0", satisfied: true}, + {version: "1.0.0.beta", constraint: "> 1.0.0-alpha", satisfied: true}, + {version: "1.0.0.beta", constraint: "= 1.0.0-beta", satisfied: true}, + {version: "1.0.0.beta2", constraint: "= 1.0.0-beta2", satisfied: true}, + {version: "1.0.0.beta.2", constraint: "= 1.0.0-beta.2", satisfied: true}, + + // prerelease normalizer - rc versions + {version: "1.0.0.rc", constraint: "< 1.0.0", satisfied: true}, + {version: "1.0.0.rc", constraint: "> 1.0.0-beta", satisfied: true}, + {version: "1.0.0.rc", constraint: "= 1.0.0-rc", satisfied: true}, + {version: "1.0.0.rc1", constraint: "= 1.0.0-rc1", satisfied: true}, + {version: "1.0.0.rc.1", constraint: "= 1.0.0-rc.1", satisfied: true}, + + // prerelease normalizer - ordering tests to ensure normalization doesn't break semver precedence + {version: "1.0.0.alpha", constraint: "< 1.0.0-beta", satisfied: true}, + {version: "1.0.0.beta", constraint: "< 1.0.0-rc", satisfied: true}, + {version: "1.0.0.rc", constraint: "< 1.0.0", 
satisfied: true}, + {version: "1.0.0.alpha1", constraint: "< 1.0.0-alpha2", satisfied: true}, + + // prerelease normalizer - mixed ruby and standard semver styles in constraints + {version: "1.0.0.alpha", constraint: "< 1.0.0-beta", satisfied: true}, + {version: "1.0.0-alpha", constraint: "< 1.0.0-beta", satisfied: true}, + + // prerelease normalizer - complex constraints with ruby-style versions + {version: "1.0.0.alpha", constraint: "> 0.9.0, < 1.0.0", satisfied: true}, + {version: "1.0.0.beta", constraint: "> 1.0.0-alpha, < 1.0.0", satisfied: true}, + {version: "2.1.0.rc1", constraint: "> 2.0.0, < 2.1.0", satisfied: true}, + + // prerelease normalizer - edge cases + {version: "1.0.0.alpha.beta", constraint: "= 1.0.0-alpha-beta", satisfied: true}, // multiple replacements + {version: "1.0.0.rc.alpha", constraint: "= 1.0.0-rc-alpha", satisfied: true}, // mixed order + + // prerelease normalizer - ensure regular versions still work + {version: "1.0.0-alpha", constraint: "< 1.0.0", satisfied: true}, + {version: "1.0.0-beta", constraint: "> 1.0.0-alpha", satisfied: true}, + {version: "1.0.0-rc", constraint: "> 1.0.0-beta", satisfied: true}, + } + + for _, test := range tests { + t.Run(test.tName(), func(t *testing.T) { + constraint, err := GetConstraint(test.constraint, SemanticFormat) + assert.NoError(t, err) + + test.assertVersionConstraint(t, SemanticFormat, constraint) + }) + } +} + +func TestSemanticVersion_PrereleaseNormalizer_EdgeCases(t *testing.T) { + // test edge cases to ensure the normalizer can be safely retained + tests := []struct { + name string + version string + wantError require.ErrorAssertionFunc + }{ + { + name: "version with only alpha", + version: "alpha", + wantError: require.Error, // invalid semver + }, + { + name: "version with leading alpha", + version: "alpha.1.0.0", + wantError: require.Error, // invalid semver + }, + { + name: "empty version", + version: "", + wantError: require.Error, + }, + { + name: "version with multiple dots in prerelease", + version: "1.0.0.alpha.beta.rc", // should normalize to 1.0.0-alpha-beta-rc + }, + { + name: "version already in correct format", + version: "1.0.0-alpha", + }, + { + name: "version with build metadata", + version: "1.0.0.alpha+build", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + if test.wantError == nil { + test.wantError = require.NoError + } + _, err := newSemanticVersion(test.version, false) + test.wantError(t, err, "expected error for version: %s", test.version) + }) + } +} + +func TestSemanticVersion_PrereleaseNormalizer_WithGemFormat(t *testing.T) { + // ensure that the prerelease normalizer in semantic format doesn't conflict with gem format + rubyStyleVersions := []string{ + "1.0.0.alpha", + "1.0.0.beta.1", + "1.0.0.rc2", + } + + for _, version := range rubyStyleVersions { + t.Run(version, func(t *testing.T) { + // both semantic and gem formats should be able to handle these versions + semanticVer := NewVersion(version, SemanticFormat) + gemVer := NewVersion(version, GemFormat) + + // they might have different comparison behavior, but both should be valid + assert.NotNil(t, semanticVer) + assert.NotNil(t, gemVer) + }) + } +} + +func TestSemanticVersion_Compare_Format(t *testing.T) { tests := []struct { name string thisVersion string @@ -38,30 +241,6 @@ func TestSemanticVersionCompare_Format(t *testing.T) { otherFormat: SemanticFormat, expectError: false, }, - { - name: "different format returns error", - thisVersion: "1.2.3", - otherVersion: "1.2.3-1", - otherFormat: 
DebFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, - { - name: "different format returns error - apk", - thisVersion: "1.2.3", - otherVersion: "1.2.3-r4", - otherFormat: ApkFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, - { - name: "different format returns error - rpm", - thisVersion: "1.2.3", - otherVersion: "1.2.3-1", - otherFormat: RpmFormat, - expectError: true, - errorSubstring: "unsupported version format for comparison", - }, { name: "unknown format attempts upgrade - valid semantic format", thisVersion: "1.2.3", @@ -75,17 +254,16 @@ func TestSemanticVersionCompare_Format(t *testing.T) { otherVersion: "not.valid.semver", otherFormat: UnknownFormat, expectError: true, - errorSubstring: "unsupported version format for comparison", + errorSubstring: "invalid", }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer, err := newSemanticVersion(test.thisVersion) + thisVer, err := newSemanticVersion(test.thisVersion, true) require.NoError(t, err) - otherVer, err := NewVersion(test.otherVersion, test.otherFormat) - require.NoError(t, err) + otherVer := NewVersion(test.otherVersion, test.otherFormat) result, err := thisVer.Compare(otherVer) @@ -103,47 +281,31 @@ func TestSemanticVersionCompare_Format(t *testing.T) { } } -func TestSemanticVersionCompareEdgeCases(t *testing.T) { +func TestSemanticVersion_Compare_EdgeCases(t *testing.T) { tests := []struct { name string - setupFunc func() (*semanticVersion, *Version) + setupFunc func(testing.TB) (*Version, *Version) expectError bool errorSubstring string }{ { name: "nil version object", - setupFunc: func() (*semanticVersion, *Version) { - thisVer, _ := newSemanticVersion("1.2.3") + setupFunc: func(t testing.TB) (*Version, *Version) { + thisVer := NewVersion("1.2.3", SemanticFormat) return thisVer, nil }, expectError: true, errorSubstring: "no version provided for comparison", }, - { - name: "empty semanticVersion in other object", - setupFunc: func() (*semanticVersion, *Version) { - thisVer, _ := newSemanticVersion("1.2.3") - - otherVer := &Version{ - Raw: "1.2.4", - Format: SemanticFormat, - rich: rich{}, - } - - return thisVer, otherVer - }, - expectError: true, - errorSubstring: "given empty semanticVersion object", - }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - thisVer, otherVer := test.setupFunc() + thisVer, otherVer := test.setupFunc(t) _, err := thisVer.Compare(otherVer) - assert.Error(t, err) + require.Error(t, err) if test.errorSubstring != "" { assert.True(t, strings.Contains(err.Error(), test.errorSubstring), "Expected error to contain '%s', got: %v", test.errorSubstring, err) diff --git a/grype/version/set.go b/grype/version/set.go new file mode 100644 index 00000000000..95f13543549 --- /dev/null +++ b/grype/version/set.go @@ -0,0 +1,116 @@ +package version + +import ( + "sort" +) + +type Set struct { + versions map[string]*Version + getKey func(v *Version) string +} + +func NewSet(ignoreFormat bool, vs ...*Version) *Set { + var getKey func(v *Version) string + if ignoreFormat { + getKey = func(v *Version) string { + if v == nil { + return "" + } + return v.Raw + } + } else { + getKey = func(v *Version) string { + if v == nil { + return "" + } + return v.Raw + ":" + v.Format.String() + } + } + s := &Set{ + versions: make(map[string]*Version), + getKey: getKey, + } + s.Add(vs...) 
+ return s +} + +func (s *Set) Add(vs ...*Version) { + if s.versions == nil { + s.versions = make(map[string]*Version) + } + + for _, v := range vs { + if v == nil { + continue + } + key := s.getKey(v) + s.versions[key] = v + } +} + +func (s *Set) Remove(vs ...*Version) { + if s.versions == nil { + return + } + + for _, v := range vs { + if v == nil { + continue + } + key := s.getKey(v) + delete(s.versions, key) + } +} + +func (s *Set) Contains(v *Version) bool { + if v == nil || s.versions == nil { + return false + } + + key := s.getKey(v) + _, exists := s.versions[key] + return exists +} + +func (s *Set) Values() []*Version { + if len(s.versions) == 0 { + return nil + } + + out := make([]*Version, 0, len(s.versions)) + for _, v := range s.versions { + out = append(out, v) + } + + sort.Slice(out, func(i, j int) bool { + if out[i] == nil && out[j] == nil { + return false + } + if out[i] == nil { + return true + } + if out[j] == nil { + return false + } + cmp, err := out[i].Compare(out[j]) + if err != nil { + return false // if we can't compare, don't change the order + } + return cmp < 0 + }) + + return out +} + +func (s *Set) Size() int { + if s.versions == nil { + return 0 + } + return len(s.versions) +} + +func (s *Set) Clear() { + if s.versions != nil { + s.versions = make(map[string]*Version) + } +} diff --git a/grype/version/set_test.go b/grype/version/set_test.go new file mode 100644 index 00000000000..ac19de4c494 --- /dev/null +++ b/grype/version/set_test.go @@ -0,0 +1,539 @@ +package version + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNewSet(t *testing.T) { + tests := []struct { + name string + ignoreFormat bool + versions []*Version + expectedSize int + }{ + { + name: "empty set", + ignoreFormat: false, + versions: nil, + expectedSize: 0, + }, + { + name: "set with versions", + ignoreFormat: false, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + NewVersion("2.0.0", SemanticFormat), + }, + expectedSize: 2, + }, + { + name: "set with duplicate versions ignoring format", + ignoreFormat: true, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + NewVersion("1.0.0", ApkFormat), + }, + expectedSize: 1, + }, + { + name: "set with duplicate versions not ignoring format", + ignoreFormat: false, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + NewVersion("1.0.0", ApkFormat), + }, + expectedSize: 2, + }, + { + name: "set with nil versions", + ignoreFormat: false, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + nil, + NewVersion("2.0.0", SemanticFormat), + }, + expectedSize: 2, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + s := NewSet(tt.ignoreFormat, tt.versions...) 
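(Illustration, not part of the patch.) Alongside the table tests, a testable example may make the Set semantics easier to see at a glance: entries are keyed either by the raw version string alone (ignoreFormat=true) or by raw plus format, and Values returns a sorted slice. A sketch assuming it lives next to set_test.go in package version:

```go
package version

import "fmt"

// ExampleSet is an illustrative sketch only; it exercises the API added in
// this diff from within the version package.
func ExampleSet() {
	// ignoreFormat=true keys entries by the raw string only, so "1.0.0" in
	// two different formats collapses to a single entry (last one added wins).
	s := NewSet(true,
		NewVersion("1.0.0", SemanticFormat),
		NewVersion("1.0.0", ApkFormat),
		NewVersion("2.0.0", SemanticFormat),
	)
	fmt.Println(s.Size())

	s.Remove(NewVersion("2.0.0", SemanticFormat))
	for _, v := range s.Values() { // Values returns versions in ascending order
		fmt.Println(v.Raw)
	}

	// Output:
	// 2
	// 1.0.0
}
```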
+ assert.Equal(t, tt.expectedSize, s.Size()) + }) + } +} + +func TestSet_Add(t *testing.T) { + tests := []struct { + name string + ignoreFormat bool + initialVersions []*Version + versionsToAdd []*Version + expectedSize int + expectedContains *Version + expectedNotContain *Version + }{ + { + name: "add to empty set", + ignoreFormat: false, + versionsToAdd: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + expectedSize: 1, + expectedContains: NewVersion("1.0.0", SemanticFormat), + }, + { + name: "add nil version", + ignoreFormat: false, + versionsToAdd: []*Version{ + nil, + }, + expectedSize: 0, + }, + { + name: "add duplicate version", + ignoreFormat: false, + initialVersions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + versionsToAdd: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + expectedSize: 1, + expectedContains: NewVersion("1.0.0", SemanticFormat), + }, + { + name: "add same version different format with ignoreFormat=true", + ignoreFormat: true, + initialVersions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + versionsToAdd: []*Version{ + NewVersion("1.0.0", ApkFormat), + }, + expectedSize: 1, + expectedContains: NewVersion("1.0.0", ApkFormat), // latest added wins + }, + { + name: "add same version different format with ignoreFormat=false", + ignoreFormat: false, + initialVersions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + versionsToAdd: []*Version{ + NewVersion("1.0.0", ApkFormat), + }, + expectedSize: 2, + expectedContains: NewVersion("1.0.0", SemanticFormat), + }, + { + name: "add to set with nil versions map", + ignoreFormat: false, + versionsToAdd: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + expectedSize: 1, + expectedContains: NewVersion("1.0.0", SemanticFormat), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + s := NewSet(tt.ignoreFormat, tt.initialVersions...) + // for testing nil versions map case + if tt.name == "add to set with nil versions map" { + s.versions = nil + } + + s.Add(tt.versionsToAdd...) 
+ + assert.Equal(t, tt.expectedSize, s.Size()) + if tt.expectedContains != nil { + assert.True(t, s.Contains(tt.expectedContains)) + } + if tt.expectedNotContain != nil { + assert.False(t, s.Contains(tt.expectedNotContain)) + } + }) + } +} + +func TestSet_Remove(t *testing.T) { + tests := []struct { + name string + ignoreFormat bool + initialVersions []*Version + versionsToRemove []*Version + expectedSize int + shouldContain *Version + shouldNotContain *Version + }{ + { + name: "remove from empty set", + ignoreFormat: false, + versionsToRemove: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + expectedSize: 0, + }, + { + name: "remove existing version", + ignoreFormat: false, + initialVersions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + NewVersion("2.0.0", SemanticFormat), + }, + versionsToRemove: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + expectedSize: 1, + shouldContain: NewVersion("2.0.0", SemanticFormat), + shouldNotContain: NewVersion("1.0.0", SemanticFormat), + }, + { + name: "remove nil version", + ignoreFormat: false, + initialVersions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + versionsToRemove: []*Version{ + nil, + }, + expectedSize: 1, + shouldContain: NewVersion("1.0.0", SemanticFormat), + }, + { + name: "remove non-existing version", + ignoreFormat: false, + initialVersions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + versionsToRemove: []*Version{ + NewVersion("2.0.0", SemanticFormat), + }, + expectedSize: 1, + shouldContain: NewVersion("1.0.0", SemanticFormat), + }, + { + name: "remove from set with nil versions map", + ignoreFormat: false, + versionsToRemove: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + expectedSize: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + s := NewSet(tt.ignoreFormat, tt.initialVersions...) + // for testing nil versions map case + if tt.name == "remove from set with nil versions map" { + s.versions = nil + } + + s.Remove(tt.versionsToRemove...) 
+ + assert.Equal(t, tt.expectedSize, s.Size()) + if tt.shouldContain != nil { + assert.True(t, s.Contains(tt.shouldContain)) + } + if tt.shouldNotContain != nil { + assert.False(t, s.Contains(tt.shouldNotContain)) + } + }) + } +} + +func TestSet_Contains(t *testing.T) { + tests := []struct { + name string + ignoreFormat bool + versions []*Version + checkVersion *Version + expected bool + }{ + { + name: "contains existing version", + ignoreFormat: false, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + NewVersion("2.0.0", SemanticFormat), + }, + checkVersion: NewVersion("1.0.0", SemanticFormat), + expected: true, + }, + { + name: "does not contain non-existing version", + ignoreFormat: false, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + checkVersion: NewVersion("2.0.0", SemanticFormat), + expected: false, + }, + { + name: "check nil version", + ignoreFormat: false, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + checkVersion: nil, + expected: false, + }, + { + name: "check version in empty set", + ignoreFormat: false, + versions: nil, + checkVersion: NewVersion("1.0.0", SemanticFormat), + expected: false, + }, + { + name: "contains same version different format with ignoreFormat=true", + ignoreFormat: true, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + checkVersion: NewVersion("1.0.0", ApkFormat), + expected: true, + }, + { + name: "does not contain same version different format with ignoreFormat=false", + ignoreFormat: false, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + checkVersion: NewVersion("1.0.0", ApkFormat), + expected: false, + }, + { + name: "check version with nil versions map", + ignoreFormat: false, + versions: []*Version{}, + checkVersion: NewVersion("1.0.0", SemanticFormat), + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + s := NewSet(tt.ignoreFormat, tt.versions...) + // for testing nil versions map case + if tt.name == "check version with nil versions map" { + s.versions = nil + } + + result := s.Contains(tt.checkVersion) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestSet_Values(t *testing.T) { + tests := []struct { + name string + ignoreFormat bool + versions []*Version + expectedLength int + expectedNil bool + checkSorted bool + }{ + { + name: "empty set returns nil", + ignoreFormat: false, + versions: nil, + expectedNil: true, + expectedLength: 0, + }, + { + name: "set with versions returns sorted list", + ignoreFormat: false, + versions: []*Version{ + NewVersion("2.0.0", SemanticFormat), + NewVersion("1.0.0", SemanticFormat), + NewVersion("3.0.0", SemanticFormat), + }, + expectedLength: 3, + checkSorted: true, + }, + { + name: "set with nil versions map returns nil", + ignoreFormat: false, + versions: []*Version{}, + expectedNil: true, + }, + { + name: "set with single version", + ignoreFormat: false, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + }, + expectedLength: 1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + s := NewSet(tt.ignoreFormat, tt.versions...) 
+ // for testing nil versions map case + if tt.name == "set with nil versions map returns nil" { + s.versions = nil + } + + result := s.Values() + + if tt.expectedNil { + assert.Nil(t, result) + } else { + require.NotNil(t, result) + assert.Equal(t, tt.expectedLength, len(result)) + + if tt.checkSorted && len(result) > 1 { + // verify sorting - versions should be in ascending order + for i := 0; i < len(result)-1; i++ { + cmp, err := result[i].Compare(result[i+1]) + require.NoError(t, err) + assert.True(t, cmp < 0, "versions should be sorted in ascending order") + } + } + } + }) + } +} + +func TestSet_Size(t *testing.T) { + tests := []struct { + name string + ignoreFormat bool + versions []*Version + expected int + }{ + { + name: "empty set size is zero", + ignoreFormat: false, + versions: nil, + expected: 0, + }, + { + name: "set with versions", + ignoreFormat: false, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + NewVersion("2.0.0", SemanticFormat), + }, + expected: 2, + }, + { + name: "set with duplicate versions", + ignoreFormat: false, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + NewVersion("1.0.0", SemanticFormat), + }, + expected: 1, + }, + { + name: "set with nil versions map", + ignoreFormat: false, + versions: []*Version{}, + expected: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + s := NewSet(tt.ignoreFormat, tt.versions...) + // for testing nil versions map case + if tt.name == "set with nil versions map" { + s.versions = nil + } + + result := s.Size() + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestSet_Clear(t *testing.T) { + tests := []struct { + name string + ignoreFormat bool + versions []*Version + }{ + { + name: "clear non-empty set", + ignoreFormat: false, + versions: []*Version{ + NewVersion("1.0.0", SemanticFormat), + NewVersion("2.0.0", SemanticFormat), + }, + }, + { + name: "clear empty set", + ignoreFormat: false, + versions: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + s := NewSet(tt.ignoreFormat, tt.versions...) 
+ + originalSize := s.Size() + s.Clear() + + assert.Equal(t, 0, s.Size()) + assert.NotNil(t, s.versions) // should have empty map, not nil + + // verify all previous versions are gone + if originalSize > 0 { + for _, v := range tt.versions { + if v != nil { + assert.False(t, s.Contains(v)) + } + } + } + }) + } +} + +func TestSet_Integration(t *testing.T) { + // test combining multiple operations + s := NewSet(false) + + v1 := NewVersion("1.0.0", SemanticFormat) + v2 := NewVersion("2.0.0", SemanticFormat) + v3 := NewVersion("3.0.0", SemanticFormat) + + // add versions + s.Add(v1, v2, v3) + assert.Equal(t, 3, s.Size()) + + // check contains + assert.True(t, s.Contains(v1)) + assert.True(t, s.Contains(v2)) + assert.True(t, s.Contains(v3)) + + // remove one version + s.Remove(v2) + assert.Equal(t, 2, s.Size()) + assert.False(t, s.Contains(v2)) + + // get values + values := s.Values() + require.Len(t, values, 2) + + // verify sorting + cmp, err := values[0].Compare(values[1]) + require.NoError(t, err) + assert.True(t, cmp < 0) + + // clear all + s.Clear() + assert.Equal(t, 0, s.Size()) + assert.Nil(t, s.Values()) +} diff --git a/grype/version/version.go b/grype/version/version.go index b8195940454..fe01c351ac6 100644 --- a/grype/version/version.go +++ b/grype/version/version.go @@ -1,172 +1,154 @@ package version import ( + "errors" "fmt" "github.com/anchore/grype/grype/pkg" - "github.com/anchore/syft/syft/cpe" ) -// ErrUnsupportedVersion is returned when a version string cannot be parsed into a rich version object -// for a known unsupported case (e.g. golang "devel" version). -var ErrUnsupportedVersion = fmt.Errorf("unsupported version value") +var _ Comparator = (*Version)(nil) type Version struct { - Raw string - Format Format - rich rich + Raw string + Format Format + comparators map[Format]Comparator } -type rich struct { - cpeVers []cpe.CPE - semVer *semanticVersion - apkVer *apkVersion - debVer *debVersion - golangVersion *golangVersion - mavenVer *mavenVersion - rpmVer *rpmVersion - kbVer *kbVersion - portVer *portageVersion - pep440version *pep440Version - jvmVersion *jvmVersion -} - -func NewVersion(raw string, format Format) (*Version, error) { - version := &Version{ +func NewVersion(raw string, format Format) *Version { + return &Version{ Raw: raw, Format: format, } +} - err := version.populate() - if err != nil { - return nil, err +func NewVersionFromPkg(p pkg.Package) *Version { + if p.Version == "" { + return nil } - - return version, nil + return NewVersion(p.Version, FormatFromPkg(p)) } -func NewVersionFromPkg(p pkg.Package) (*Version, error) { - format := FormatFromPkg(p) +func (v *Version) Validate() error { + _, err := v.getComparator(v.Format) + return err +} - ver, err := NewVersion(p.Version, format) - if err != nil { - return nil, err +//nolint:funlen +func (v *Version) getComparator(format Format) (Comparator, error) { + if v.comparators == nil { + v.comparators = make(map[Format]Comparator) + } + if comparator, ok := v.comparators[format]; ok { + return comparator, nil } - ver.rich.cpeVers = p.CPEs - return ver, nil -} - -func (v *Version) populate() error { - switch v.Format { + var comparator Comparator + var err error + switch format { case SemanticFormat: - ver, err := newSemanticVersion(v.Raw) - v.rich.semVer = ver - return err + // not enforcing strict semver here, so that we can parse versions like "v1.0.0", "1.0", or "1.0a", which aren't strictly semver compliant + comparator, err = newSemanticVersion(v.Raw, false) case ApkFormat: - ver, err := 
newApkVersion(v.Raw) - v.rich.apkVer = ver - return err + comparator, err = newApkVersion(v.Raw) + case BitnamiFormat: + comparator, err = newBitnamiVersion(v.Raw) case DebFormat: - ver, err := newDebVersion(v.Raw) - v.rich.debVer = ver - return err + comparator, err = newDebVersion(v.Raw) case GolangFormat: - ver, err := newGolangVersion(v.Raw) - v.rich.golangVersion = ver - return err + comparator, err = newGolangVersion(v.Raw) case MavenFormat: - ver, err := newMavenVersion(v.Raw) - v.rich.mavenVer = ver - return err + comparator, err = newMavenVersion(v.Raw) case RpmFormat: - ver, err := newRpmVersion(v.Raw) - v.rich.rpmVer = &ver - return err + comparator, err = newRpmVersion(v.Raw) case PythonFormat: - ver, err := newPep440Version(v.Raw) - v.rich.pep440version = &ver - return err + comparator, err = newPep440Version(v.Raw) case KBFormat: - ver := newKBVersion(v.Raw) - v.rich.kbVer = &ver - return nil + comparator = newKBVersion(v.Raw) case GemFormat: - ver, err := newGemfileVersion(v.Raw) - v.rich.semVer = ver - return err + comparator, err = newGemVersion(v.Raw) case PortageFormat: - ver := newPortageVersion(v.Raw) - v.rich.portVer = &ver - return nil + comparator = newPortageVersion(v.Raw) case JVMFormat: - ver, err := newJvmVersion(v.Raw) - v.rich.jvmVersion = ver - return err + comparator, err = newJvmVersion(v.Raw) case UnknownFormat: - // use the raw string + fuzzy constraint - return nil + comparator, err = newFuzzyVersion(v.Raw) + default: + err = fmt.Errorf("no comparator available for format %q", v.Format) } - return fmt.Errorf("no rich version populated (format=%s)", v.Format) -} + v.comparators[format] = comparator -func (v Version) CPEs() []cpe.CPE { - return v.rich.cpeVers + return comparator, err } - func (v Version) String() string { return fmt.Sprintf("%s (%s)", v.Raw, v.Format) } +// Compare compares this version to another version. +// This returns -1, 0, or 1 if this version is smaller, +// equal, or larger than the other version, respectively. 
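(Illustration, not part of the patch.) Before the Compare implementation below, a short usage sketch of the reworked API: NewVersion no longer returns an error, parsing problems surface via Validate or on the first comparison, and comparators are built lazily per format. This assumes the package is imported as github.com/anchore/grype/grype/version:

```go
package main

import (
	"fmt"

	"github.com/anchore/grype/grype/version"
)

func main() {
	// construction never fails; parsing is deferred
	v := version.NewVersion("2:4.19.01-1.el7_5", version.RpmFormat)
	if err := v.Validate(); err != nil {
		fmt.Println("unparseable:", err)
		return
	}

	other := version.NewVersion("2:4.19.1-1.el7_5", version.RpmFormat)
	cmp, err := v.Compare(other)
	if err != nil {
		fmt.Println("compare failed:", err)
		return
	}
	fmt.Println(cmp) // 0: "4.19.01" and "4.19.1" compare as equal under rpm rules
}
```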
func (v Version) Compare(other *Version) (int, error) { if other == nil { return -1, ErrNoVersionProvided } - if other.Format == v.Format { - return v.compareSameFormat(other) + var result int + comparator, err := v.getComparator(v.Format) + if err == nil { + // if the package version, v was able to compare without error, return the result + result, err = comparator.Compare(other) + if err == nil { + // no error returned for package version or db version, return the result + return result, nil + } } - - // different formats, try to convert to a common format - common, err := finalizeComparisonVersion(other, v.Format) - if err != nil { - return -1, err + // we were unable to parse the package or db version as v.Format, try other.Format if they differ + if v.Format != other.Format { + originalErr := err + comparator, err = v.getComparator(other.Format) + if err == nil { + result, err = comparator.Compare(other) + if err == nil { + return result, nil + } + } + err = errors.Join(originalErr, err) } - return v.compareSameFormat(common) + // all formats returned error, return all errors + return 0, fmt.Errorf("unable to compare versions: %v %v due to %w", v, other, err) } -func (v Version) compareSameFormat(other *Version) (int, error) { - switch v.Format { - case SemanticFormat: - return v.rich.semVer.verObj.Compare(other.rich.semVer.verObj), nil - case ApkFormat: - return v.rich.apkVer.Compare(other) - case DebFormat: - return v.rich.debVer.Compare(other) - case GolangFormat: - return v.rich.golangVersion.Compare(other) - case MavenFormat: - return v.rich.mavenVer.Compare(other) - case RpmFormat: - return v.rich.rpmVer.Compare(other) - case PythonFormat: - return v.rich.pep440version.Compare(other) - case KBFormat: - return v.rich.kbVer.Compare(other) - case GemFormat: - return v.rich.semVer.verObj.Compare(other.rich.semVer.verObj), nil - case PortageFormat: - return v.rich.portVer.Compare(other) - case JVMFormat: - return v.rich.jvmVersion.Compare(other) +func (v *Version) Is(op Operator, other *Version) (bool, error) { + if v == nil { + return false, fmt.Errorf("cannot evaluate version with nil version") + } + if other == nil { + return false, ErrNoVersionProvided } - v1, err := newFuzzyVersion(v.Raw) + comparator, err := v.getComparator(v.Format) if err != nil { - return -1, fmt.Errorf("unable to parse version (%s) as a fuzzy version: %w", v.Raw, err) + return false, fmt.Errorf("unable to get comparator for %s: %w", v.Format, err) } - return v1.Compare(other) + result, err := comparator.Compare(other) + if err != nil { + return false, fmt.Errorf("unable to compare versions %s and %s: %w", v, other, err) + } + + switch op { + case EQ, "": + return result == 0, nil + case GT: + return result > 0, nil + case LT: + return result < 0, nil + case GTE: + return result >= 0, nil + case LTE: + return result <= 0, nil + } + return false, fmt.Errorf("unknown operator %s", op) } diff --git a/grype/version/version_test.go b/grype/version/version_test.go index fb59d2cbf79..9f4d007b3a8 100644 --- a/grype/version/version_test.go +++ b/grype/version/version_test.go @@ -1,6 +1,7 @@ package version import ( + "slices" "testing" "github.com/stretchr/testify/assert" @@ -10,82 +11,82 @@ import ( func TestVersionCompare(t *testing.T) { tests := []struct { name string - version1 string - version2 string - format Format + v1 string + v2 string expectedResult int - expectErr bool + expectErr require.ErrorAssertionFunc }{ { name: "v1 greater than v2", - version1: "2.0.0", - version2: "1.0.0", - format: 
SemanticFormat, + v1: "2.0.0", + v2: "1.0.0", expectedResult: 1, - expectErr: false, }, { name: "v1 less than v2", - version1: "1.0.0", - version2: "2.0.0", - format: SemanticFormat, + v1: "1.0.0", + v2: "2.0.0", expectedResult: -1, - expectErr: false, }, { name: "v1 equal to v2", - version1: "1.0.0", - version2: "1.0.0", - format: SemanticFormat, + v1: "1.0.0", + v2: "1.0.0", expectedResult: 0, - expectErr: false, }, { name: "compare with nil version", - version1: "1.0.0", - version2: "", - format: SemanticFormat, + v1: "1.0.0", + v2: "", expectedResult: -1, - expectErr: true, + expectErr: require.Error, }, } - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - v1, err := NewVersion(tc.version1, tc.format) - require.NoError(t, err, "failed to create version1") - - var v2 *Version - if tc.version2 == "" { - v2 = nil // test nil case - } else if tc.name == "different formats" { - // use a different format for the second version - v2, err = NewVersion(tc.version2, PythonFormat) - require.NoError(t, err, "failed to create version2 with different format") - } else { - v2, err = NewVersion(tc.version2, tc.format) - require.NoError(t, err, "failed to create version2") - } + // the above test cases are pretty tame value-wise, so we can use (almost) all formats + var formats []Format + formats = append(formats, Formats...) - result, err := v1.Compare(v2) + // leave out some formats... + slices.DeleteFunc(formats, func(f Format) bool { + return f == KBFormat + }) + + for _, format := range formats { + t.Run(format.String(), func(t *testing.T) { + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + if tc.expectErr == nil { + tc.expectErr = require.NoError + } + v1 := NewVersion(tc.v1, format) + require.Equal(t, format, v1.Format) + + var v2 *Version + if tc.v2 != "" { + v2 = NewVersion(tc.v2, format) + require.Equal(t, format, v2.Format) + } + + result, err := v1.Compare(v2) + tc.expectErr(t, err, "unexpected error during comparison") + if err != nil { + return // skip further checks if there was an error + } - if tc.expectErr { - assert.Error(t, err, "expected an error but got none") - } else { - assert.NoError(t, err, "unexpected error during comparison") - assert.Equal(t, tc.expectedResult, result, "comparison result mismatch") + assert.NoError(t, err, "unexpected error during comparison") + assert.Equal(t, tc.expectedResult, result, "comparison result mismatch") + }) } }) } } -func Test_UpgradeUnknownRightSideComparison(t *testing.T) { - v1, err := NewVersion("1.0.0", SemanticFormat) - require.NoError(t, err) +func TestVersion_UpgradeUnknownRightSideComparison(t *testing.T) { + v1 := NewVersion("1.0.0", SemanticFormat) // test if we can upgrade an unknown format to a known format when the left hand side is known - v2, err := NewVersion("1.0.0", UnknownFormat) - require.NoError(t, err) + v2 := NewVersion("1.0.0", UnknownFormat) result, err := v1.Compare(v2) assert.NoError(t, err) @@ -115,19 +116,248 @@ func TestVersionCompareSameFormat(t *testing.T) { t.Run(fmt.name, func(t *testing.T) { // just test that we can create and compare versions of this format // without errors - not testing the actual comparison logic - v1, err := NewVersion("1.0.0", fmt.format) - if err != nil { - t.Skipf("Skipping %s format, couldn't create version: %v", fmt.name, err) + v1 := NewVersion("1.0.0", fmt.format) + v2 := NewVersion("1.0.0", fmt.format) + + result, err := v1.Compare(v2) + assert.NoError(t, err, "comparison error") + assert.Equal(t, 0, result, "equal versions should return 
0") + }) + } +} + +func TestVersion_Is(t *testing.T) { + tests := []struct { + name string + version *Version + operator Operator + other *Version + expected bool + wantErr require.ErrorAssertionFunc + }{ + { + name: "equal versions - EQ operator", + version: NewVersion("1.0.0", SemanticFormat), + operator: EQ, + other: NewVersion("1.0.0", SemanticFormat), + expected: true, + }, + { + name: "equal versions - empty operator (defaults to EQ)", + version: NewVersion("1.0.0", SemanticFormat), + operator: "", + other: NewVersion("1.0.0", SemanticFormat), + expected: true, + }, + { + name: "unequal versions - EQ operator", + version: NewVersion("1.0.0", SemanticFormat), + operator: EQ, + other: NewVersion("2.0.0", SemanticFormat), + expected: false, + }, + { + name: "greater than - GT operator true", + version: NewVersion("2.0.0", SemanticFormat), + operator: GT, + other: NewVersion("1.0.0", SemanticFormat), + expected: true, + }, + { + name: "greater than - GT operator false", + version: NewVersion("1.0.0", SemanticFormat), + operator: GT, + other: NewVersion("2.0.0", SemanticFormat), + expected: false, + }, + { + name: "greater than or equal - GTE operator true (greater)", + version: NewVersion("2.0.0", SemanticFormat), + operator: GTE, + other: NewVersion("1.0.0", SemanticFormat), + expected: true, + }, + { + name: "greater than or equal - GTE operator true (equal)", + version: NewVersion("1.0.0", SemanticFormat), + operator: GTE, + other: NewVersion("1.0.0", SemanticFormat), + expected: true, + }, + { + name: "greater than or equal - GTE operator false", + version: NewVersion("1.0.0", SemanticFormat), + operator: GTE, + other: NewVersion("2.0.0", SemanticFormat), + expected: false, + }, + { + name: "less than - LT operator true", + version: NewVersion("1.0.0", SemanticFormat), + operator: LT, + other: NewVersion("2.0.0", SemanticFormat), + expected: true, + }, + { + name: "less than - LT operator false", + version: NewVersion("2.0.0", SemanticFormat), + operator: LT, + other: NewVersion("1.0.0", SemanticFormat), + expected: false, + }, + { + name: "less than or equal - LTE operator true (less)", + version: NewVersion("1.0.0", SemanticFormat), + operator: LTE, + other: NewVersion("2.0.0", SemanticFormat), + expected: true, + }, + { + name: "less than or equal - LTE operator true (equal)", + version: NewVersion("1.0.0", SemanticFormat), + operator: LTE, + other: NewVersion("1.0.0", SemanticFormat), + expected: true, + }, + { + name: "less than or equal - LTE operator false", + version: NewVersion("2.0.0", SemanticFormat), + operator: LTE, + other: NewVersion("1.0.0", SemanticFormat), + expected: false, + }, + { + name: "nil other version should return ErrNoVersionProvided", + version: NewVersion("1.0.0", SemanticFormat), + operator: EQ, + other: nil, + wantErr: require.Error, + }, + { + name: "unknown operator should return error", + version: NewVersion("1.0.0", SemanticFormat), + operator: "unknown", + other: NewVersion("1.0.0", SemanticFormat), + wantErr: require.Error, + }, + { + name: "invalid version format should return error", + version: NewVersion("invalid", SemanticFormat), + operator: EQ, + other: NewVersion("1.0.0", SemanticFormat), + wantErr: require.Error, + }, + { + name: "different formats - semantic vs apk", + version: NewVersion("1.0.0", SemanticFormat), + operator: EQ, + other: NewVersion("1.0.0", ApkFormat), + expected: true, + }, + { + name: "complex semantic versions", + version: NewVersion("1.2.3-alpha.1", SemanticFormat), + operator: LT, + other: NewVersion("1.2.3", 
SemanticFormat), + expected: true, + }, + { + name: "version with v prefix", + version: NewVersion("v1.0.0", SemanticFormat), + operator: EQ, + other: NewVersion("1.0.0", SemanticFormat), + expected: true, + }, + { + name: "nil other version is ErrNoVersionProvided", + version: NewVersion("1.0.0", SemanticFormat), + operator: EQ, + other: nil, + wantErr: func(t require.TestingT, err error, a ...interface{}) { + require.ErrorIs(t, err, ErrNoVersionProvided, a...) + }, + }, + { + name: "unknown operator error", + version: NewVersion("1.0.0", SemanticFormat), + operator: "!@#", + other: NewVersion("1.0.0", SemanticFormat), + wantErr: func(t require.TestingT, err error, a ...interface{}) { + require.ErrorContains(t, err, "unknown operator !@#", a...) + }, + }, + { + name: "invalid version format error contains format", + version: NewVersion("not-a-valid-version", SemanticFormat), + operator: EQ, + other: NewVersion("1.0.0", SemanticFormat), + wantErr: func(t require.TestingT, err error, a ...interface{}) { + require.ErrorContains(t, err, "unable to get comparator for Semantic", a...) + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.wantErr == nil { + tt.wantErr = require.NoError } - v2, err := NewVersion("1.0.0", fmt.format) + result, err := tt.version.Is(tt.operator, tt.other) + tt.wantErr(t, err) + if err != nil { - t.Skipf("Skipping %s format, couldn't create second version: %v", fmt.name, err) + return } - result, err := v1.Compare(v2) - assert.NoError(t, err, "comparison error") - assert.Equal(t, 0, result, "equal versions should return 0") + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestVersion_Is_AllOperators(t *testing.T) { + v1 := NewVersion("1.0.0", SemanticFormat) + v2 := NewVersion("2.0.0", SemanticFormat) + v1dup := NewVersion("1.0.0", SemanticFormat) + + tests := []struct { + name string + left *Version + operator Operator + right *Version + expected bool + }{ + // v1 (1.0.0) vs v2 (2.0.0) + {"1.0.0 = 2.0.0", v1, EQ, v2, false}, + {"1.0.0 > 2.0.0", v1, GT, v2, false}, + {"1.0.0 >= 2.0.0", v1, GTE, v2, false}, + {"1.0.0 < 2.0.0", v1, LT, v2, true}, + {"1.0.0 <= 2.0.0", v1, LTE, v2, true}, + + // v2 (2.0.0) vs v1 (1.0.0) + {"2.0.0 = 1.0.0", v2, EQ, v1, false}, + {"2.0.0 > 1.0.0", v2, GT, v1, true}, + {"2.0.0 >= 1.0.0", v2, GTE, v1, true}, + {"2.0.0 < 1.0.0", v2, LT, v1, false}, + {"2.0.0 <= 1.0.0", v2, LTE, v1, false}, + + // v1 (1.0.0) vs v1dup (1.0.0) + {"1.0.0 = 1.0.0", v1, EQ, v1dup, true}, + {"1.0.0 > 1.0.0", v1, GT, v1dup, false}, + {"1.0.0 >= 1.0.0", v1, GTE, v1dup, true}, + {"1.0.0 < 1.0.0", v1, LT, v1dup, false}, + {"1.0.0 <= 1.0.0", v1, LTE, v1dup, true}, + + // empty operator should default to EQ + {"1.0.0 (empty) 1.0.0", v1, "", v1dup, true}, + {"1.0.0 (empty) 2.0.0", v1, "", v2, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := tt.left.Is(tt.operator, tt.right) + require.NoError(t, err) + assert.Equal(t, tt.expected, result) }) } } diff --git a/grype/vulnerability/metadata.go b/grype/vulnerability/metadata.go index 633978e5229..5351f2baa01 100644 --- a/grype/vulnerability/metadata.go +++ b/grype/vulnerability/metadata.go @@ -1,19 +1,125 @@ package vulnerability import ( + "strings" "time" ) type Metadata struct { ID string - DataSource string + DataSource string // the primary reference URL, i.e. 
where the data originated Namespace string Severity string - URLs []string + URLs []string // secondary reference URLs a vulnerability may provide Description string Cvss []Cvss KnownExploited []KnownExploited EPSS []EPSS + + // calculated as-needed + risk float64 +} + +// RiskScore computes a basic quantitative risk by combining threat and severity. +// Threat is represented by epss (likelihood of exploitation), and severity by the cvss base score + string severity. +// Impact is currently fixed at 1 and may be integrated into the calculation in future versions. +// Raw risk is epss * (cvss / 10) * impact, then scaled to 0–100 for readability. +// If a vulnerability appears in the KEV list, apply an additional boost to reflect known exploitation. +// Known ransomware campaigns receive a further, distinct boost. +func (m *Metadata) RiskScore() float64 { + if m == nil { + return 0 + } + if m.risk != 0 { + return m.risk + } + m.risk = riskScore(*m) + return m.risk +} + +func riskScore(m Metadata) float64 { + return min(threat(m)*severity(m)*kevModifier(m), 1.0) * 100.0 +} + +func kevModifier(m Metadata) float64 { + if len(m.KnownExploited) > 0 { + for _, kev := range m.KnownExploited { + if strings.ToLower(kev.KnownRansomwareCampaignUse) == "known" { + // consider ransomware campaigns to be a greater kevModifier than other KEV threats + return 1.1 + } + } + return 1.05 // boost the final result, as if there is a greater kevModifier inherently from KEV threats + } + return 1.0 +} + +func threat(m Metadata) float64 { + if len(m.KnownExploited) > 0 { + // per the EPSS guidance, any evidence of exploitation in the wild (not just PoC) should be considered over EPSS data + return 1.0 + } + if len(m.EPSS) == 0 { + return 0.0 + } + return m.EPSS[0].EPSS +} + +// severity returns a 0-1 value, which is a combination of the string severity and the average of the cvss base scores. +// If there are no cvss scores, the string severity is used. Some vendors only update the string severity and not the +// cvss scores, so it's important to consider all sources. We are also not biasing towards any one source (multiple +// cvss scores won't over-weigh the string severity). +func severity(m Metadata) float64 { + // TODO: summarization should take a policy: prefer NVD over CNA or vice versa... + + stringSeverityScore := severityToScore(m.Severity) / 10.0 + avgBaseScore := average(validBaseScores(m.Cvss...)...) / 10.0 + if avgBaseScore == 0 { + return stringSeverityScore + } + return average(stringSeverityScore, avgBaseScore) +} + +func severityToScore(severity string) float64 { + // use the middle of the range for each severity + switch strings.ToLower(severity) { + case "negligible": + return 0.5 + case "low": + return 3.0 + case "medium": + return 5.0 + case "high": + return 7.5 + case "critical": + return 9.0 + } + // the severity value might be "unknown" or an unexpected value. These should not be lost + // in the noise and placed at the bottom of the list... instead we compromise to the middle of the list. + return 5.0 +} + +func validBaseScores(as ...Cvss) []float64 { + var out []float64 + for _, a := range as { + if a.Metrics.BaseScore == 0 { + // this is a mistake... base scores cannot be 0. 
Don't include this value and bring down the average + continue + } + out = append(out, a.Metrics.BaseScore) + } + return out +} + +func average(as ...float64) float64 { + if len(as) == 0 { + return 0 + } + sum := 0.0 + for _, a := range as { + sum += a + } + return sum / float64(len(as)) } type Cvss struct { diff --git a/grype/vulnerability/metadata_test.go b/grype/vulnerability/metadata_test.go new file mode 100644 index 00000000000..befcaa45a80 --- /dev/null +++ b/grype/vulnerability/metadata_test.go @@ -0,0 +1,457 @@ +package vulnerability + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestRiskScore(t *testing.T) { + tests := []struct { + name string + metadata Metadata + expected float64 + }{ + { + name: "nil metadata", + metadata: Metadata{}, + expected: 0, + }, + { + name: "already calculated risk", + metadata: Metadata{ + risk: 42.5, + }, + expected: 42.5, + }, + { + name: "no EPSS data, no KEV", + metadata: Metadata{ + Severity: "high", + Cvss: []Cvss{ + { + Metrics: CvssMetrics{ + BaseScore: 7.5, + }, + }, + }, + }, + expected: 0, // threat is 0 without EPSS or KEV + }, + { + name: "with EPSS data, no KEV", + metadata: Metadata{ + Severity: "high", + EPSS: []EPSS{ + { + EPSS: 0.5, + Percentile: 0.95, + }, + }, + Cvss: []Cvss{ + { + Metrics: CvssMetrics{ + BaseScore: 7.5, + }, + }, + }, + }, + expected: 37.5, // 0.5 * (7.5/10) * 1 * 100 + }, + { + name: "with KEV, no EPSS", + metadata: Metadata{ + Severity: "high", + KnownExploited: []KnownExploited{ + { + CVE: "CVE-2023-1234", + KnownRansomwareCampaignUse: "No", + }, + }, + Cvss: []Cvss{ + { + Metrics: CvssMetrics{ + BaseScore: 7.5, + }, + }, + }, + }, + expected: 78.75, // 1.0 * (7.5/10) * 1.05* 100 + }, + { + name: "with KEV ransomware", + metadata: Metadata{ + Severity: "high", + KnownExploited: []KnownExploited{ + { + CVE: "CVE-2023-1234", + KnownRansomwareCampaignUse: "Known", + }, + }, + Cvss: []Cvss{ + { + Metrics: CvssMetrics{ + BaseScore: 7.5, + }, + }, + }, + }, + expected: 82.5, // 1.0 * (7.5/10) * 1.1 * 100 + }, + { + name: "with severity string only", + metadata: Metadata{ + Severity: "critical", + EPSS: []EPSS{ + { + EPSS: 0.8, + Percentile: 0.99, + }, + }, + }, + expected: 72, // 0.8 * (9.0/10) * 1.0 * 100 + }, + { + name: "with multiple CVSS scores + string severity", + metadata: Metadata{ + Severity: "medium", + EPSS: []EPSS{ + { + EPSS: 0.6, + Percentile: 0.90, + }, + }, + Cvss: []Cvss{ + { + Source: "NVD", + Metrics: CvssMetrics{ + BaseScore: 6.5, + }, + }, + { + Source: "Vendor", + Metrics: CvssMetrics{ + BaseScore: 5.5, + }, + }, + }, + }, + expected: 33, // 0.6 * ( (((6.5+5.5)/2)+5)/2 /10) * 1.0 * 100 + }, + { + name: "with some invalid CVSS scores + string severity", + metadata: Metadata{ + Severity: "medium", + EPSS: []EPSS{ + { + EPSS: 0.4, + Percentile: 0.85, + }, + }, + Cvss: []Cvss{ + { + Source: "NVD", + Metrics: CvssMetrics{ + BaseScore: 0, // invalid, should be ignored + }, + }, + { + Source: "Vendor", + Metrics: CvssMetrics{ + BaseScore: 6.0, + }, + }, + }, + }, + expected: 22, // 0.4 * ((6.0+5)/2 /10) * 1.0 * 100 + }, + { + name: "unknown severity", + metadata: Metadata{ + Severity: "unknown", + EPSS: []EPSS{ + { + EPSS: 0.3, + Percentile: 0.80, + }, + }, + }, + expected: 15, // 0.3 * (5.0/10) * 1.0 * 100 + }, + { + name: "maximum risk clamp", + metadata: Metadata{ + Severity: "critical", + KnownExploited: []KnownExploited{ + { + CVE: "CVE-2023-1234", + KnownRansomwareCampaignUse: "Known", + }, + }, + Cvss: []Cvss{ + { + Metrics: 
CvssMetrics{ + BaseScore: 10.0, + }, + }, + }, + }, + expected: 100, // clamped to 100 as it would be 1.0 * 1.0 * 1.1 * 100 = 120 + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := tt.metadata.RiskScore() + assert.InDelta(t, tt.expected, result, 0.01, "RiskScore method returned incorrect value") + + // test the calculated value is cached + if tt.name != "already calculated risk" && tt.name != "nil metadata" { + require.InDelta(t, tt.expected, tt.metadata.risk, 0.01, "risk was not cached") + } + + // test the standalone function + if tt.name != "nil metadata" && tt.name != "already calculated risk" { + funcResult := riskScore(tt.metadata) + assert.InDelta(t, tt.expected, funcResult, 0.0001, "riskScore function returned incorrect value") + } + }) + } +} + +func TestSeverityToScore(t *testing.T) { + tests := []struct { + severity string + expected float64 + }{ + {"negligible", 0.5}, + {"NEGLIGIBLE", 0.5}, + {"low", 3.0}, + {"LOW", 3.0}, + {"medium", 5.0}, + {"MEDIUM", 5.0}, + {"high", 7.5}, + {"HIGH", 7.5}, + {"critical", 9.0}, + {"CRITICAL", 9.0}, + {"unknown", 5.0}, + {"", 5.0}, + {"something-else", 5.0}, + } + + for _, tt := range tests { + t.Run(tt.severity, func(t *testing.T) { + result := severityToScore(tt.severity) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestAverageCVSS(t *testing.T) { + tests := []struct { + name string + cvss []Cvss + expected float64 + }{ + { + name: "empty slice", + cvss: []Cvss{}, + expected: 0, + }, + { + name: "single valid score", + cvss: []Cvss{ + {Metrics: CvssMetrics{BaseScore: 7.5}}, + }, + expected: 7.5, + }, + { + name: "multiple valid scores", + cvss: []Cvss{ + {Metrics: CvssMetrics{BaseScore: 7.5}}, + {Metrics: CvssMetrics{BaseScore: 8.5}}, + {Metrics: CvssMetrics{BaseScore: 9.0}}, + }, + expected: 8.33333, + }, + { + name: "with invalid scores", + cvss: []Cvss{ + {Metrics: CvssMetrics{BaseScore: 0}}, // invalid + {Metrics: CvssMetrics{BaseScore: 7.5}}, + {Metrics: CvssMetrics{BaseScore: 0}}, // invalid + {Metrics: CvssMetrics{BaseScore: 8.5}}, + }, + expected: 8.0, + }, + { + name: "all invalid scores", + cvss: []Cvss{ + {Metrics: CvssMetrics{BaseScore: 0}}, + {Metrics: CvssMetrics{BaseScore: 0}}, + }, + expected: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := average(validBaseScores(tt.cvss...)...) 
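Since average and validBaseScores are exercised right here, a quick hand-check of the full formula from metadata.go (risk = min(threat × severity × kevModifier, 1) × 100) may be useful. The standalone snippet below is illustrative only and mirrors the "with EPSS data, no KEV" case from TestRiskScore above.

```go
package main

import "fmt"

func main() {
	// Hand-check of the "with EPSS data, no KEV" case:
	// severity "high", one CVSS base score of 7.5, EPSS of 0.5, no KEV entry.
	threat := 0.5                     // EPSS probability is used as-is when there is no KEV evidence
	severity := (7.5/10 + 7.5/10) / 2 // average of string severity ("high" -> 7.5/10) and mean CVSS base score
	kev := 1.0                        // no KEV entry, so no boost

	risk := min(threat*severity*kev, 1.0) * 100
	fmt.Println(risk) // 37.5, matching the test's expected value
}
```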
+ assert.InDelta(t, tt.expected, result, 0.00001) + }) + } +} + +func TestThreat(t *testing.T) { + tests := []struct { + name string + metadata Metadata + expected float64 + }{ + { + name: "no EPSS, no KEV", + metadata: Metadata{}, + expected: 0, + }, + { + name: "with EPSS, no KEV", + metadata: Metadata{ + EPSS: []EPSS{ + {EPSS: 0.75}, + }, + }, + expected: 0.75, + }, + { + name: "with KEV, no EPSS", + metadata: Metadata{ + KnownExploited: []KnownExploited{ + {CVE: "CVE-2023-1234"}, + }, + }, + expected: 1.0, + }, + { + name: "with KEV and EPSS", + metadata: Metadata{ + EPSS: []EPSS{ + {EPSS: 0.5}, + }, + KnownExploited: []KnownExploited{ + {CVE: "CVE-2023-1234"}, + }, + }, + expected: 1.0, // KEV takes precedence + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := threat(tt.metadata) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestImpact(t *testing.T) { + tests := []struct { + name string + metadata Metadata + expected float64 + }{ + { + name: "no KEV", + metadata: Metadata{}, + expected: 1.0, + }, + { + name: "KEV without ransomware", + metadata: Metadata{ + KnownExploited: []KnownExploited{ + {KnownRansomwareCampaignUse: "No"}, + }, + }, + expected: 1.05, + }, + { + name: "KEV with ransomware", + metadata: Metadata{ + KnownExploited: []KnownExploited{ + {KnownRansomwareCampaignUse: "Known"}, + }, + }, + expected: 1.1, + }, + { + name: "KEV with case insensitive ransomware", + metadata: Metadata{ + KnownExploited: []KnownExploited{ + {KnownRansomwareCampaignUse: "KNOWN"}, + }, + }, + expected: 1.1, + }, + { + name: "multiple KEV entries, one with ransomware", + metadata: Metadata{ + KnownExploited: []KnownExploited{ + {KnownRansomwareCampaignUse: "No"}, + {KnownRansomwareCampaignUse: "Known"}, + }, + }, + expected: 1.1, // highest wins + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := kevModifier(tt.metadata) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestSeverity(t *testing.T) { + tests := []struct { + name string + metadata Metadata + expected float64 + }{ + { + name: "no CVSS, medium severity", + metadata: Metadata{ + Severity: "medium", + }, + expected: 0.5, + }, + { + name: "with CVSS + severity string", + metadata: Metadata{ + Severity: "medium", + Cvss: []Cvss{ + {Metrics: CvssMetrics{BaseScore: 8.0}}, + }, + }, + expected: 0.65, + }, + { + name: "multiple CVSS scores + severity string", + metadata: Metadata{ + Severity: "medium", + Cvss: []Cvss{ + {Metrics: CvssMetrics{BaseScore: 6.0}}, + {Metrics: CvssMetrics{BaseScore: 8.0}}, + }, + }, + expected: 0.6, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := severity(tt.metadata) + assert.InDelta(t, tt.expected, result, 0.00001) + }) + } +} diff --git a/grype/vulnerability/mock/vulnerability_provider.go b/grype/vulnerability/mock/vulnerability_provider.go index 346300db910..276daa95798 100644 --- a/grype/vulnerability/mock/vulnerability_provider.go +++ b/grype/vulnerability/mock/vulnerability_provider.go @@ -31,10 +31,10 @@ func (s *mockProvider) VulnerabilityMetadata(ref vulnerability.Reference) (*vuln for _, vuln := range s.Vulnerabilities { if vuln.ID == ref.ID && vuln.Namespace == ref.Namespace { var meta *vulnerability.Metadata - if m, ok := vuln.Reference.Internal.(vulnerability.Metadata); ok { + if m, ok := vuln.Internal.(vulnerability.Metadata); ok { meta = &m } - if m, ok := vuln.Reference.Internal.(*vulnerability.Metadata); ok { + if m, ok := 
vuln.Internal.(*vulnerability.Metadata); ok { meta = m } if meta != nil { diff --git a/grype/vulnerability/provider.go b/grype/vulnerability/provider.go index 5e0743f2693..d395bcecd13 100644 --- a/grype/vulnerability/provider.go +++ b/grype/vulnerability/provider.go @@ -1,7 +1,9 @@ package vulnerability import ( + "encoding/json" "io" + "time" grypePkg "github.com/anchore/grype/grype/pkg" ) @@ -30,3 +32,63 @@ type Provider interface { io.Closer } + +type StoreMetadataProvider interface { + DataProvenance() (map[string]DataProvenance, error) +} + +type DataProvenance struct { + DateCaptured time.Time `json:"captured,omitempty"` + InputDigest string `json:"input,omitempty"` +} + +type ProviderStatus struct { + SchemaVersion string `json:"schemaVersion"` + From string `json:"from,omitempty"` + Built time.Time `json:"built,omitempty"` + Path string `json:"path,omitempty"` + Error error `json:"error,omitempty"` +} + +func (s ProviderStatus) MarshalJSON() ([]byte, error) { + errStr := "" + if s.Error != nil { + errStr = s.Error.Error() + } + + var t string + if !s.Built.IsZero() { + t = s.Built.Format(time.RFC3339) + } + + return json.Marshal(&struct { + SchemaVersion string `json:"schemaVersion"` + From string `json:"from,omitempty"` + Built string `json:"built,omitempty"` + Path string `json:"path,omitempty"` + Valid bool `json:"valid"` + Error string `json:"error,omitempty"` + }{ + SchemaVersion: s.SchemaVersion, + From: s.From, + Built: t, + Path: s.Path, + Valid: s.Error == nil, + Error: errStr, + }) +} + +func (s DataProvenance) MarshalJSON() ([]byte, error) { + var t string + if !s.DateCaptured.IsZero() { + t = s.DateCaptured.Format(time.RFC3339) + } + + return json.Marshal(&struct { + DateCaptured string `json:"captured,omitempty"` + InputDigest string `json:"input,omitempty"` + }{ + DateCaptured: t, + InputDigest: s.InputDigest, + }) +} diff --git a/grype/vulnerability_matcher.go b/grype/vulnerability_matcher.go index 2ebc73ef7cb..b991e03b2bd 100644 --- a/grype/vulnerability_matcher.go +++ b/grype/vulnerability_matcher.go @@ -3,6 +3,8 @@ package grype import ( "errors" "fmt" + "runtime/debug" + "slices" "strings" "github.com/wagoodman/go-partybus" @@ -19,7 +21,6 @@ import ( "github.com/anchore/grype/grype/vulnerability" "github.com/anchore/grype/internal/bus" "github.com/anchore/grype/internal/log" - "github.com/anchore/syft/syft/linux" syftPkg "github.com/anchore/syft/syft/pkg" ) @@ -64,7 +65,11 @@ func (m *VulnerabilityMatcher) FindMatches(pkgs []pkg.Package, context pkg.Conte } }() - remainingMatches, ignoredMatches = m.findDBMatches(pkgs, context, progressMonitor) + remainingMatches, ignoredMatches, err = m.findDBMatches(pkgs, context, progressMonitor) + if err != nil { + err = fmt.Errorf("unable to find matches against vulnerability database: %w", err) + return remainingMatches, ignoredMatches, err + } remainingMatches, ignoredMatches, err = m.findVEXMatches(context, remainingMatches, ignoredMatches, progressMonitor) if err != nil { @@ -84,13 +89,18 @@ func (m *VulnerabilityMatcher) FindMatches(pkgs []pkg.Package, context pkg.Conte return remainingMatches, ignoredMatches, nil } -func (m *VulnerabilityMatcher) findDBMatches(pkgs []pkg.Package, context pkg.Context, progressMonitor *monitorWriter) (*match.Matches, []match.IgnoredMatch) { +func (m *VulnerabilityMatcher) findDBMatches(pkgs []pkg.Package, context pkg.Context, progressMonitor *monitorWriter) (*match.Matches, []match.IgnoredMatch, error) { var ignoredMatches []match.IgnoredMatch log.Trace("finding matches against DB") 
matches, err := m.searchDBForMatches(context.Distro, pkgs, progressMonitor) if err != nil { - // errors returned from matchers during searchDBForMatches were being logged and not returned, so just log them here + if match.IsFatalError(err) { + return nil, nil, err + } + + // other errors returned from matchers during searchDBForMatches were being + // logged and not returned, so just log them here log.WithFields("error", err).Debug("error(s) returned from searchDBForMatches") } @@ -111,7 +121,7 @@ func (m *VulnerabilityMatcher) findDBMatches(pkgs []pkg.Package, context pkg.Con ignoredMatches = m.mergeIgnoredMatches(originalIgnoredMatches, ignoredMatches) } - return &matches, ignoredMatches + return &matches, ignoredMatches, nil } func (m *VulnerabilityMatcher) mergeIgnoredMatches(allIgnoredMatches ...[]match.IgnoredMatch) []match.IgnoredMatch { @@ -129,31 +139,19 @@ func (m *VulnerabilityMatcher) mergeIgnoredMatches(allIgnoredMatches ...[]match. //nolint:funlen func (m *VulnerabilityMatcher) searchDBForMatches( - release *linux.Release, + d *distro.Distro, packages []pkg.Package, progressMonitor *monitorWriter, ) (match.Matches, error) { - var errs error var allMatches []match.Match - var allIgnored []match.IgnoredMatch + var allIgnorers []match.IgnoreFilter matcherIndex, defaultMatcher := newMatcherIndex(m.Matchers) - var d *distro.Distro - if release != nil { - d, errs = distro.NewFromRelease(*release) - if errs != nil { - log.Warnf("unable to determine linux distribution: %+v", errs) - errs = nil - } - if d != nil && d.Disabled() { - log.Warnf("unsupported linux distribution: %s", d.Name()) - return match.NewMatches(), nil - } - } - if defaultMatcher == nil { defaultMatcher = stock.NewStockMatcher(stock.MatcherConfig{UseCPEs: true}) } + + var matcherErrs []error for _, p := range packages { progressMonitor.PackagesProcessed.Increment() log.WithFields("package", displayPackage(p)).Trace("searching for vulnerability matches") @@ -169,13 +167,17 @@ func (m *VulnerabilityMatcher) searchDBForMatches( matchAgainst = []match.Matcher{defaultMatcher} } for _, theMatcher := range matchAgainst { - matches, ignoredMatches, err := theMatcher.Match(m.VulnerabilityProvider, p) + matches, ignorers, err := callMatcherSafely(theMatcher, m.VulnerabilityProvider, p) if err != nil { + if match.IsFatalError(err) { + return match.Matches{}, err + } + log.WithFields("error", err, "package", displayPackage(p)).Warn("matcher returned error") - errs = errors.Join(errs, err) + matcherErrs = append(matcherErrs, err) } - allIgnored = append(allIgnored, ignoredMatches...) + allIgnorers = append(allIgnorers, ignorers...) // Filter out matches based on records in the database exclusion table and hard-coded rules filtered, dropped := match.ApplyExplicitIgnoreRules(m.ExclusionProvider, match.NewMatches(matches...)) @@ -197,7 +199,7 @@ func (m *VulnerabilityMatcher) searchDBForMatches( } // apply ignores based on matchers returning ignore rules - filtered, dropped := match.ApplyIgnoreFilters(allMatches, ignoredMatchFilter(allIgnored)) + filtered, dropped := match.ApplyIgnoreFilters(allMatches, ignoredMatchFilter(allIgnorers)) logIgnoredMatches(dropped) // get deduplicated set of matches @@ -206,7 +208,17 @@ func (m *VulnerabilityMatcher) searchDBForMatches( // update the total discovered matches after removing all duplicates and ignores progressMonitor.MatchesDiscovered.Set(int64(res.Count())) - return res, errs + return res, errors.Join(matcherErrs...) 
+} + +func callMatcherSafely(m match.Matcher, vp vulnerability.Provider, p pkg.Package) (matches []match.Match, ignoredMatches []match.IgnoreFilter, err error) { + // handle individual matcher panics + defer func() { + if e := recover(); e != nil { + err = match.NewFatalError(m.Type(), fmt.Errorf("%v at:\n%s", e, string(debug.Stack()))) + } + }() + return m.Match(vp, p) } func (m *VulnerabilityMatcher) findVEXMatches(context pkg.Context, remainingMatches *match.Matches, ignoredMatches []match.IgnoredMatch, progressMonitor *monitorWriter) (*match.Matches, []match.IgnoredMatch, error) { @@ -303,37 +315,40 @@ func (m *VulnerabilityMatcher) normalizeByCVE(match match.Match) match.Match { // ignoreRulesByLocation implements match.IgnoreFilter to filter each matching // package that overlaps by location and have the same vulnerability ID (CVE) type ignoreRulesByLocation struct { + remainingFilters []match.IgnoreFilter locationToIgnoreRules map[string][]match.IgnoreRule } func (i ignoreRulesByLocation) IgnoreMatch(m match.Match) []match.IgnoreRule { for _, l := range m.Package.Locations.ToSlice() { for _, rule := range i.locationToIgnoreRules[l.RealPath] { - if rule.Vulnerability == m.Vulnerability.ID { - return []match.IgnoreRule{rule} - } - for _, relatedVulnerability := range m.Vulnerability.RelatedVulnerabilities { - if rule.Vulnerability == relatedVulnerability.ID { - return []match.IgnoreRule{rule} - } + if matched := rule.IgnoreMatch(m); matched != nil { + return matched } } } + for _, f := range i.remainingFilters { + if matched := f.IgnoreMatch(m); matched != nil { + return matched + } + } return nil } -// ignoreMatchFilter creates an ignore filter based on the provided IgnoredMatches to filter out "the same" +// ignoredMatchFilter creates an ignore filter based on location-based IgnoredMatches to filter out "the same" // vulnerabilities reported by other matchers based on overlapping file locations -func ignoredMatchFilter(ignores []match.IgnoredMatch) match.IgnoreFilter { +func ignoredMatchFilter(ignores []match.IgnoreFilter) match.IgnoreFilter { out := ignoreRulesByLocation{locationToIgnoreRules: map[string][]match.IgnoreRule{}} - for _, ignore := range ignores { - // TODO should this be syftPkg.FileOwner interface or similar? - if m, ok := ignore.Package.Metadata.(pkg.ApkMetadata); ok { - for _, f := range m.Files { - out.locationToIgnoreRules[f.Path] = append(out.locationToIgnoreRules[f.Path], ignore.AppliedIgnoreRules...) 
- } + // the returned slice of remaining rules are not location-based rules + out.remainingFilters = slices.DeleteFunc(ignores, func(ignore match.IgnoreFilter) bool { + rule, ok := ignore.(match.IgnoreRule) + if ok && rule.Package.Location != "" && !strings.ContainsRune(rule.Package.Location, '*') { + // this rule is handled with location lookups, remove it from the remaining filter list + out.locationToIgnoreRules[rule.Package.Location] = append(out.locationToIgnoreRules[rule.Package.Location], rule) + return true } - } + return false + }) return out } @@ -355,12 +370,12 @@ func ignoredMatchesDiff(subject []match.IgnoredMatch, other []match.IgnoredMatch otherMap := make(map[match.Fingerprint]struct{}) for _, a := range other { - otherMap[a.Match.Fingerprint()] = struct{}{} + otherMap[a.Fingerprint()] = struct{}{} } var diff []match.IgnoredMatch for _, b := range subject { - if _, ok := otherMap[b.Match.Fingerprint()]; !ok { + if _, ok := otherMap[b.Fingerprint()]; !ok { diff = append(diff, b) } } diff --git a/grype/vulnerability_matcher_test.go b/grype/vulnerability_matcher_test.go index 988b30ef9d6..f782e8c1019 100644 --- a/grype/vulnerability_matcher_test.go +++ b/grype/vulnerability_matcher_test.go @@ -1,6 +1,7 @@ package grype import ( + "errors" "testing" "github.com/google/go-cmp/cmp" @@ -17,6 +18,7 @@ import ( "github.com/anchore/grype/grype/match" "github.com/anchore/grype/grype/matcher" "github.com/anchore/grype/grype/matcher/apk" + matcherMock "github.com/anchore/grype/grype/matcher/mock" "github.com/anchore/grype/grype/matcher/ruby" "github.com/anchore/grype/grype/pkg" "github.com/anchore/grype/grype/pkg/qualifier" @@ -27,7 +29,6 @@ import ( "github.com/anchore/grype/internal/bus" "github.com/anchore/syft/syft/cpe" "github.com/anchore/syft/syft/file" - "github.com/anchore/syft/syft/linux" syftPkg "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/source" ) @@ -250,7 +251,6 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { { name: "no matches", fields: fields{ - //Store: str, Matchers: matcher.NewDefaultMatchers(matcher.Config{}), }, args: args{ @@ -263,9 +263,9 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { }, }, context: pkg.Context{ - Distro: &linux.Release{ - ID: "debian", - VersionID: "8", + Distro: &distro.Distro{ + Type: "debian", + Version: "8", }, }, }, @@ -273,7 +273,6 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { { name: "matches by exact-direct match (OS)", fields: fields{ - //Store: str, Matchers: matcher.NewDefaultMatchers(matcher.Config{}), }, args: args{ @@ -281,9 +280,9 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { neutron2013Pkg, }, context: pkg.Context{ - Distro: &linux.Release{ - ID: "debian", - VersionID: "8", + Distro: &distro.Distro{ + Type: "debian", + Version: "8", }, }, }, @@ -304,14 +303,14 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { Details: match.Details{ { Type: match.ExactDirectMatch, - SearchedBy: map[string]any{ - "distro": map[string]string{"type": "debian", "version": "8"}, - "namespace": "debian:distro:debian:8", - "package": map[string]string{"name": "neutron", "version": "2013.1.1-1"}, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{Type: "debian", Version: "8"}, + Namespace: "debian:distro:debian:8", + Package: match.PackageParameter{Name: "neutron", Version: "2013.1.1-1"}, }, - Found: map[string]any{ - "versionConstraint": "< 2014.1.3-6 (deb)", - "vulnerabilityID": "CVE-2014-fake-1", + Found: match.DistroResult{ + 
VulnerabilityID: "CVE-2014-fake-1", + VersionConstraint: "< 2014.1.3-6 (deb)", }, Matcher: "dpkg-matcher", Confidence: 1, @@ -325,7 +324,6 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { { name: "fail on severity threshold", fields: fields{ - //Store: str, Matchers: matcher.NewDefaultMatchers(matcher.Config{}), FailSeverity: func() *vulnerability.Severity { x := vulnerability.LowSeverity @@ -337,9 +335,9 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { neutron2013Pkg, }, context: pkg.Context{ - Distro: &linux.Release{ - ID: "debian", - VersionID: "8", + Distro: &distro.Distro{ + Type: "debian", + Version: "8", }, }, }, @@ -360,14 +358,14 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { Details: match.Details{ { Type: match.ExactDirectMatch, - SearchedBy: map[string]any{ - "distro": map[string]string{"type": "debian", "version": "8"}, - "namespace": "debian:distro:debian:8", - "package": map[string]string{"name": "neutron", "version": "2013.1.1-1"}, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{Type: "debian", Version: "8"}, + Namespace: "debian:distro:debian:8", + Package: match.PackageParameter{Name: "neutron", Version: "2013.1.1-1"}, }, - Found: map[string]any{ - "versionConstraint": "< 2014.1.3-6 (deb)", - "vulnerabilityID": "CVE-2014-fake-1", + Found: match.DistroResult{ + VulnerabilityID: "CVE-2014-fake-1", + VersionConstraint: "< 2014.1.3-6 (deb)", }, Matcher: "dpkg-matcher", Confidence: 1, @@ -381,7 +379,6 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { { name: "pass on severity threshold with VEX", fields: fields{ - //Store: str, Matchers: matcher.NewDefaultMatchers(matcher.Config{}), FailSeverity: func() *vulnerability.Severity { x := vulnerability.LowSeverity @@ -412,9 +409,9 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { }, }, }, - Distro: &linux.Release{ - ID: "debian", - VersionID: "8", + Distro: &distro.Distro{ + Type: "debian", + Version: "8", }, }, }, @@ -443,14 +440,14 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { Details: match.Details{ { Type: match.ExactDirectMatch, - SearchedBy: map[string]any{ - "distro": map[string]string{"type": "debian", "version": "8"}, - "namespace": "debian:distro:debian:8", - "package": map[string]string{"name": "neutron", "version": "2013.1.1-1"}, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{Type: "debian", Version: "8"}, + Namespace: "debian:distro:debian:8", + Package: match.PackageParameter{Name: "neutron", Version: "2013.1.1-1"}, }, - Found: map[string]any{ - "versionConstraint": "< 2014.1.3-6 (deb)", - "vulnerabilityID": "CVE-2014-fake-1", + Found: match.DistroResult{ + VulnerabilityID: "CVE-2014-fake-1", + VersionConstraint: "< 2014.1.3-6 (deb)", }, Matcher: "dpkg-matcher", Confidence: 1, @@ -464,7 +461,6 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { { name: "matches by exact-direct match (language)", fields: fields{ - //Store: str, Matchers: matcher.NewDefaultMatchers(matcher.Config{ Ruby: ruby.MatcherConfig{ UseCPEs: true, @@ -501,7 +497,7 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { CPEs: []string{ "cpe:2.3:*:activerecord:activerecord:3.7.5:*:*:*:*:rails:*:*", }, - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "activerecord", Version: "3.7.5", }, @@ -540,14 +536,14 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { Details: match.Details{ { Type: match.ExactDirectMatch, - SearchedBy: map[string]any{ - "language": "ruby", - 
"namespace": "github:language:ruby", - "package": map[string]string{"name": "activerecord", "version": "3.7.5"}, + SearchedBy: match.EcosystemParameters{ + Language: "ruby", + Namespace: "github:language:ruby", + Package: match.PackageParameter{Name: "activerecord", Version: "3.7.5"}, }, - Found: map[string]any{ - "versionConstraint": "< 3.7.6 (unknown)", - "vulnerabilityID": "GHSA-2014-fake-3", + Found: match.EcosystemResult{ + VulnerabilityID: "GHSA-2014-fake-3", + VersionConstraint: "< 3.7.6 (unknown)", }, Matcher: "ruby-gem-matcher", Confidence: 1, @@ -561,7 +557,6 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { { name: "normalize by cve", fields: fields{ - //Store: str, Matchers: matcher.NewDefaultMatchers( matcher.Config{ Ruby: ruby.MatcherConfig{ @@ -602,14 +597,14 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { Details: match.Details{ { Type: match.ExactDirectMatch, - SearchedBy: map[string]any{ - "language": "ruby", - "namespace": "github:language:ruby", - "package": map[string]string{"name": "activerecord", "version": "3.7.5"}, + SearchedBy: match.EcosystemParameters{ + Language: "ruby", + Namespace: "github:language:ruby", + Package: match.PackageParameter{Name: "activerecord", Version: "3.7.5"}, }, - Found: map[string]any{ - "versionConstraint": "< 3.7.6 (unknown)", - "vulnerabilityID": "GHSA-2014-fake-3", + Found: match.EcosystemResult{ + VulnerabilityID: "GHSA-2014-fake-3", + VersionConstraint: "< 3.7.6 (unknown)", }, Matcher: "ruby-gem-matcher", Confidence: 1, @@ -621,7 +616,7 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { CPEs: []string{ "cpe:2.3:*:activerecord:activerecord:3.7.5:*:*:*:*:rails:*:*", }, - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "activerecord", Version: "3.7.5", }, @@ -645,7 +640,6 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { { name: "normalize by cve -- ignore GHSA", fields: fields{ - //Store: str, Matchers: matcher.NewDefaultMatchers( matcher.Config{ Ruby: ruby.MatcherConfig{ @@ -690,7 +684,7 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { CPEs: []string{ "cpe:2.3:*:activerecord:activerecord:3.7.5:*:*:*:*:rails:*:*", }, - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "activerecord", Version: "3.7.5", }, @@ -733,14 +727,14 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { Details: match.Details{ { Type: match.ExactDirectMatch, - SearchedBy: map[string]any{ - "language": "ruby", - "namespace": "github:language:ruby", - "package": map[string]string{"name": "activerecord", "version": "3.7.5"}, + SearchedBy: match.EcosystemParameters{ + Language: "ruby", + Namespace: "github:language:ruby", + Package: match.PackageParameter{Name: "activerecord", Version: "3.7.5"}, }, - Found: map[string]any{ - "versionConstraint": "< 3.7.6 (unknown)", - "vulnerabilityID": "GHSA-2014-fake-3", + Found: match.EcosystemResult{ + VulnerabilityID: "GHSA-2014-fake-3", + VersionConstraint: "< 3.7.6 (unknown)", }, Matcher: "ruby-gem-matcher", Confidence: 1, @@ -758,7 +752,6 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { { name: "normalize by cve -- ignore CVE", fields: fields{ - //Store: str, Matchers: matcher.NewDefaultMatchers( matcher.Config{ Ruby: ruby.MatcherConfig{ @@ -806,7 +799,7 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { CPEs: []string{ "cpe:2.3:*:activerecord:activerecord:3.7.5:*:*:*:*:rails:*:*", }, - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "activerecord", 
Version: "3.7.5", }, @@ -857,14 +850,14 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { Details: match.Details{ { Type: match.ExactDirectMatch, - SearchedBy: map[string]any{ - "language": "ruby", - "namespace": "github:language:ruby", - "package": map[string]string{"name": "activerecord", "version": "3.7.5"}, + SearchedBy: match.EcosystemParameters{ + Language: "ruby", + Namespace: "github:language:ruby", + Package: match.PackageParameter{Name: "activerecord", Version: "3.7.5"}, }, - Found: map[string]any{ - "versionConstraint": "< 3.7.6 (unknown)", - "vulnerabilityID": "GHSA-2014-fake-3", + Found: match.EcosystemResult{ + VulnerabilityID: "GHSA-2014-fake-3", + VersionConstraint: "< 3.7.6 (unknown)", }, Matcher: "ruby-gem-matcher", Confidence: 1, @@ -878,7 +871,6 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { { name: "ignore CVE (not normalized by CVE)", fields: fields{ - //Store: str, Matchers: matcher.NewDefaultMatchers(matcher.Config{ Ruby: ruby.MatcherConfig{ UseCPEs: true, @@ -918,14 +910,14 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { Details: match.Details{ { Type: match.ExactDirectMatch, - SearchedBy: map[string]any{ - "language": "ruby", - "namespace": "github:language:ruby", - "package": map[string]string{"name": "activerecord", "version": "3.7.5"}, + SearchedBy: match.EcosystemParameters{ + Language: "ruby", + Namespace: "github:language:ruby", + Package: match.PackageParameter{Name: "activerecord", Version: "3.7.5"}, }, - Found: map[string]any{ - "versionConstraint": "< 3.7.6 (unknown)", - "vulnerabilityID": "GHSA-2014-fake-3", + Found: match.EcosystemResult{ + VulnerabilityID: "GHSA-2014-fake-3", + VersionConstraint: "< 3.7.6 (unknown)", }, Matcher: "ruby-gem-matcher", Confidence: 1, @@ -963,7 +955,7 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { CPEs: []string{ "cpe:2.3:*:activerecord:activerecord:3.7.5:*:*:*:*:rails:*:*", }, - Package: match.CPEPackageParameter{ + Package: match.PackageParameter{ Name: "activerecord", Version: "3.7.5", }, @@ -985,6 +977,7 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { wantErr: nil, }, } + for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { m := &VulnerabilityMatcher{ @@ -1032,6 +1025,61 @@ func TestVulnerabilityMatcher_FindMatches(t *testing.T) { } } +func Test_fatalErrors(t *testing.T) { + tests := []struct { + name string + matcherFunc matcherMock.MatchFunc + assertErr assert.ErrorAssertionFunc + }{ + { + name: "no error", + matcherFunc: func(_ vulnerability.Provider, _ pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { + return nil, nil, nil + }, + assertErr: assert.NoError, + }, + { + name: "non-fatal error", + matcherFunc: func(_ vulnerability.Provider, _ pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { + return nil, nil, errors.New("some error") + }, + assertErr: assert.NoError, + }, + { + name: "fatal error", + matcherFunc: func(_ vulnerability.Provider, _ pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { + return nil, nil, match.NewFatalError(match.UnknownMatcherType, errors.New("some error")) + }, + assertErr: assert.Error, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + m := &VulnerabilityMatcher{ + Matchers: []match.Matcher{matcherMock.New(syftPkg.JavaPkg, tt.matcherFunc)}, + } + + _, _, err := m.FindMatches([]pkg.Package{ + { + Name: "foo", + Version: "1.2.3", + Type: syftPkg.JavaPkg, + }, + }, + pkg.Context{ + Distro: &distro.Distro{ + Type: "debian", + Version: "8", + }, + }, + ) + + 
tt.assertErr(t, err) + }) + } +} + func Test_indexFalsePositivesByLocation(t *testing.T) { cases := []struct { name string @@ -1175,7 +1223,7 @@ func Test_indexFalsePositivesByLocation(t *testing.T) { apkMatcher := &apk.Matcher{} var allMatches []match.Match - var allIgnores []match.IgnoredMatch + var allIgnores []match.IgnoreFilter for _, p := range tt.pkgs { matches, ignores, err := apkMatcher.Match(vp, p) require.NoError(t, err) @@ -1185,13 +1233,14 @@ func Test_indexFalsePositivesByLocation(t *testing.T) { actualResult := map[string][]string{} for _, ignore := range allIgnores { - apkMetadata, ok := ignore.Package.Metadata.(pkg.ApkMetadata) - require.True(t, ok) - for _, f := range apkMetadata.Files { - for _, r := range ignore.AppliedIgnoreRules { - actualResult[f.Path] = append(actualResult[f.Path], r.Vulnerability) - } + rule, ok := ignore.(match.IgnoreRule) + if !ok { + continue + } + if rule.Package.Location == "" { + continue } + actualResult[rule.Package.Location] = append(actualResult[rule.Package.Location], rule.Vulnerability) } assert.Equal(t, tt.expectedResult, actualResult) }) @@ -1303,25 +1352,14 @@ func Test_filterMatchesUsingDistroFalsePositives(t *testing.T) { for _, tt := range cases { t.Run(tt.name, func(t *testing.T) { - var allIgnores []match.IgnoredMatch + var allIgnores []match.IgnoreFilter for path, cves := range tt.fpIndex { for _, cve := range cves { - allIgnores = append(allIgnores, match.IgnoredMatch{ - Match: match.Match{ - Package: pkg.Package{ - Metadata: pkg.ApkMetadata{ - Files: []pkg.ApkFileRecord{ - { - Path: path, - }, - }, - }, - }, - }, - AppliedIgnoreRules: []match.IgnoreRule{ - { - Vulnerability: cve, - }, + allIgnores = append(allIgnores, match.IgnoreRule{ + Vulnerability: cve, + IncludeAliases: true, + Package: match.IgnoreRulePackage{ + Location: path, }, }) } @@ -1336,6 +1374,105 @@ func Test_filterMatchesUsingDistroFalsePositives(t *testing.T) { } } +func Test_ignoredMatchFilter(t *testing.T) { + matches := []match.Match{ + { + Vulnerability: vulnerability.Vulnerability{ + Reference: vulnerability.Reference{ + ID: "CVE-123", + }, + }, + Package: pkg.Package{ + Locations: file.NewLocationSet(file.NewLocation("/usr/bin/thing")), + }, + }, + { + Vulnerability: vulnerability.Vulnerability{ + Reference: vulnerability.Reference{ + ID: "CVE-456", + }, + }, + }, + { + Vulnerability: vulnerability.Vulnerability{ + Reference: vulnerability.Reference{ + ID: "CVE-789", + }, + Status: "filter-me", + }, + }, + } + + ignores := []match.IgnoreFilter{ + match.IgnoreRule{ + Reason: "test-location-ignore-rule", + Package: match.IgnoreRulePackage{ + Location: "/usr/bin/thing", + }, + }, + testIgnoreFilter{ + f: func(m match.Match) bool { + return m.Vulnerability.Status == "filter-me" + }, + }, + } + + f := ignoredMatchFilter(ignores) + + var ignoredReasons []string + for _, m := range matches { + got := f.IgnoreMatch(m) + for _, r := range got { + ignoredReasons = append(ignoredReasons, r.Reason) + } + } + + require.ElementsMatch(t, []string{"test-location-ignore-rule", "test-filtered"}, ignoredReasons) +} + +type testIgnoreFilter struct { + f func(match.Match) bool +} + +func (t testIgnoreFilter) IgnoreMatch(m match.Match) []match.IgnoreRule { + if t.f(m) { + return []match.IgnoreRule{ + { + Reason: "test-filtered", + }, + } + } + return nil +} + +type panicyMatcher struct { + matcherType match.MatcherType +} + +func (m *panicyMatcher) PackageTypes() []syftPkg.Type { + return nil +} + +func (m *panicyMatcher) Type() match.MatcherType { + return m.matcherType +} 
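For readers skimming these test helpers, the essence of the panic safety added in callMatcherSafely is a deferred recover that converts a panic into an error carrying the stack trace. A minimal standalone sketch of that pattern (not grype code, just the general shape) is:

```go
package main

import (
	"fmt"
	"runtime/debug"
)

// callSafely mirrors the pattern callMatcherSafely uses above: a deferred
// recover converts a panic from the wrapped call into a plain error with the
// stack attached, so one misbehaving callee cannot crash the whole run.
func callSafely(fn func() error) (err error) {
	defer func() {
		if e := recover(); e != nil {
			err = fmt.Errorf("recovered from panic: %v at:\n%s", e, string(debug.Stack()))
		}
	}()
	return fn()
}

func main() {
	err := callSafely(func() error { panic("boom") })
	fmt.Println(err != nil) // true: the panic surfaced as an error instead of crashing
}
```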
+ +func (m *panicyMatcher) Match(_ vulnerability.Provider, _ pkg.Package) ([]match.Match, []match.IgnoreFilter, error) { + panic("test panic message") +} + +func TestCallMatcherSafely_RecoverFromPanic(t *testing.T) { + matcher := &panicyMatcher{ + matcherType: "test-matcher", + } + _, _, err := callMatcherSafely(matcher, nil, pkg.Package{}) + + require.Error(t, err) + assert.True(t, match.IsFatalError(err)) + require.Contains(t, err.Error(), "test panic message", "missing message") + require.Contains(t, err.Error(), "test-matcher", "missing matcher name") +} + type busListener struct { matching monitor.Matching } diff --git a/internal/cvss/metrics.go b/internal/cvss/metrics.go new file mode 100644 index 00000000000..0a8fe77ba48 --- /dev/null +++ b/internal/cvss/metrics.go @@ -0,0 +1,93 @@ +package cvss + +import ( + "fmt" + "math" + "strings" + + gocvss20 "github.com/pandatix/go-cvss/20" + gocvss30 "github.com/pandatix/go-cvss/30" + gocvss31 "github.com/pandatix/go-cvss/31" + gocvss40 "github.com/pandatix/go-cvss/40" + + "github.com/anchore/grype/grype/vulnerability" +) + +func ParseMetricsFromVector(vector string) (*vulnerability.CvssMetrics, error) { + switch { + case strings.HasPrefix(vector, "CVSS:3.0"): + cvss, err := gocvss30.ParseVector(vector) + if err != nil { + return nil, fmt.Errorf("unable to parse CVSS v3 vector: %w", err) + } + ex := roundScore(cvss.Exploitability()) + im := roundScore(cvss.Impact()) + return &vulnerability.CvssMetrics{ + BaseScore: roundScore(cvss.BaseScore()), + ExploitabilityScore: &ex, + ImpactScore: &im, + }, nil + case strings.HasPrefix(vector, "CVSS:3.1"): + cvss, err := gocvss31.ParseVector(vector) + if err != nil { + return nil, fmt.Errorf("unable to parse CVSS v3.1 vector: %w", err) + } + ex := roundScore(cvss.Exploitability()) + im := roundScore(cvss.Impact()) + return &vulnerability.CvssMetrics{ + BaseScore: roundScore(cvss.BaseScore()), + ExploitabilityScore: &ex, + ImpactScore: &im, + }, nil + case strings.HasPrefix(vector, "CVSS:4.0"): + cvss, err := gocvss40.ParseVector(vector) + if err != nil { + return nil, fmt.Errorf("unable to parse CVSS v4.0 vector: %w", err) + } + // there are no exploitability and impact scores in CVSS v4.0 + return &vulnerability.CvssMetrics{ + BaseScore: roundScore(cvss.Score()), + }, nil + default: + // should be CVSS v2.0 or is invalid + cvss, err := gocvss20.ParseVector(vector) + if err != nil { + return nil, fmt.Errorf("unable to parse CVSS v2 vector: %w", err) + } + ex := roundScore(cvss.Exploitability()) + im := roundScore(cvss.Impact()) + return &vulnerability.CvssMetrics{ + BaseScore: roundScore(cvss.BaseScore()), + ExploitabilityScore: &ex, + ImpactScore: &im, + }, nil + } +} + +func SeverityFromBaseScore(bs float64) vulnerability.Severity { + switch { + case bs >= 10.0: + return vulnerability.UnknownSeverity + case bs >= 9.0: + return vulnerability.CriticalSeverity + case bs >= 7.0: + return vulnerability.HighSeverity + case bs >= 4.0: + return vulnerability.MediumSeverity + case bs >= 0.1: + return vulnerability.LowSeverity + case bs > 0: + return vulnerability.NegligibleSeverity + } + return vulnerability.UnknownSeverity +} + +// roundScore rounds the score to the nearest tenth based on first.org rounding rules +// see https://www.first.org/cvss/v3.1/specification-document#Appendix-A---Floating-Point-Rounding +func roundScore(score float64) float64 { + intInput := int(math.Round(score * 100000)) + if intInput%10000 == 0 { + return float64(intInput) / 100000.0 + } + return 
(math.Floor(float64(intInput)/10000.0) + 1) / 10.0 +} diff --git a/internal/cvss/metrics_test.go b/internal/cvss/metrics_test.go new file mode 100644 index 00000000000..83e4a19c1e1 --- /dev/null +++ b/internal/cvss/metrics_test.go @@ -0,0 +1,195 @@ +package cvss + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/anchore/grype/grype/vulnerability" +) + +func TestParseMetricsFromVector(t *testing.T) { + tests := []struct { + name string + vector string + expectedMetrics *vulnerability.CvssMetrics + wantErr require.ErrorAssertionFunc + }{ + { + name: "valid CVSS 2.0", + vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P", + expectedMetrics: &vulnerability.CvssMetrics{ + BaseScore: 7.5, + ExploitabilityScore: ptr(10.0), + ImpactScore: ptr(6.5), + }, + }, + { + name: "valid CVSS 3.0", + vector: "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + expectedMetrics: &vulnerability.CvssMetrics{ + BaseScore: 9.8, + ExploitabilityScore: ptr(3.9), + ImpactScore: ptr(5.9), + }, + }, + { + name: "valid CVSS 3.1", + vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + expectedMetrics: &vulnerability.CvssMetrics{ + BaseScore: 9.8, + ExploitabilityScore: ptr(3.9), + ImpactScore: ptr(5.9), + }, + }, + { + name: "valid CVSS 4.0", + vector: "CVSS:4.0/AV:N/AC:H/AT:P/PR:L/UI:N/VC:N/VI:H/VA:L/SC:L/SI:H/SA:L/MAC:L/MAT:P/MPR:N/S:N/R:A/RE:L/U:Clear", + expectedMetrics: &vulnerability.CvssMetrics{ + BaseScore: 9.1, + }, + }, + { + name: "invalid CVSS 2.0", + vector: "AV:N/AC:INVALID", + wantErr: require.Error, + }, + { + name: "invalid CVSS 3.0", + vector: "CVSS:3.0/AV:INVALID", + wantErr: require.Error, + }, + { + name: "invalid CVSS 3.1", + vector: "CVSS:3.1/AV:INVALID", + wantErr: require.Error, + }, + { + name: "invalid CVSS 4.0", + vector: "CVSS:4.0/AV:INVALID", + wantErr: require.Error, + }, + { + name: "empty vector", + vector: "", + wantErr: require.Error, + }, + { + name: "malformed vector", + vector: "INVALID:VECTOR", + wantErr: require.Error, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.wantErr == nil { + tt.wantErr = require.NoError + } + result, err := ParseMetricsFromVector(tt.vector) + tt.wantErr(t, err) + if err != nil { + assert.Nil(t, result) + return + } + + require.NotNil(t, result) + assert.Equal(t, tt.expectedMetrics.BaseScore, result.BaseScore, "given vector: %s", tt.vector) + + if tt.expectedMetrics.ExploitabilityScore != nil { + require.NotNil(t, result.ExploitabilityScore) + assert.Equal(t, *tt.expectedMetrics.ExploitabilityScore, *result.ExploitabilityScore, "given vector: %s", tt.vector) + } + + if tt.expectedMetrics.ImpactScore != nil { + require.NotNil(t, result.ImpactScore) + assert.Equal(t, *tt.expectedMetrics.ImpactScore, *result.ImpactScore, "given vector: %s", tt.vector) + } + }) + } +} + +func TestSeverityFromBaseScore(t *testing.T) { + tests := []struct { + name string + score float64 + expected vulnerability.Severity + }{ + { + name: "unknown severity (exactly 10.0)", + score: 10.0, + expected: vulnerability.UnknownSeverity, + }, + { + name: "unknown severity (greater than 10.0)", + score: 10.1, + expected: vulnerability.UnknownSeverity, + }, + { + name: "critical severity (lower bound)", + score: 9.0, + expected: vulnerability.CriticalSeverity, + }, + { + name: "critical severity (upper bound)", + score: 9.9, + expected: vulnerability.CriticalSeverity, + }, + { + name: "high severity (lower bound)", + score: 7.0, + expected: vulnerability.HighSeverity, + }, + { + name: "high 
severity (upper bound)", + score: 8.9, + expected: vulnerability.HighSeverity, + }, + { + name: "medium severity (lower bound)", + score: 4.0, + expected: vulnerability.MediumSeverity, + }, + { + name: "medium severity (upper bound)", + score: 6.9, + expected: vulnerability.MediumSeverity, + }, + { + name: "low severity (lower bound)", + score: 0.1, + expected: vulnerability.LowSeverity, + }, + { + name: "low severity (upper bound)", + score: 3.9, + expected: vulnerability.LowSeverity, + }, + { + name: "negligible severity (between 0 and 0.1)", + score: 0.05, + expected: vulnerability.NegligibleSeverity, + }, + { + name: "unknown severity (exactly zero)", + score: 0.0, + expected: vulnerability.UnknownSeverity, + }, + { + name: "unknown severity (negative)", + score: -1.0, + expected: vulnerability.UnknownSeverity, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.expected, SeverityFromBaseScore(tt.score)) + }) + } +} + +func ptr(f float64) *float64 { + return &f +} diff --git a/internal/file/copy.go b/internal/file/copy.go index 4508044420d..53fcb6547c8 100644 --- a/internal/file/copy.go +++ b/internal/file/copy.go @@ -11,7 +11,7 @@ import ( func CopyDir(fs afero.Fs, src string, dst string) error { var err error - var fds []os.DirEntry + var fds []os.FileInfo // <-- afero.ReadDir returns []os.FileInfo var srcinfo os.FileInfo if srcinfo, err = fs.Stat(src); err != nil { @@ -22,7 +22,7 @@ func CopyDir(fs afero.Fs, src string, dst string) error { return err } - if fds, err = os.ReadDir(src); err != nil { + if fds, err = afero.ReadDir(fs, src); err != nil { return err } for _, fd := range fds { diff --git a/internal/format/writer.go b/internal/format/writer.go index a55351a77e4..03e65652ff4 100644 --- a/internal/format/writer.go +++ b/internal/format/writer.go @@ -9,8 +9,8 @@ import ( "strings" "github.com/hashicorp/go-multierror" - "github.com/mitchellh/go-homedir" + "github.com/anchore/go-homedir" "github.com/anchore/grype/grype/presenter/models" "github.com/anchore/grype/internal/bus" "github.com/anchore/grype/internal/log" diff --git a/internal/schemaver/schema_ver.go b/internal/schemaver/schema_ver.go index 83423f3ef3c..16957e0ef60 100644 --- a/internal/schemaver/schema_ver.go +++ b/internal/schemaver/schema_ver.go @@ -83,6 +83,26 @@ func (s SchemaVer) LessThan(other SchemaVer) bool { return s.Addition < other.Addition } +func (s SchemaVer) LessThanOrEqualTo(other SchemaVer) bool { + return s.LessThan(other) || s.Equal(other) +} + +func (s SchemaVer) Equal(other SchemaVer) bool { + return s.Model == other.Model && s.Revision == other.Revision && s.Addition == other.Addition +} + +func (s SchemaVer) GreaterThan(other SchemaVer) bool { + if s.Model != other.Model { + return s.Model > other.Model + } + + if s.Revision != other.Revision { + return s.Revision > other.Revision + } + + return s.Addition > other.Addition +} + func (s SchemaVer) GreaterOrEqualTo(other SchemaVer) bool { - return !s.LessThan(other) + return s.GreaterThan(other) || s.Equal(other) } diff --git a/internal/schemaver/schema_ver_test.go b/internal/schemaver/schema_ver_test.go index 04d5bf84d6d..37de3ea53cd 100644 --- a/internal/schemaver/schema_ver_test.go +++ b/internal/schemaver/schema_ver_test.go @@ -1,78 +1,263 @@ package schemaver import ( + "encoding/json" "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -func TestSchemaVerComparisons(t *testing.T) { +func TestSchemaVer_LessThan(t *testing.T) { tests := []struct { - name string 
- v1 SchemaVer - v2 SchemaVer - lessThan bool - greaterOrEqual bool + name string + v1 SchemaVer + v2 SchemaVer + want bool }{ { - name: "equal versions", - v1: New(1, 0, 0), - v2: New(1, 0, 0), - lessThan: false, - greaterOrEqual: true, + name: "equal versions", + v1: New(1, 0, 0), + v2: New(1, 0, 0), + want: false, }, { - name: "different model versions", - v1: New(1, 0, 0), - v2: New(2, 0, 0), - lessThan: true, - greaterOrEqual: false, + name: "different model versions", + v1: New(1, 0, 0), + v2: New(2, 0, 0), + want: true, }, { - name: "different revision versions", - v1: New(1, 1, 0), - v2: New(1, 2, 0), - lessThan: true, - greaterOrEqual: false, + name: "different revision versions", + v1: New(1, 1, 0), + v2: New(1, 2, 0), + want: true, }, { - name: "different addition versions", - v1: New(1, 0, 1), - v2: New(1, 0, 2), - lessThan: true, - greaterOrEqual: false, + name: "different addition versions", + v1: New(1, 0, 1), + v2: New(1, 0, 2), + want: true, }, { - name: "inverted addition versions", - v1: New(1, 0, 2), - v2: New(1, 0, 1), - lessThan: false, - greaterOrEqual: true, + name: "inverted addition versions", + v1: New(1, 0, 2), + v2: New(1, 0, 1), + want: false, }, { - name: "greater model overrides lower revision", - v1: New(2, 0, 0), - v2: New(1, 9, 9), - lessThan: false, - greaterOrEqual: true, + name: "greater model overrides lower revision", + v1: New(2, 0, 0), + v2: New(1, 9, 9), + want: false, }, { - name: "greater revision overrides lower addition", - v1: New(1, 2, 0), - v2: New(1, 1, 9), - lessThan: false, - greaterOrEqual: true, + name: "greater revision overrides lower addition", + v1: New(1, 2, 0), + v2: New(1, 1, 9), + want: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - if got := tt.v1.LessThan(tt.v2); got != tt.lessThan { - t.Errorf("LessThan() = %v, want %v", got, tt.lessThan) - } - if got := tt.v1.GreaterOrEqualTo(tt.v2); got != tt.greaterOrEqual { - t.Errorf("GreaterOrEqualTo() = %v, want %v", got, tt.greaterOrEqual) - } + assert.Equal(t, tt.want, tt.v1.LessThan(tt.v2)) + }) + } +} + +func TestSchemaVer_GreaterOrEqualTo(t *testing.T) { + tests := []struct { + name string + v1 SchemaVer + v2 SchemaVer + want bool + }{ + { + name: "equal versions", + v1: New(1, 0, 0), + v2: New(1, 0, 0), + want: true, + }, + { + name: "different model versions", + v1: New(1, 0, 0), + v2: New(2, 0, 0), + want: false, + }, + { + name: "different revision versions", + v1: New(1, 1, 0), + v2: New(1, 2, 0), + want: false, + }, + { + name: "different addition versions", + v1: New(1, 0, 1), + v2: New(1, 0, 2), + want: false, + }, + { + name: "inverted addition versions", + v1: New(1, 0, 2), + v2: New(1, 0, 1), + want: true, + }, + { + name: "greater model overrides lower revision", + v1: New(2, 0, 0), + v2: New(1, 9, 9), + want: true, + }, + { + name: "greater revision overrides lower addition", + v1: New(1, 2, 0), + v2: New(1, 1, 9), + want: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.v1.GreaterOrEqualTo(tt.v2)) + }) + } +} + +func TestSchemaVer_LessThanOrEqualTo(t *testing.T) { + tests := []struct { + name string + v1 SchemaVer + v2 SchemaVer + want bool + }{ + { + name: "equal versions", + v1: New(1, 2, 3), + v2: New(1, 2, 3), + want: true, + }, + { + name: "less than version", + v1: New(1, 2, 3), + v2: New(1, 2, 4), + want: true, + }, + { + name: "greater than version", + v1: New(1, 2, 4), + v2: New(1, 2, 3), + want: false, + }, + { + name: "different model - less", + v1: New(1, 9, 
9), + v2: New(2, 0, 0), + want: true, + }, + { + name: "different model - greater", + v1: New(2, 0, 0), + v2: New(1, 9, 9), + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.v1.LessThanOrEqualTo(tt.v2)) + }) + } +} + +func TestSchemaVer_Equal(t *testing.T) { + tests := []struct { + name string + v1 SchemaVer + v2 SchemaVer + want bool + }{ + { + name: "equal versions", + v1: New(1, 2, 3), + v2: New(1, 2, 3), + want: true, + }, + { + name: "different addition", + v1: New(1, 2, 3), + v2: New(1, 2, 4), + want: false, + }, + { + name: "different revision", + v1: New(1, 2, 3), + v2: New(1, 3, 3), + want: false, + }, + { + name: "different model", + v1: New(1, 2, 3), + v2: New(2, 2, 3), + want: false, + }, + { + name: "zero values equal", + v1: New(1, 0, 0), + v2: New(1, 0, 0), + want: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.v1.Equal(tt.v2)) + }) + } +} + +func TestSchemaVer_GreaterThan(t *testing.T) { + tests := []struct { + name string + v1 SchemaVer + v2 SchemaVer + want bool + }{ + { + name: "equal versions", + v1: New(1, 2, 3), + v2: New(1, 2, 3), + want: false, + }, + { + name: "greater addition", + v1: New(1, 2, 4), + v2: New(1, 2, 3), + want: true, + }, + { + name: "greater revision", + v1: New(1, 3, 0), + v2: New(1, 2, 9), + want: true, + }, + { + name: "greater model", + v1: New(2, 0, 0), + v2: New(1, 9, 9), + want: true, + }, + { + name: "less than", + v1: New(1, 2, 3), + v2: New(1, 2, 4), + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.v1.GreaterThan(tt.v2)) }) } } @@ -90,6 +275,18 @@ func TestParse(t *testing.T) { want: New(1, 2, 3), wantErr: false, }, + { + name: "valid version with v prefix", + input: "v1.2.3", + want: New(1, 2, 3), + wantErr: false, + }, + { + name: "valid version with v prefix and zeros", + input: "v1.0.0", + want: New(1, 0, 0), + wantErr: false, + }, { name: "valid large numbers", input: "999.888.777", @@ -108,6 +305,12 @@ func TestParse(t *testing.T) { want: New(0, 0, 0), wantErr: true, }, + { + name: "invalid version with v prefix and zero model", + input: "v0.0.0", + want: New(0, 0, 0), + wantErr: true, + }, { name: "invalid empty string", input: "", @@ -243,3 +446,165 @@ func TestSchemaVer_Valid(t *testing.T) { }) } } + +func TestSchemaVer_String(t *testing.T) { + tests := []struct { + name string + schema SchemaVer + want string + }{ + { + name: "basic version", + schema: New(1, 2, 3), + want: "v1.2.3", + }, + { + name: "version with zeros", + schema: New(1, 0, 0), + want: "v1.0.0", + }, + { + name: "large numbers", + schema: New(999, 888, 777), + want: "v999.888.777", + }, + { + name: "single digits", + schema: New(5, 4, 3), + want: "v5.4.3", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.schema.String()) + }) + } +} + +func TestSchemaVer_MarshalJSON(t *testing.T) { + tests := []struct { + name string + schema SchemaVer + want string + }{ + { + name: "basic version", + schema: New(1, 2, 3), + want: `"v1.2.3"`, + }, + { + name: "version with zeros", + schema: New(1, 0, 0), + want: `"v1.0.0"`, + }, + { + name: "large numbers", + schema: New(999, 888, 777), + want: `"v999.888.777"`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.schema.MarshalJSON() + require.NoError(t, err) + assert.Equal(t, tt.want, string(got)) + }) + } +} + +func 
TestSchemaVer_UnmarshalJSON(t *testing.T) { + tests := []struct { + name string + input string + want SchemaVer + wantErr require.ErrorAssertionFunc + }{ + { + name: "valid version", + input: `"v1.2.3"`, + want: New(1, 2, 3), + wantErr: require.NoError, + }, + { + name: "valid version without v prefix", + input: `"1.2.3"`, + want: New(1, 2, 3), + wantErr: require.NoError, + }, + { + name: "valid version with zeros", + input: `"v1.0.0"`, + want: New(1, 0, 0), + wantErr: require.NoError, + }, + { + name: "invalid JSON format", + input: `{"version": "v1.2.3"}`, + wantErr: require.Error, + }, + { + name: "invalid version format", + input: `"invalid"`, + wantErr: require.Error, + }, + { + name: "invalid zero model", + input: `"v0.1.2"`, + wantErr: require.Error, + }, + { + name: "malformed JSON", + input: `"v1.2.3`, + wantErr: require.Error, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var got SchemaVer + err := json.Unmarshal([]byte(tt.input), &got) + tt.wantErr(t, err) + if err == nil { + assert.Equal(t, tt.want, got) + } + }) + } +} + +func TestSchemaVer_JSONRoundTrip(t *testing.T) { + tests := []struct { + name string + schema SchemaVer + }{ + { + name: "basic version", + schema: New(1, 2, 3), + }, + { + name: "version with zeros", + schema: New(1, 0, 0), + }, + { + name: "large numbers", + schema: New(999, 888, 777), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // marshal + data, err := json.Marshal(tt.schema) + require.NoError(t, err) + + // unmarshal + var got SchemaVer + err = json.Unmarshal(data, &got) + require.NoError(t, err) + + // should be equal + assert.Equal(t, tt.schema, got) + }) + } +} diff --git a/schema/cyclonedx/.gitignore b/schema/cyclonedx/.gitignore deleted file mode 100644 index 472d439fa14..00000000000 --- a/schema/cyclonedx/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -bom.xml -bom.json diff --git a/schema/cyclonedx/Makefile b/schema/cyclonedx/Makefile deleted file mode 100644 index 028ea982c1d..00000000000 --- a/schema/cyclonedx/Makefile +++ /dev/null @@ -1,14 +0,0 @@ -.DEFAULT_GOAL := validate-schema - -.PHONY: validate-schema -validate-schema: validate-schema-xml validate-schema-json - -.PHONY: validate-schema-xml -validate-schema-xml: - go run ../../cmd/grype -c ../../test/grype-test-config.yaml ubuntu:latest -v -o cyclonedx-xml > bom.xml - xmllint --noout --schema ./cyclonedx.xsd bom.xml - -.PHONY: validate-schema-json -validate-schema-json: - go run ../../cmd/grype -c ../../test/grype-test-config.yaml ubuntu:latest -v -o cyclonedx-json > bom.json - ../../.tool/yajsv -s cyclonedx.json bom.json diff --git a/schema/cyclonedx/README.md b/schema/cyclonedx/README.md deleted file mode 100644 index db17f8414e6..00000000000 --- a/schema/cyclonedx/README.md +++ /dev/null @@ -1,20 +0,0 @@ -# CycloneDX Schemas - -`grype` generates a CycloneDX output. This validation is similar to what is done in `syft`, validating output against CycloneDX schemas. - -Validation is done with `xmllint`, which requires a copy of all schemas because it can't work with HTTP references. The schemas are modified to reference local copies of dependent schemas. - -## Updating - -You will need to go to https://github.com/CycloneDX/specification/blob/1.5/schema and download the latest `bom-#.#.xsd` and `spdx.xsd`. 
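For reference, the complete flow the deleted Makefile drove (generate a report with grype, then validate it against the local schema copies) looked like this when run from `schema/cyclonedx/`; a sketch assuming the schema files and the `.tool/yajsv` binary are still present locally:

```sh
# generate CycloneDX XML with grype and validate it against the local XSD copy
go run ../../cmd/grype -c ../../test/grype-test-config.yaml ubuntu:latest -v -o cyclonedx-xml > bom.xml
xmllint --noout --schema ./cyclonedx.xsd bom.xml

# generate CycloneDX JSON and validate it against the local JSON schema copy
go run ../../cmd/grype -c ../../test/grype-test-config.yaml ubuntu:latest -v -o cyclonedx-json > bom.json
../../.tool/yajsv -s cyclonedx.json bom.json
```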
- -Additionally, for `xmllint` to function you will need to patch the bom schema with the location to the SPDX schema by changing: - -```xml - -``` - -To: -```xml - -``` diff --git a/schema/cyclonedx/cyclonedx.json b/schema/cyclonedx/cyclonedx.json deleted file mode 100644 index cffc2b99136..00000000000 --- a/schema/cyclonedx/cyclonedx.json +++ /dev/null @@ -1,3800 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "http://cyclonedx.org/schema/bom-1.6.schema.json", - "type": "object", - "title": "CycloneDX Software Bill of Materials Standard", - "$comment" : "CycloneDX JSON schema is published under the terms of the Apache License 2.0.", - "required": [ - "bomFormat", - "specVersion", - "version" - ], - "additionalProperties": false, - "properties": { - "$schema": { - "type": "string", - "enum": [ - "http://cyclonedx.org/schema/bom-1.6.schema.json" - ] - }, - "bomFormat": { - "type": "string", - "title": "BOM Format", - "description": "Specifies the format of the BOM. This helps to identify the file as CycloneDX since BOMs do not have a filename convention nor does JSON schema support namespaces. This value MUST be \"CycloneDX\".", - "enum": [ - "CycloneDX" - ] - }, - "specVersion": { - "type": "string", - "title": "CycloneDX Specification Version", - "description": "The version of the CycloneDX specification a BOM conforms to (starting at version 1.2).", - "examples": ["1.5"] - }, - "serialNumber": { - "type": "string", - "title": "BOM Serial Number", - "description": "Every BOM generated SHOULD have a unique serial number, even if the contents of the BOM have not changed over time. If specified, the serial number MUST conform to RFC-4122. Use of serial numbers are RECOMMENDED.", - "examples": ["urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79"], - "pattern": "^urn:uuid:[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$" - }, - "version": { - "type": "integer", - "title": "BOM Version", - "description": "Whenever an existing BOM is modified, either manually or through automated processes, the version of the BOM SHOULD be incremented by 1. When a system is presented with multiple BOMs with identical serial numbers, the system SHOULD use the most recent version of the BOM. The default version is '1'.", - "minimum": 1, - "default": 1, - "examples": [1] - }, - "metadata": { - "$ref": "#/definitions/metadata", - "title": "BOM Metadata", - "description": "Provides additional information about a BOM." - }, - "components": { - "type": "array", - "items": {"$ref": "#/definitions/component"}, - "uniqueItems": true, - "title": "Components", - "description": "A list of software and hardware components." - }, - "services": { - "type": "array", - "items": {"$ref": "#/definitions/service"}, - "uniqueItems": true, - "title": "Services", - "description": "A list of services. This may include microservices, function-as-a-service, and other types of network or intra-process services." - }, - "externalReferences": { - "type": "array", - "items": {"$ref": "#/definitions/externalReference"}, - "title": "External References", - "description": "External references provide a way to document systems, sites, and information that may be relevant, but are not included with the BOM. They may also establish specific relationships within or external to the BOM." - }, - "dependencies": { - "type": "array", - "items": {"$ref": "#/definitions/dependency"}, - "uniqueItems": true, - "title": "Dependencies", - "description": "Provides the ability to document dependency relationships." 
- }, - "compositions": { - "type": "array", - "items": {"$ref": "#/definitions/compositions"}, - "uniqueItems": true, - "title": "Compositions", - "description": "Compositions describe constituent parts (including components, services, and dependency relationships) and their completeness. The completeness of vulnerabilities expressed in a BOM may also be described." - }, - "vulnerabilities": { - "type": "array", - "items": {"$ref": "#/definitions/vulnerability"}, - "uniqueItems": true, - "title": "Vulnerabilities", - "description": "Vulnerabilities identified in components or services." - }, - "annotations": { - "type": "array", - "items": {"$ref": "#/definitions/annotations"}, - "uniqueItems": true, - "title": "Annotations", - "description": "Comments made by people, organizations, or tools about any object with a bom-ref, such as components, services, vulnerabilities, or the BOM itself. Unlike inventory information, annotations may contain opinion or commentary from various stakeholders. Annotations may be inline (with inventory) or externalized via BOM-Link, and may optionally be signed." - }, - "formulation": { - "type": "array", - "items": {"$ref": "#/definitions/formula"}, - "uniqueItems": true, - "title": "Formulation", - "description": "Describes how a component or service was manufactured or deployed. This is achieved through the use of formulas, workflows, tasks, and steps, which declare the precise steps to reproduce along with the observed formulas describing the steps which transpired in the manufacturing process." - }, - "properties": { - "type": "array", - "title": "Properties", - "description": "Provides the ability to document properties in a name-value store. This provides flexibility to include data not officially supported in the standard without having to use additional namespaces or create extensions. Unlike key-value stores, properties support duplicate names, each potentially having different values. Property names of interest to the general public are encouraged to be registered in the [CycloneDX Property Taxonomy](https://github.com/CycloneDX/cyclonedx-property-taxonomy). Formal registration is OPTIONAL.", - "items": { - "$ref": "#/definitions/property" - } - }, - "signature": { - "$ref": "#/definitions/signature", - "title": "Signature", - "description": "Enveloped signature in [JSON Signature Format (JSF)](https://cyberphone.github.io/doc/security/jsf.html)." - } - }, - "definitions": { - "refType": { - "description": "Identifier for referable and therefore interlink-able elements.", - "type": "string", - "minLength": 1, - "$comment": "value SHOULD not start with the BOM-Link intro 'urn:cdx:'" - }, - "refLinkType": { - "description": "Descriptor for an element identified by the attribute 'bom-ref' in the same BOM document.\nIn contrast to `bomLinkElementType`.", - "allOf": [{"$ref": "#/definitions/refType"}] - }, - "bomLinkDocumentType": { - "title": "BOM-Link Document", - "description": "Descriptor for another BOM document. See https://cyclonedx.org/capabilities/bomlink/", - "type": "string", - "format": "iri-reference", - "pattern": "^urn:cdx:[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/[1-9][0-9]*$", - "$comment": "part of the pattern is based on `bom.serialNumber`'s pattern" - }, - "bomLinkElementType": { - "title": "BOM-Link Element", - "description": "Descriptor for an element in a BOM document. 
See https://cyclonedx.org/capabilities/bomlink/", - "type": "string", - "format": "iri-reference", - "pattern": "^urn:cdx:[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/[1-9][0-9]*#.+$", - "$comment": "part of the pattern is based on `bom.serialNumber`'s pattern" - }, - "bomLink": { - "anyOf": [ - { - "title": "BOM-Link Document", - "$ref": "#/definitions/bomLinkDocumentType" - }, - { - "title": "BOM-Link Element", - "$ref": "#/definitions/bomLinkElementType" - } - ] - }, - "metadata": { - "type": "object", - "title": "BOM Metadata Object", - "additionalProperties": false, - "properties": { - "timestamp": { - "type": "string", - "format": "date-time", - "title": "Timestamp", - "description": "The date and time (timestamp) when the BOM was created." - }, - "lifecycles": { - "type": "array", - "title": "Lifecycles", - "description": "", - "items": { - "type": "object", - "title": "Lifecycle", - "description": "The product lifecycle(s) that this BOM represents.", - "oneOf": [ - { - "required": ["phase"], - "additionalProperties": false, - "properties": { - "phase": { - "type": "string", - "title": "Phase", - "description": "A pre-defined phase in the product lifecycle.\n\n* __design__ = BOM produced early in the development lifecycle containing inventory of components and services that are proposed or planned to be used. The inventory may need to be procured, retrieved, or resourced prior to use.\n* __pre-build__ = BOM consisting of information obtained prior to a build process and may contain source files and development artifacts and manifests. The inventory may need to be resolved and retrieved prior to use.\n* __build__ = BOM consisting of information obtained during a build process where component inventory is available for use. The precise versions of resolved components are usually available at this time as well as the provenance of where the components were retrieved from.\n* __post-build__ = BOM consisting of information obtained after a build process has completed and the resulting components(s) are available for further analysis. Built components may exist as the result of a CI/CD process, may have been installed or deployed to a system or device, and may need to be retrieved or extracted from the system or device.\n* __operations__ = BOM produced that represents inventory that is running and operational. This may include staging or production environments and will generally encompass multiple SBOMs describing the applications and operating system, along with HBOMs describing the hardware that makes up the system. 
Operations Bill of Materials (OBOM) can provide full-stack inventory of runtime environments, configurations, and additional dependencies.\n* __discovery__ = BOM consisting of information observed through network discovery providing point-in-time enumeration of embedded, on-premise, and cloud-native services such as server applications, connected devices, microservices, and serverless functions.\n* __decommission__ = BOM containing inventory that will be, or has been retired from operations.", - "enum": [ - "design", - "pre-build", - "build", - "post-build", - "operations", - "discovery", - "decommission" - ] - } - } - }, - { - "required": ["name"], - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "The name of the lifecycle phase" - }, - "description": { - "type": "string", - "title": "Description", - "description": "The description of the lifecycle phase" - } - } - } - ] - } - }, - "tools": { - "oneOf": [ - { - "type": "object", - "title": "Creation Tools", - "description": "The tool(s) used in the creation of the BOM.", - "additionalProperties": false, - "properties": { - "components": { - "type": "array", - "items": {"$ref": "#/definitions/component"}, - "uniqueItems": true, - "title": "Components", - "description": "A list of software and hardware components used as tools" - }, - "services": { - "type": "array", - "items": {"$ref": "#/definitions/service"}, - "uniqueItems": true, - "title": "Services", - "description": "A list of services used as tools. This may include microservices, function-as-a-service, and other types of network or intra-process services." - } - } - }, - { - "type": "array", - "title": "Creation Tools (legacy)", - "description": "[Deprecated] The tool(s) used in the creation of the BOM.", - "items": {"$ref": "#/definitions/tool"} - } - ] - }, - "authors" :{ - "type": "array", - "title": "Authors", - "description": "The person(s) who created the BOM. Authors are common in BOMs created through manual processes. BOMs created through automated means may not have authors.", - "items": {"$ref": "#/definitions/organizationalContact"} - }, - "component": { - "title": "Component", - "description": "The component that the BOM describes.", - "$ref": "#/definitions/component" - }, - "manufacture": { - "title": "Manufacture", - "description": "The organization that manufactured the component that the BOM describes.", - "$ref": "#/definitions/organizationalEntity" - }, - "supplier": { - "title": "Supplier", - "description": " The organization that supplied the component that the BOM describes. The supplier may often be the manufacturer, but may also be a distributor or repackager.", - "$ref": "#/definitions/organizationalEntity" - }, - "licenses": { - "title": "BOM License(s)", - "$ref": "#/definitions/licenseChoice" - }, - "properties": { - "type": "array", - "title": "Properties", - "description": "Provides the ability to document properties in a name-value store. This provides flexibility to include data not officially supported in the standard without having to use additional namespaces or create extensions. Unlike key-value stores, properties support duplicate names, each potentially having different values. Property names of interest to the general public are encouraged to be registered in the [CycloneDX Property Taxonomy](https://github.com/CycloneDX/cyclonedx-property-taxonomy). 
Formal registration is OPTIONAL.", - "items": {"$ref": "#/definitions/property"} - } - } - }, - "tool": { - "type": "object", - "title": "Tool", - "description": "[Deprecated] - DO NOT USE. This will be removed in a future version. This will be removed in a future version. Use component or service instead. Information about the automated or manual tool used", - "additionalProperties": false, - "properties": { - "vendor": { - "type": "string", - "title": "Tool Vendor", - "description": "The name of the vendor who created the tool" - }, - "name": { - "type": "string", - "title": "Tool Name", - "description": "The name of the tool" - }, - "version": { - "type": "string", - "title": "Tool Version", - "description": "The version of the tool" - }, - "hashes": { - "type": "array", - "items": {"$ref": "#/definitions/hash"}, - "title": "Hashes", - "description": "The hashes of the tool (if applicable)." - }, - "externalReferences": { - "type": "array", - "items": {"$ref": "#/definitions/externalReference"}, - "title": "External References", - "description": "External references provide a way to document systems, sites, and information that may be relevant, but are not included with the BOM. They may also establish specific relationships within or external to the BOM." - } - } - }, - "organizationalEntity": { - "type": "object", - "title": "Organizational Entity Object", - "description": "", - "additionalProperties": false, - "properties": { - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the object elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." - }, - "name": { - "type": "string", - "title": "Name", - "description": "The name of the organization", - "examples": [ - "Example Inc." - ] - }, - "url": { - "type": "array", - "items": { - "type": "string", - "format": "iri-reference" - }, - "title": "URL", - "description": "The URL of the organization. Multiple URLs are allowed.", - "examples": ["https://example.com"] - }, - "contact": { - "type": "array", - "title": "Contact", - "description": "A contact at the organization. Multiple contacts are allowed.", - "items": {"$ref": "#/definitions/organizationalContact"} - } - } - }, - "organizationalContact": { - "type": "object", - "title": "Organizational Contact Object", - "description": "", - "additionalProperties": false, - "properties": { - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the object elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." 
- }, - "name": { - "type": "string", - "title": "Name", - "description": "The name of a contact", - "examples": ["Contact name"] - }, - "email": { - "type": "string", - "format": "idn-email", - "title": "Email Address", - "description": "The email address of the contact.", - "examples": ["firstname.lastname@example.com"] - }, - "phone": { - "type": "string", - "title": "Phone", - "description": "The phone number of the contact.", - "examples": ["800-555-1212"] - } - } - }, - "component": { - "type": "object", - "title": "Component Object", - "required": [ - "type", - "name" - ], - "additionalProperties": false, - "properties": { - "type": { - "type": "string", - "enum": [ - "application", - "framework", - "library", - "container", - "platform", - "operating-system", - "device", - "device-driver", - "firmware", - "file", - "machine-learning-model", - "data" - ], - "title": "Component Type", - "description": "Specifies the type of component. For software components, classify as application if no more specific appropriate classification is available or cannot be determined for the component. Types include:\n\n* __application__ = A software application. Refer to [https://en.wikipedia.org/wiki/Application_software](https://en.wikipedia.org/wiki/Application_software) for information about applications.\n* __framework__ = A software framework. Refer to [https://en.wikipedia.org/wiki/Software_framework](https://en.wikipedia.org/wiki/Software_framework) for information on how frameworks vary slightly from libraries.\n* __library__ = A software library. Refer to [https://en.wikipedia.org/wiki/Library_(computing)](https://en.wikipedia.org/wiki/Library_(computing))\n for information about libraries. All third-party and open source reusable components will likely be a library. If the library also has key features of a framework, then it should be classified as a framework. If not, or is unknown, then specifying library is RECOMMENDED.\n* __container__ = A packaging and/or runtime format, not specific to any particular technology, which isolates software inside the container from software outside of a container through virtualization technology. Refer to [https://en.wikipedia.org/wiki/OS-level_virtualization](https://en.wikipedia.org/wiki/OS-level_virtualization)\n* __platform__ = A runtime environment which interprets or executes software. This may include runtimes such as those that execute bytecode or low-code/no-code application platforms.\n* __operating-system__ = A software operating system without regard to deployment model (i.e. installed on physical hardware, virtual machine, image, etc) Refer to [https://en.wikipedia.org/wiki/Operating_system](https://en.wikipedia.org/wiki/Operating_system)\n* __device__ = A hardware device such as a processor, or chip-set. A hardware device containing firmware SHOULD include a component for the physical hardware itself, and another component of type 'firmware' or 'operating-system' (whichever is relevant), describing information about the software running on the device.\n See also the list of [known device properties](https://github.com/CycloneDX/cyclonedx-property-taxonomy/blob/main/cdx/device.md).\n* __device-driver__ = A special type of software that operates or controls a particular type of device. Refer to [https://en.wikipedia.org/wiki/Device_driver](https://en.wikipedia.org/wiki/Device_driver)\n* __firmware__ = A special type of software that provides low-level control over a devices hardware. 
Refer to [https://en.wikipedia.org/wiki/Firmware](https://en.wikipedia.org/wiki/Firmware)\n* __file__ = A computer file. Refer to [https://en.wikipedia.org/wiki/Computer_file](https://en.wikipedia.org/wiki/Computer_file) for information about files.\n* __machine-learning-model__ = A model based on training data that can make predictions or decisions without being explicitly programmed to do so.\n* __data__ = A collection of discrete values that convey information.", - "examples": ["library"] - }, - "mime-type": { - "type": "string", - "title": "Mime-Type", - "description": "The optional mime-type of the component. When used on file components, the mime-type can provide additional context about the kind of file being represented such as an image, font, or executable. Some library or framework components may also have an associated mime-type.", - "examples": ["image/jpeg"], - "pattern": "^[-+a-z0-9.]+/[-+a-z0-9.]+$" - }, - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the component elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." - }, - "supplier": { - "title": "Component Supplier", - "description": " The organization that supplied the component. The supplier may often be the manufacturer, but may also be a distributor or repackager.", - "$ref": "#/definitions/organizationalEntity" - }, - "author": { - "type": "string", - "title": "Component Author", - "description": "The person(s) or organization(s) that authored the component", - "examples": ["Acme Inc"] - }, - "publisher": { - "type": "string", - "title": "Component Publisher", - "description": "The person(s) or organization(s) that published the component", - "examples": ["Acme Inc"] - }, - "group": { - "type": "string", - "title": "Component Group", - "description": "The grouping name or identifier. This will often be a shortened, single name of the company or project that produced the component, or the source package or domain name. Whitespace and special characters should be avoided. Examples include: apache, org.apache.commons, and apache.org.", - "examples": ["com.acme"] - }, - "name": { - "type": "string", - "title": "Component Name", - "description": "The name of the component. This will often be a shortened, single name of the component. Examples: commons-lang3 and jquery", - "examples": ["tomcat-catalina"] - }, - "version": { - "type": "string", - "title": "Component Version", - "description": "The component version. The version should ideally comply with semantic versioning but is not enforced.", - "examples": ["9.0.14"] - }, - "description": { - "type": "string", - "title": "Component Description", - "description": "Specifies a description for the component" - }, - "scope": { - "type": "string", - "enum": [ - "required", - "optional", - "excluded" - ], - "title": "Component Scope", - "description": "Specifies the scope of the component. 
If scope is not specified, 'required' scope SHOULD be assumed by the consumer of the BOM.", - "default": "required" - }, - "hashes": { - "type": "array", - "title": "Component Hashes", - "items": {"$ref": "#/definitions/hash"} - }, - "licenses": { - "$ref": "#/definitions/licenseChoice", - "title": "Component License(s)" - }, - "copyright": { - "type": "string", - "title": "Component Copyright", - "description": "A copyright notice informing users of the underlying claims to copyright ownership in a published work.", - "examples": ["Acme Inc"] - }, - "cpe": { - "type": "string", - "title": "Component Common Platform Enumeration (CPE)", - "description": "Specifies a well-formed CPE name that conforms to the CPE 2.2 or 2.3 specification. See [https://nvd.nist.gov/products/cpe](https://nvd.nist.gov/products/cpe)", - "examples": ["cpe:2.3:a:acme:component_framework:-:*:*:*:*:*:*:*"] - }, - "purl": { - "type": "string", - "title": "Component Package URL (purl)", - "description": "Specifies the package-url (purl). The purl, if specified, MUST be valid and conform to the specification defined at: [https://github.com/package-url/purl-spec](https://github.com/package-url/purl-spec)", - "examples": ["pkg:maven/com.acme/tomcat-catalina@9.0.14?packaging=jar"] - }, - "swid": { - "$ref": "#/definitions/swid", - "title": "SWID Tag", - "description": "Specifies metadata and content for [ISO-IEC 19770-2 Software Identification (SWID) Tags](https://www.iso.org/standard/65666.html)." - }, - "modified": { - "type": "boolean", - "title": "Component Modified From Original", - "description": "[Deprecated] - DO NOT USE. This will be removed in a future version. Use the pedigree element instead to supply information on exactly how the component was modified. A boolean value indicating if the component has been modified from the original. A value of true indicates the component is a derivative of the original. A value of false indicates the component has not been modified from the original." - }, - "pedigree": { - "type": "object", - "title": "Component Pedigree", - "description": "Component pedigree is a way to document complex supply chain scenarios where components are created, distributed, modified, redistributed, combined with other components, etc. Pedigree supports viewing this complex chain from the beginning, the end, or anywhere in the middle. It also provides a way to document variants where the exact relation may not be known.", - "additionalProperties": false, - "properties": { - "ancestors": { - "type": "array", - "title": "Ancestors", - "description": "Describes zero or more components in which a component is derived from. This is commonly used to describe forks from existing projects where the forked version contains a ancestor node containing the original component it was forked from. For example, Component A is the original component. Component B is the component being used and documented in the BOM. However, Component B contains a pedigree node with a single ancestor documenting Component A - the original component from which Component B is derived from.", - "items": {"$ref": "#/definitions/component"} - }, - "descendants": { - "type": "array", - "title": "Descendants", - "description": "Descendants are the exact opposite of ancestors. 
This provides a way to document all forks (and their forks) of an original or root component.", - "items": {"$ref": "#/definitions/component"} - }, - "variants": { - "type": "array", - "title": "Variants", - "description": "Variants describe relations where the relationship between the components are not known. For example, if Component A contains nearly identical code to Component B. They are both related, but it is unclear if one is derived from the other, or if they share a common ancestor.", - "items": {"$ref": "#/definitions/component"} - }, - "commits": { - "type": "array", - "title": "Commits", - "description": "A list of zero or more commits which provide a trail describing how the component deviates from an ancestor, descendant, or variant.", - "items": {"$ref": "#/definitions/commit"} - }, - "patches": { - "type": "array", - "title": "Patches", - "description": ">A list of zero or more patches describing how the component deviates from an ancestor, descendant, or variant. Patches may be complimentary to commits or may be used in place of commits.", - "items": {"$ref": "#/definitions/patch"} - }, - "notes": { - "type": "string", - "title": "Notes", - "description": "Notes, observations, and other non-structured commentary describing the components pedigree." - } - } - }, - "externalReferences": { - "type": "array", - "items": {"$ref": "#/definitions/externalReference"}, - "title": "External References", - "description": "External references provide a way to document systems, sites, and information that may be relevant, but are not included with the BOM. They may also establish specific relationships within or external to the BOM." - }, - "components": { - "type": "array", - "items": {"$ref": "#/definitions/component"}, - "uniqueItems": true, - "title": "Components", - "description": "A list of software and hardware components included in the parent component. This is not a dependency tree. It provides a way to specify a hierarchical representation of component assemblies, similar to system → subsystem → parts assembly in physical supply chains." - }, - "evidence": { - "$ref": "#/definitions/componentEvidence", - "title": "Evidence", - "description": "Provides the ability to document evidence collected through various forms of extraction or analysis." - }, - "releaseNotes": { - "$ref": "#/definitions/releaseNotes", - "title": "Release notes", - "description": "Specifies optional release notes." - }, - "modelCard": { - "$ref": "#/definitions/modelCard", - "title": "Machine Learning Model Card" - }, - "data": { - "type": "array", - "items": {"$ref": "#/definitions/componentData"}, - "title": "Data", - "description": "This object SHOULD be specified for any component of type `data` and MUST NOT be specified for other component types." - }, - "properties": { - "type": "array", - "title": "Properties", - "description": "Provides the ability to document properties in a name-value store. This provides flexibility to include data not officially supported in the standard without having to use additional namespaces or create extensions. Unlike key-value stores, properties support duplicate names, each potentially having different values. Property names of interest to the general public are encouraged to be registered in the [CycloneDX Property Taxonomy](https://github.com/CycloneDX/cyclonedx-property-taxonomy). 
Formal registration is OPTIONAL.", - "items": {"$ref": "#/definitions/property"} - }, - "signature": { - "$ref": "#/definitions/signature", - "title": "Signature", - "description": "Enveloped signature in [JSON Signature Format (JSF)](https://cyberphone.github.io/doc/security/jsf.html)." - } - } - }, - "swid": { - "type": "object", - "title": "SWID Tag", - "description": "Specifies metadata and content for ISO-IEC 19770-2 Software Identification (SWID) Tags.", - "required": [ - "tagId", - "name" - ], - "additionalProperties": false, - "properties": { - "tagId": { - "type": "string", - "title": "Tag ID", - "description": "Maps to the tagId of a SoftwareIdentity." - }, - "name": { - "type": "string", - "title": "Name", - "description": "Maps to the name of a SoftwareIdentity." - }, - "version": { - "type": "string", - "title": "Version", - "default": "0.0", - "description": "Maps to the version of a SoftwareIdentity." - }, - "tagVersion": { - "type": "integer", - "title": "Tag Version", - "default": 0, - "description": "Maps to the tagVersion of a SoftwareIdentity." - }, - "patch": { - "type": "boolean", - "title": "Patch", - "default": false, - "description": "Maps to the patch of a SoftwareIdentity." - }, - "text": { - "title": "Attachment text", - "description": "Specifies the metadata and content of the SWID tag.", - "$ref": "#/definitions/attachment" - }, - "url": { - "type": "string", - "title": "URL", - "description": "The URL to the SWID file.", - "format": "iri-reference" - } - } - }, - "attachment": { - "type": "object", - "title": "Attachment", - "description": "Specifies the metadata and content for an attachment.", - "required": [ - "content" - ], - "additionalProperties": false, - "properties": { - "contentType": { - "type": "string", - "title": "Content-Type", - "description": "Specifies the content type of the text. Defaults to text/plain if not specified.", - "default": "text/plain" - }, - "encoding": { - "type": "string", - "title": "Encoding", - "description": "Specifies the optional encoding the text is represented in.", - "enum": [ - "base64" - ] - }, - "content": { - "type": "string", - "title": "Attachment Text", - "description": "The attachment data. Proactive controls such as input validation and sanitization should be employed to prevent misuse of attachment text." - } - } - }, - "hash": { - "type": "object", - "title": "Hash Objects", - "required": [ - "alg", - "content" - ], - "additionalProperties": false, - "properties": { - "alg": { - "$ref": "#/definitions/hash-alg" - }, - "content": { - "$ref": "#/definitions/hash-content" - } - } - }, - "hash-alg": { - "type": "string", - "enum": [ - "MD5", - "SHA-1", - "SHA-256", - "SHA-384", - "SHA-512", - "SHA3-256", - "SHA3-384", - "SHA3-512", - "BLAKE2b-256", - "BLAKE2b-384", - "BLAKE2b-512", - "BLAKE3" - ], - "title": "Hash Algorithm" - }, - "hash-content": { - "type": "string", - "title": "Hash Content (value)", - "examples": ["3942447fac867ae5cdb3229b658f4d48"], - "pattern": "^([a-fA-F0-9]{32}|[a-fA-F0-9]{40}|[a-fA-F0-9]{64}|[a-fA-F0-9]{96}|[a-fA-F0-9]{128})$" - }, - "license": { - "type": "object", - "title": "License Object", - "oneOf": [ - { - "required": ["id"] - }, - { - "required": ["name"] - } - ], - "additionalProperties": false, - "properties": { - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the license elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." 
- }, - "id": { - "$ref": "spdx.schema.json", - "title": "License ID (SPDX)", - "description": "A valid SPDX license ID", - "examples": ["Apache-2.0"] - }, - "name": { - "type": "string", - "title": "License Name", - "description": "If SPDX does not define the license used, this field may be used to provide the license name", - "examples": ["Acme Software License"] - }, - "text": { - "title": "License text", - "description": "An optional way to include the textual content of a license.", - "$ref": "#/definitions/attachment" - }, - "url": { - "type": "string", - "title": "License URL", - "description": "The URL to the license file. If specified, a 'license' externalReference should also be specified for completeness", - "examples": ["https://www.apache.org/licenses/LICENSE-2.0.txt"], - "format": "iri-reference" - }, - "licensing": { - "type": "object", - "title": "Licensing information", - "description": "Licensing details describing the licensor/licensee, license type, renewal and expiration dates, and other important metadata", - "additionalProperties": false, - "properties": { - "altIds": { - "type": "array", - "title": "Alternate License Identifiers", - "description": "License identifiers that may be used to manage licenses and their lifecycle", - "items": { - "type": "string" - } - }, - "licensor": { - "title": "Licensor", - "description": "The individual or organization that grants a license to another individual or organization", - "type": "object", - "additionalProperties": false, - "properties": { - "organization": { - "title": "Licensor (Organization)", - "description": "The organization that granted the license", - "$ref": "#/definitions/organizationalEntity" - }, - "individual": { - "title": "Licensor (Individual)", - "description": "The individual, not associated with an organization, that granted the license", - "$ref": "#/definitions/organizationalContact" - } - }, - "oneOf":[ - { - "required": ["organization"] - }, - { - "required": ["individual"] - } - ] - }, - "licensee": { - "title": "Licensee", - "description": "The individual or organization for which a license was granted to", - "type": "object", - "additionalProperties": false, - "properties": { - "organization": { - "title": "Licensee (Organization)", - "description": "The organization that was granted the license", - "$ref": "#/definitions/organizationalEntity" - }, - "individual": { - "title": "Licensee (Individual)", - "description": "The individual, not associated with an organization, that was granted the license", - "$ref": "#/definitions/organizationalContact" - } - }, - "oneOf":[ - { - "required": ["organization"] - }, - { - "required": ["individual"] - } - ] - }, - "purchaser": { - "title": "Purchaser", - "description": "The individual or organization that purchased the license", - "type": "object", - "additionalProperties": false, - "properties": { - "organization": { - "title": "Purchaser (Organization)", - "description": "The organization that purchased the license", - "$ref": "#/definitions/organizationalEntity" - }, - "individual": { - "title": "Purchaser (Individual)", - "description": "The individual, not associated with an organization, that purchased the license", - "$ref": "#/definitions/organizationalContact" - } - }, - "oneOf":[ - { - "required": ["organization"] - }, - { - "required": ["individual"] - } - ] - }, - "purchaseOrder": { - "type": "string", - "title": "Purchase Order", - "description": "The purchase order identifier the purchaser sent to a supplier or vendor to authorize a purchase" - 
}, - "licenseTypes": { - "type": "array", - "title": "License Type", - "description": "The type of license(s) that was granted to the licensee\n\n* __academic__ = A license that grants use of software solely for the purpose of education or research.\n* __appliance__ = A license covering use of software embedded in a specific piece of hardware.\n* __client-access__ = A Client Access License (CAL) allows client computers to access services provided by server software.\n* __concurrent-user__ = A Concurrent User license (aka floating license) limits the number of licenses for a software application and licenses are shared among a larger number of users.\n* __core-points__ = A license where the core of a computer's processor is assigned a specific number of points.\n* __custom-metric__ = A license for which consumption is measured by non-standard metrics.\n* __device__ = A license that covers a defined number of installations on computers and other types of devices.\n* __evaluation__ = A license that grants permission to install and use software for trial purposes.\n* __named-user__ = A license that grants access to the software to one or more pre-defined users.\n* __node-locked__ = A license that grants access to the software on one or more pre-defined computers or devices.\n* __oem__ = An Original Equipment Manufacturer license that is delivered with hardware, cannot be transferred to other hardware, and is valid for the life of the hardware.\n* __perpetual__ = A license where the software is sold on a one-time basis and the licensee can use a copy of the software indefinitely.\n* __processor-points__ = A license where each installation consumes points per processor.\n* __subscription__ = A license where the licensee pays a fee to use the software or service.\n* __user__ = A license that grants access to the software or service by a specified number of users.\n* __other__ = Another license type.\n", - "items": { - "type": "string", - "enum": [ - "academic", - "appliance", - "client-access", - "concurrent-user", - "core-points", - "custom-metric", - "device", - "evaluation", - "named-user", - "node-locked", - "oem", - "perpetual", - "processor-points", - "subscription", - "user", - "other" - ] - } - }, - "lastRenewal": { - "type": "string", - "format": "date-time", - "title": "Last Renewal", - "description": "The timestamp indicating when the license was last renewed. For new purchases, this is often the purchase or acquisition date. For non-perpetual licenses or subscriptions, this is the timestamp of when the license was last renewed." - }, - "expiration": { - "type": "string", - "format": "date-time", - "title": "Expiration", - "description": "The timestamp indicating when the current license expires (if applicable)." - } - } - }, - "properties": { - "type": "array", - "title": "Properties", - "description": "Provides the ability to document properties in a name-value store. This provides flexibility to include data not officially supported in the standard without having to use additional namespaces or create extensions. Unlike key-value stores, properties support duplicate names, each potentially having different values. Property names of interest to the general public are encouraged to be registered in the [CycloneDX Property Taxonomy](https://github.com/CycloneDX/cyclonedx-property-taxonomy). 
Formal registration is OPTIONAL.", - "items": {"$ref": "#/definitions/property"} - } - } - }, - "licenseChoice": { - "title": "License Choice", - "description": "EITHER (list of SPDX licenses and/or named licenses) OR (tuple of one SPDX License Expression)", - "type": "array", - "oneOf": [ - { - "title": "Multiple licenses", - "description": "A list of SPDX licenses and/or named licenses.", - "type": "array", - "items": { - "type": "object", - "required": ["license"], - "additionalProperties": false, - "properties": { - "license": {"$ref": "#/definitions/license"} - } - } - }, - { - "title": "SPDX License Expression", - "description": "A tuple of exactly one SPDX License Expression.", - "type": "array", - "additionalItems": false, - "minItems": 1, - "maxItems": 1, - "items": [{ - "type": "object", - "additionalProperties": false, - "required": ["expression"], - "properties": { - "expression": { - "type": "string", - "title": "SPDX License Expression", - "examples": [ - "Apache-2.0 AND (MIT OR GPL-2.0-only)", - "GPL-3.0-only WITH Classpath-exception-2.0" - ] - }, - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the license elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." - } - } - }] - } - ] - }, - "commit": { - "type": "object", - "title": "Commit", - "description": "Specifies an individual commit", - "additionalProperties": false, - "properties": { - "uid": { - "type": "string", - "title": "UID", - "description": "A unique identifier of the commit. This may be version control specific. For example, Subversion uses revision numbers whereas git uses commit hashes." - }, - "url": { - "type": "string", - "title": "URL", - "description": "The URL to the commit. This URL will typically point to a commit in a version control system.", - "format": "iri-reference" - }, - "author": { - "title": "Author", - "description": "The author who created the changes in the commit", - "$ref": "#/definitions/identifiableAction" - }, - "committer": { - "title": "Committer", - "description": "The person who committed or pushed the commit", - "$ref": "#/definitions/identifiableAction" - }, - "message": { - "type": "string", - "title": "Message", - "description": "The text description of the contents of the commit" - } - } - }, - "patch": { - "type": "object", - "title": "Patch", - "description": "Specifies an individual patch", - "required": [ - "type" - ], - "additionalProperties": false, - "properties": { - "type": { - "type": "string", - "enum": [ - "unofficial", - "monkey", - "backport", - "cherry-pick" - ], - "title": "Type", - "description": "Specifies the purpose for the patch including the resolution of defects, security issues, or new behavior or functionality.\n\n* __unofficial__ = A patch which is not developed by the creators or maintainers of the software being patched. Refer to [https://en.wikipedia.org/wiki/Unofficial_patch](https://en.wikipedia.org/wiki/Unofficial_patch)\n* __monkey__ = A patch which dynamically modifies runtime behavior. Refer to [https://en.wikipedia.org/wiki/Monkey_patch](https://en.wikipedia.org/wiki/Monkey_patch)\n* __backport__ = A patch which takes code from a newer version of software and applies it to older versions of the same software. Refer to [https://en.wikipedia.org/wiki/Backporting](https://en.wikipedia.org/wiki/Backporting)\n* __cherry-pick__ = A patch created by selectively applying commits from other versions or branches of the same software." 
- }, - "diff": { - "title": "Diff", - "description": "The patch file (or diff) that show changes. Refer to [https://en.wikipedia.org/wiki/Diff](https://en.wikipedia.org/wiki/Diff)", - "$ref": "#/definitions/diff" - }, - "resolves": { - "type": "array", - "items": {"$ref": "#/definitions/issue"}, - "title": "Resolves", - "description": "A collection of issues the patch resolves" - } - } - }, - "diff": { - "type": "object", - "title": "Diff", - "description": "The patch file (or diff) that show changes. Refer to https://en.wikipedia.org/wiki/Diff", - "additionalProperties": false, - "properties": { - "text": { - "title": "Diff text", - "description": "Specifies the optional text of the diff", - "$ref": "#/definitions/attachment" - }, - "url": { - "type": "string", - "title": "URL", - "description": "Specifies the URL to the diff", - "format": "iri-reference" - } - } - }, - "issue": { - "type": "object", - "title": "Diff", - "description": "An individual issue that has been resolved.", - "required": [ - "type" - ], - "additionalProperties": false, - "properties": { - "type": { - "type": "string", - "enum": [ - "defect", - "enhancement", - "security" - ], - "title": "Type", - "description": "Specifies the type of issue" - }, - "id": { - "type": "string", - "title": "ID", - "description": "The identifier of the issue assigned by the source of the issue" - }, - "name": { - "type": "string", - "title": "Name", - "description": "The name of the issue" - }, - "description": { - "type": "string", - "title": "Description", - "description": "A description of the issue" - }, - "source": { - "type": "object", - "title": "Source", - "description": "The source of the issue where it is documented", - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "The name of the source. For example 'National Vulnerability Database', 'NVD', and 'Apache'" - }, - "url": { - "type": "string", - "title": "URL", - "description": "The url of the issue documentation as provided by the source", - "format": "iri-reference" - } - } - }, - "references": { - "type": "array", - "items": { - "type": "string", - "format": "iri-reference" - }, - "title": "References", - "description": "A collection of URL's for reference. Multiple URLs are allowed.", - "examples": ["https://example.com"] - } - } - }, - "identifiableAction": { - "type": "object", - "title": "Identifiable Action", - "description": "Specifies an individual commit", - "additionalProperties": false, - "properties": { - "timestamp": { - "type": "string", - "format": "date-time", - "title": "Timestamp", - "description": "The timestamp in which the action occurred" - }, - "name": { - "type": "string", - "title": "Name", - "description": "The name of the individual who performed the action" - }, - "email": { - "type": "string", - "format": "idn-email", - "title": "E-mail", - "description": "The email address of the individual who performed the action" - } - } - }, - "externalReference": { - "type": "object", - "title": "External Reference", - "description": "External references provide a way to document systems, sites, and information that may be relevant, but are not included with the BOM. 
They may also establish specific relationships within or external to the BOM.", - "required": [ - "url", - "type" - ], - "additionalProperties": false, - "properties": { - "url": { - "anyOf": [ - { - "title": "URL", - "type": "string", - "format": "iri-reference" - }, - { - "title": "BOM-Link", - "$ref": "#/definitions/bomLink" - } - ], - "title": "URL", - "description": "The URI (URL or URN) to the external reference. External references are URIs and therefore can accept any URL scheme including https ([RFC-7230](https://www.ietf.org/rfc/rfc7230.txt)), mailto ([RFC-2368](https://www.ietf.org/rfc/rfc2368.txt)), tel ([RFC-3966](https://www.ietf.org/rfc/rfc3966.txt)), and dns ([RFC-4501](https://www.ietf.org/rfc/rfc4501.txt)). External references may also include formally registered URNs such as [CycloneDX BOM-Link](https://cyclonedx.org/capabilities/bomlink/) to reference CycloneDX BOMs or any object within a BOM. BOM-Link transforms applicable external references into relationships that can be expressed in a BOM or across BOMs." - }, - "comment": { - "type": "string", - "title": "Comment", - "description": "An optional comment describing the external reference" - }, - "type": { - "type": "string", - "title": "Type", - "description": "Specifies the type of external reference.\n\n* __vcs__ = Version Control System\n* __issue-tracker__ = Issue or defect tracking system, or an Application Lifecycle Management (ALM) system\n* __website__ = Website\n* __advisories__ = Security advisories\n* __bom__ = Bill of Materials (SBOM, OBOM, HBOM, SaaSBOM, etc)\n* __mailing-list__ = Mailing list or discussion group\n* __social__ = Social media account\n* __chat__ = Real-time chat platform\n* __documentation__ = Documentation, guides, or how-to instructions\n* __support__ = Community or commercial support\n* __distribution__ = Direct or repository download location\n* __distribution-intake__ = The location where a component was published to. This is often the same as \"distribution\" but may also include specialized publishing processes that act as an intermediary\n* __license__ = The URL to the license file. If a license URL has been defined in the license node, it should also be defined as an external reference for completeness\n* __build-meta__ = Build-system specific meta file (i.e. pom.xml, package.json, .nuspec, etc)\n* __build-system__ = URL to an automated build system\n* __release-notes__ = URL to release notes\n* __security-contact__ = Specifies a way to contact the maintainer, supplier, or provider in the event of a security incident. 
Common URIs include links to a disclosure procedure, a mailto (RFC-2368) that specifies an email address, a tel (RFC-3966) that specifies a phone number, or dns (RFC-4501) that specifies the records containing DNS Security TXT\n* __model-card__ = A model card describes the intended uses of a machine learning model, potential limitations, biases, ethical considerations, training parameters, datasets used to train the model, performance metrics, and other relevant data useful for ML transparency\n* __log__ = A record of events that occurred in a computer system or application, such as problems, errors, or information on current operations\n* __configuration__ = Parameters or settings that may be used by other components or services\n* __evidence__ = Information used to substantiate a claim\n* __formulation__ = Describes how a component or service was manufactured or deployed\n* __attestation__ = Human or machine-readable statements containing facts, evidence, or testimony\n* __threat-model__ = An enumeration of identified weaknesses, threats, and countermeasures, dataflow diagram (DFD), attack tree, and other supporting documentation in human-readable or machine-readable format\n* __adversary-model__ = The defined assumptions, goals, and capabilities of an adversary.\n* __risk-assessment__ = Identifies and analyzes the potential of future events that may negatively impact individuals, assets, and/or the environment. Risk assessments may also include judgments on the tolerability of each risk.\n* __vulnerability-assertion__ = A Vulnerability Disclosure Report (VDR) which asserts the known and previously unknown vulnerabilities that affect a component, service, or product including the analysis and findings describing the impact (or lack of impact) that the reported vulnerability has on a component, service, or product.\n* __exploitability-statement__ = A Vulnerability Exploitability eXchange (VEX) which asserts the known vulnerabilities that do not affect a product, product family, or organization, and optionally the ones that do. 
The VEX should include the analysis and findings describing the impact (or lack of impact) that the reported vulnerability has on the product, product family, or organization.\n* __pentest-report__ = Results from an authorized simulated cyberattack on a component or service, otherwise known as a penetration test\n* __static-analysis-report__ = SARIF or proprietary machine or human-readable report for which static analysis has identified code quality, security, and other potential issues with the source code\n* __dynamic-analysis-report__ = Dynamic analysis report that has identified issues such as vulnerabilities and misconfigurations\n* __runtime-analysis-report__ = Report generated by analyzing the call stack of a running application\n* __component-analysis-report__ = Report generated by Software Composition Analysis (SCA), container analysis, or other forms of component analysis\n* __maturity-report__ = Report containing a formal assessment of an organization, business unit, or team against a maturity model\n* __certification-report__ = Industry, regulatory, or other certification from an accredited (if applicable) certification body\n* __quality-metrics__ = Report or system in which quality metrics can be obtained\n* __codified-infrastructure__ = Code or configuration that defines and provisions virtualized infrastructure, commonly referred to as Infrastructure as Code (IaC)\n* __poam__ = Plans of Action and Milestones (POAM) compliment an \"attestation\" external reference. POAM is defined by NIST as a \"document that identifies tasks needing to be accomplished. It details resources required to accomplish the elements of the plan, any milestones in meeting the tasks and scheduled completion dates for the milestones\".\n* __other__ = Use this if no other types accurately describe the purpose of the external reference", - "enum": [ - "vcs", - "issue-tracker", - "website", - "advisories", - "bom", - "mailing-list", - "social", - "chat", - "documentation", - "support", - "distribution", - "distribution-intake", - "license", - "build-meta", - "build-system", - "release-notes", - "security-contact", - "model-card", - "log", - "configuration", - "evidence", - "formulation", - "attestation", - "threat-model", - "adversary-model", - "risk-assessment", - "vulnerability-assertion", - "exploitability-statement", - "pentest-report", - "static-analysis-report", - "dynamic-analysis-report", - "runtime-analysis-report", - "component-analysis-report", - "maturity-report", - "certification-report", - "codified-infrastructure", - "quality-metrics", - "poam", - "other" - ] - }, - "hashes": { - "type": "array", - "items": {"$ref": "#/definitions/hash"}, - "title": "Hashes", - "description": "The hashes of the external reference (if applicable)." - } - } - }, - "dependency": { - "type": "object", - "title": "Dependency", - "description": "Defines the direct dependencies of a component or service. Components or services that do not have their own dependencies MUST be declared as empty elements within the graph. Components or services that are not represented in the dependency graph MAY have unknown dependencies. It is RECOMMENDED that implementations assume this to be opaque and not an indicator of a object being dependency-free. 
It is RECOMMENDED to leverage compositions to indicate unknown dependency graphs.", - "required": [ - "ref" - ], - "additionalProperties": false, - "properties": { - "ref": { - "$ref": "#/definitions/refLinkType", - "title": "Reference", - "description": "References a component or service by its bom-ref attribute" - }, - "dependsOn": { - "type": "array", - "uniqueItems": true, - "items": { - "$ref": "#/definitions/refLinkType" - }, - "title": "Depends On", - "description": "The bom-ref identifiers of the components or services that are dependencies of this dependency object." - } - } - }, - "service": { - "type": "object", - "title": "Service Object", - "required": [ - "name" - ], - "additionalProperties": false, - "properties": { - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the service elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." - }, - "provider": { - "title": "Provider", - "description": "The organization that provides the service.", - "$ref": "#/definitions/organizationalEntity" - }, - "group": { - "type": "string", - "title": "Service Group", - "description": "The grouping name, namespace, or identifier. This will often be a shortened, single name of the company or project that produced the service or domain name. Whitespace and special characters should be avoided.", - "examples": ["com.acme"] - }, - "name": { - "type": "string", - "title": "Service Name", - "description": "The name of the service. This will often be a shortened, single name of the service.", - "examples": ["ticker-service"] - }, - "version": { - "type": "string", - "title": "Service Version", - "description": "The service version.", - "examples": ["1.0.0"] - }, - "description": { - "type": "string", - "title": "Service Description", - "description": "Specifies a description for the service" - }, - "endpoints": { - "type": "array", - "items": { - "type": "string", - "format": "iri-reference" - }, - "title": "Endpoints", - "description": "The endpoint URIs of the service. Multiple endpoints are allowed.", - "examples": ["https://example.com/api/v1/ticker"] - }, - "authenticated": { - "type": "boolean", - "title": "Authentication Required", - "description": "A boolean value indicating if the service requires authentication. A value of true indicates the service requires authentication prior to use. A value of false indicates the service does not require authentication." - }, - "x-trust-boundary": { - "type": "boolean", - "title": "Crosses Trust Boundary", - "description": "A boolean value indicating if use of the service crosses a trust zone or boundary. A value of true indicates that by using the service, a trust boundary is crossed. A value of false indicates that by using the service, a trust boundary is not crossed." - }, - "trustZone": { - "type": "string", - "title": "Trust Zone", - "description": "The name of the trust zone the service resides in." - }, - "data": { - "type": "array", - "items": {"$ref": "#/definitions/serviceData"}, - "title": "Data", - "description": "Specifies information about the data including the directional flow of data and the data classification." 
-        },
-        "licenses": {
-          "$ref": "#/definitions/licenseChoice",
-          "title": "Component License(s)"
-        },
-        "externalReferences": {
-          "type": "array",
-          "items": {"$ref": "#/definitions/externalReference"},
-          "title": "External References",
-          "description": "External references provide a way to document systems, sites, and information that may be relevant, but are not included with the BOM. They may also establish specific relationships within or external to the BOM."
-        },
-        "services": {
-          "type": "array",
-          "items": {"$ref": "#/definitions/service"},
-          "uniqueItems": true,
-          "title": "Services",
-          "description": "A list of services included or deployed behind the parent service. This is not a dependency tree. It provides a way to specify a hierarchical representation of service assemblies."
-        },
-        "releaseNotes": {
-          "$ref": "#/definitions/releaseNotes",
-          "title": "Release notes",
-          "description": "Specifies optional release notes."
-        },
-        "properties": {
-          "type": "array",
-          "title": "Properties",
-          "description": "Provides the ability to document properties in a name-value store. This provides flexibility to include data not officially supported in the standard without having to use additional namespaces or create extensions. Unlike key-value stores, properties support duplicate names, each potentially having different values. Property names of interest to the general public are encouraged to be registered in the [CycloneDX Property Taxonomy](https://github.com/CycloneDX/cyclonedx-property-taxonomy). Formal registration is OPTIONAL.",
-          "items": {"$ref": "#/definitions/property"}
-        },
-        "signature": {
-          "$ref": "#/definitions/signature",
-          "title": "Signature",
-          "description": "Enveloped signature in [JSON Signature Format (JSF)](https://cyberphone.github.io/doc/security/jsf.html)."
-        }
-      }
-    },
-    "serviceData": {
-      "type": "object",
-      "title": "Service Data",
-      "required": [
-        "flow",
-        "classification"
-      ],
-      "additionalProperties": false,
-      "properties": {
-        "flow": {
-          "$ref": "#/definitions/dataFlowDirection",
-          "title": "Directional Flow",
-          "description": "Specifies the flow direction of the data. Direction is relative to the service. Inbound flow states that data enters the service. Outbound flow states that data leaves the service. Bi-directional states that data flows both ways, and unknown states that the direction is not known."
- }, - "classification": { - "$ref": "#/definitions/dataClassification" - }, - "name": { - "type": "string", - "title": "Name", - "description": "Name for the defined data", - "examples": [ - "Credit card reporting" - ] - }, - "description": { - "type": "string", - "title": "Description", - "description": "Short description of the data content and usage", - "examples": [ - "Credit card information being exchanged in between the web app and the database" - ] - }, - "governance": { - "type": "object", - "title": "Data Governance", - "$ref": "#/definitions/dataGovernance" - }, - "source": { - "type": "array", - "items": { - "anyOf": [ - { - "title": "URL", - "type": "string", - "format": "iri-reference" - }, - { - "title": "BOM-Link Element", - "$ref": "#/definitions/bomLinkElementType" - } - ] - }, - "title": "Source", - "description": "The URI, URL, or BOM-Link of the components or services the data came in from" - }, - "destination": { - "type": "array", - "items": { - "anyOf": [ - { - "title": "URL", - "type": "string", - "format": "iri-reference" - }, - { - "title": "BOM-Link Element", - "$ref": "#/definitions/bomLinkElementType" - } - ] - }, - "title": "Destination", - "description": "The URI, URL, or BOM-Link of the components or services the data is sent to" - } - } - }, - "dataFlowDirection": { - "type": "string", - "enum": [ - "inbound", - "outbound", - "bi-directional", - "unknown" - ], - "title": "Data flow direction", - "description": "Specifies the flow direction of the data. Direction is relative to the service. Inbound flow states that data enters the service. Outbound flow states that data leaves the service. Bi-directional states that data flows both ways, and unknown states that the direction is not known." - }, - - "copyright": { - "type": "object", - "title": "Copyright", - "required": [ - "text" - ], - "additionalProperties": false, - "properties": { - "text": { - "type": "string", - "title": "Copyright Text" - } - } - }, - "componentEvidence": { - "type": "object", - "title": "Evidence", - "description": "Provides the ability to document evidence collected through various forms of extraction or analysis.", - "additionalProperties": false, - "properties": { - "identity": { - "type": "object", - "description": "Evidence that substantiates the identity of a component.", - "required": [ "field" ], - "additionalProperties": false, - "properties": { - "field": { - "type": "string", - "enum": [ - "group", "name", "version", "purl", "cpe", "swid", "hash" - ], - "title": "Field", - "description": "The identity field of the component which the evidence describes." - }, - "confidence": { - "type": "number", - "minimum": 0, - "maximum": 1, - "title": "Confidence", - "description": "The overall confidence of the evidence from 0 - 1, where 1 is 100% confidence." 
- }, - "methods": { - "type": "array", - "title": "Methods", - "description": "The methods used to extract and/or analyze the evidence.", - "items": { - "type": "object", - "required": [ - "technique" , - "confidence" - ], - "additionalProperties": false, - "properties": { - "technique": { - "title": "Technique", - "description": "The technique used in this method of analysis.", - "type": "string", - "enum": [ - "source-code-analysis", - "binary-analysis", - "manifest-analysis", - "ast-fingerprint", - "hash-comparison", - "instrumentation", - "dynamic-analysis", - "filename", - "attestation", - "other" - ] - }, - "confidence": { - "type": "number", - "minimum": 0, - "maximum": 1, - "title": "Confidence", - "description": "The confidence of the evidence from 0 - 1, where 1 is 100% confidence. Confidence is specific to the technique used. Each technique of analysis can have independent confidence." - }, - "value": { - "type": "string", - "title": "Value", - "description": "The value or contents of the evidence." - } - } - } - }, - "tools": { - "type": "array", - "uniqueItems": true, - "items": { - "anyOf": [ - { - "title": "Ref", - "$ref": "#/definitions/refLinkType" - }, - { - "title": "BOM-Link Element", - "$ref": "#/definitions/bomLinkElementType" - } - ] - }, - "title": "BOM References", - "description": "The object in the BOM identified by its bom-ref. This is often a component or service, but may be any object type supporting bom-refs. Tools used for analysis should already be defined in the BOM, either in the metadata/tools, components, or formulation." - } - } - }, - "occurrences": { - "type": "array", - "title": "Occurrences", - "description": "Evidence of individual instances of a component spread across multiple locations.", - "items": { - "type": "object", - "required": [ "location" ], - "additionalProperties": false, - "properties": { - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the occurrence elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." - }, - "location": { - "type": "string", - "title": "Location", - "description": "The location or path to where the component was found." 
- } - } - } - }, - "callstack": { - "type": "object", - "description": "Evidence of the components use through the callstack.", - "additionalProperties": false, - "properties": { - "frames": { - "type": "array", - "title": "Methods", - "items": { - "type": "object", - "required": [ - "module" - ], - "additionalProperties": false, - "properties": { - "package": { - "title": "Package", - "description": "A package organizes modules into namespaces, providing a unique namespace for each type it contains.", - "type": "string" - }, - "module": { - "title": "Module", - "description": "A module or class that encloses functions/methods and other code.", - "type": "string" - }, - "function": { - "title": "Function", - "description": "A block of code designed to perform a particular task.", - "type": "string" - }, - "parameters": { - "title": "Parameters", - "description": "Optional arguments that are passed to the module or function.", - "type": "array", - "items": { - "type": "string" - } - }, - "line": { - "title": "Line", - "description": "The line number the code that is called resides on.", - "type": "integer" - }, - "column": { - "title": "Column", - "description": "The column the code that is called resides.", - "type": "integer" - }, - "fullFilename": { - "title": "Full Filename", - "description": "The full path and filename of the module.", - "type": "string" - } - } - } - } - } - }, - "licenses": { - "$ref": "#/definitions/licenseChoice", - "title": "Component License(s)" - }, - "copyright": { - "type": "array", - "items": {"$ref": "#/definitions/copyright"}, - "title": "Copyright" - } - } - }, - "compositions": { - "type": "object", - "title": "Compositions", - "required": [ - "aggregate" - ], - "additionalProperties": false, - "properties": { - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the composition elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." - }, - "aggregate": { - "$ref": "#/definitions/aggregateType", - "title": "Aggregate", - "description": "Specifies an aggregate type that describe how complete a relationship is.\n\n* __complete__ = The relationship is complete. No further relationships including constituent components, services, or dependencies are known to exist.\n* __incomplete__ = The relationship is incomplete. Additional relationships exist and may include constituent components, services, or dependencies.\n* __incomplete_first_party_only__ = The relationship is incomplete. Only relationships for first-party components, services, or their dependencies are represented.\n* __incomplete_first_party_proprietary_only__ = The relationship is incomplete. Only relationships for first-party components, services, or their dependencies are represented, limited specifically to those that are proprietary.\n* __incomplete_first_party_opensource_only__ = The relationship is incomplete. Only relationships for first-party components, services, or their dependencies are represented, limited specifically to those that are opensource.\n* __incomplete_third_party_only__ = The relationship is incomplete. Only relationships for third-party components, services, or their dependencies are represented.\n* __incomplete_third_party_proprietary_only__ = The relationship is incomplete. 
Only relationships for third-party components, services, or their dependencies are represented, limited specifically to those that are proprietary.\n* __incomplete_third_party_opensource_only__ = The relationship is incomplete. Only relationships for third-party components, services, or their dependencies are represented, limited specifically to those that are opensource.\n* __unknown__ = The relationship may be complete or incomplete. This usually signifies a 'best-effort' to obtain constituent components, services, or dependencies but the completeness is inconclusive.\n* __not_specified__ = The relationship completeness is not specified.\n" - }, - "assemblies": { - "type": "array", - "uniqueItems": true, - "items": { - "anyOf": [ - { - "title": "Ref", - "$ref": "#/definitions/refLinkType" - }, - { - "title": "BOM-Link Element", - "$ref": "#/definitions/bomLinkElementType" - } - ] - }, - "title": "BOM references", - "description": "The bom-ref identifiers of the components or services being described. Assemblies refer to nested relationships whereby a constituent part may include other constituent parts. References do not cascade to child parts. References are explicit for the specified constituent part only." - }, - "dependencies": { - "type": "array", - "uniqueItems": true, - "items": { - "type": "string" - }, - "title": "BOM references", - "description": "The bom-ref identifiers of the components or services being described. Dependencies refer to a relationship whereby an independent constituent part requires another independent constituent part. References do not cascade to transitive dependencies. References are explicit for the specified dependency only." - }, - "vulnerabilities": { - "type": "array", - "uniqueItems": true, - "items": { - "type": "string" - }, - "title": "BOM references", - "description": "The bom-ref identifiers of the vulnerabilities being described." - }, - "signature": { - "$ref": "#/definitions/signature", - "title": "Signature", - "description": "Enveloped signature in [JSON Signature Format (JSF)](https://cyberphone.github.io/doc/security/jsf.html)." - } - } - }, - "aggregateType": { - "type": "string", - "default": "not_specified", - "enum": [ - "complete", - "incomplete", - "incomplete_first_party_only", - "incomplete_first_party_proprietary_only", - "incomplete_first_party_opensource_only", - "incomplete_third_party_only", - "incomplete_third_party_proprietary_only", - "incomplete_third_party_opensource_only", - "unknown", - "not_specified" - ] - }, - "property": { - "type": "object", - "title": "Lightweight name-value pair", - "description": "Provides the ability to document properties in a name-value store. This provides flexibility to include data not officially supported in the standard without having to use additional namespaces or create extensions. Unlike key-value stores, properties support duplicate names, each potentially having different values. Property names of interest to the general public are encouraged to be registered in the [CycloneDX Property Taxonomy](https://github.com/CycloneDX/cyclonedx-property-taxonomy). Formal registration is OPTIONAL.", - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "The name of the property. Duplicate names are allowed, each potentially having a different value." - }, - "value": { - "type": "string", - "title": "Value", - "description": "The value of the property." 
- } - } - }, - "localeType": { - "type": "string", - "pattern": "^([a-z]{2})(-[A-Z]{2})?$", - "title": "Locale", - "description": "Defines a syntax for representing two character language code (ISO-639) followed by an optional two character country code. The language code MUST be lower case. If the country code is specified, the country code MUST be upper case. The language code and country code MUST be separated by a minus sign. Examples: en, en-US, fr, fr-CA" - }, - "releaseType": { - "type": "string", - "examples": [ - "major", - "minor", - "patch", - "pre-release", - "internal" - ], - "description": "The software versioning type. It is RECOMMENDED that the release type use one of 'major', 'minor', 'patch', 'pre-release', or 'internal'. Representing all possible software release types is not practical, so standardizing on the recommended values, whenever possible, is strongly encouraged.\n\n* __major__ = A major release may contain significant changes or may introduce breaking changes.\n* __minor__ = A minor release, also known as an update, may contain a smaller number of changes than major releases.\n* __patch__ = Patch releases are typically unplanned and may resolve defects or important security issues.\n* __pre-release__ = A pre-release may include alpha, beta, or release candidates and typically have limited support. They provide the ability to preview a release prior to its general availability.\n* __internal__ = Internal releases are not for public consumption and are intended to be used exclusively by the project or manufacturer that produced it." - }, - "note": { - "type": "object", - "title": "Note", - "description": "A note containing the locale and content.", - "required": [ - "text" - ], - "additionalProperties": false, - "properties": { - "locale": { - "$ref": "#/definitions/localeType", - "title": "Locale", - "description": "The ISO-639 (or higher) language code and optional ISO-3166 (or higher) country code. Examples include: \"en\", \"en-US\", \"fr\" and \"fr-CA\"" - }, - "text": { - "title": "Release note content", - "description": "Specifies the full content of the release note.", - "$ref": "#/definitions/attachment" - } - } - }, - "releaseNotes": { - "type": "object", - "title": "Release notes", - "required": [ - "type" - ], - "additionalProperties": false, - "properties": { - "type": { - "$ref": "#/definitions/releaseType", - "title": "Type", - "description": "The software versioning type the release note describes." - }, - "title": { - "type": "string", - "title": "Title", - "description": "The title of the release." - }, - "featuredImage": { - "type": "string", - "format": "iri-reference", - "title": "Featured image", - "description": "The URL to an image that may be prominently displayed with the release note." - }, - "socialImage": { - "type": "string", - "format": "iri-reference", - "title": "Social image", - "description": "The URL to an image that may be used in messaging on social media platforms." - }, - "description": { - "type": "string", - "title": "Description", - "description": "A short description of the release." - }, - "timestamp": { - "type": "string", - "format": "date-time", - "title": "Timestamp", - "description": "The date and time (timestamp) when the release note was created." - }, - "aliases": { - "type": "array", - "items": { - "type": "string" - }, - "title": "Aliases", - "description": "One or more alternate names the release may be referred to. This may include unofficial terms used by development and marketing teams (e.g. code names)." 
- }, - "tags": { - "type": "array", - "items": { - "type": "string" - }, - "title": "Tags", - "description": "One or more tags that may aid in search or retrieval of the release note." - }, - "resolves": { - "type": "array", - "items": {"$ref": "#/definitions/issue"}, - "title": "Resolves", - "description": "A collection of issues that have been resolved." - }, - "notes": { - "type": "array", - "items": {"$ref": "#/definitions/note"}, - "title": "Notes", - "description": "Zero or more release notes containing the locale and content. Multiple note objects may be specified to support release notes in a wide variety of languages." - }, - "properties": { - "type": "array", - "title": "Properties", - "description": "Provides the ability to document properties in a name-value store. This provides flexibility to include data not officially supported in the standard without having to use additional namespaces or create extensions. Unlike key-value stores, properties support duplicate names, each potentially having different values. Property names of interest to the general public are encouraged to be registered in the [CycloneDX Property Taxonomy](https://github.com/CycloneDX/cyclonedx-property-taxonomy). Formal registration is OPTIONAL.", - "items": {"$ref": "#/definitions/property"} - } - } - }, - "advisory": { - "type": "object", - "title": "Advisory", - "description": "Title and location where advisory information can be obtained. An advisory is a notification of a threat to a component, service, or system.", - "required": ["url"], - "additionalProperties": false, - "properties": { - "title": { - "type": "string", - "title": "Title", - "description": "An optional name of the advisory." - }, - "url": { - "type": "string", - "title": "URL", - "format": "iri-reference", - "description": "Location where the advisory can be obtained." - } - } - }, - "cwe": { - "type": "integer", - "minimum": 1, - "title": "CWE", - "description": "Integer representation of a Common Weaknesses Enumerations (CWE). For example 399 (of https://cwe.mitre.org/data/definitions/399.html)" - }, - "severity": { - "type": "string", - "title": "Severity", - "description": "Textual representation of the severity of the vulnerability adopted by the analysis method. If the analysis method uses values other than what is provided, the user is expected to translate appropriately.", - "enum": [ - "critical", - "high", - "medium", - "low", - "info", - "none", - "unknown" - ] - }, - "scoreMethod": { - "type": "string", - "title": "Method", - "description": "Specifies the severity or risk scoring methodology or standard used.\n\n* CVSSv2 - [Common Vulnerability Scoring System v2](https://www.first.org/cvss/v2/)\n* CVSSv3 - [Common Vulnerability Scoring System v3](https://www.first.org/cvss/v3-0/)\n* CVSSv31 - [Common Vulnerability Scoring System v3.1](https://www.first.org/cvss/v3-1/)\n* CVSSv4 - [Common Vulnerability Scoring System v4](https://www.first.org/cvss/v4-0/)\n* OWASP - [OWASP Risk Rating Methodology](https://owasp.org/www-community/OWASP_Risk_Rating_Methodology)\n* SSVC - [Stakeholder Specific Vulnerability Categorization](https://github.com/CERTCC/SSVC) (all versions)", - "enum": [ - "CVSSv2", - "CVSSv3", - "CVSSv31", - "CVSSv4", - "OWASP", - "SSVC", - "other" - ] - }, - "impactAnalysisState": { - "type": "string", - "title": "Impact Analysis State", - "description": "Declares the current state of an occurrence of a vulnerability, after automated or manual analysis. \n\n* __resolved__ = the vulnerability has been remediated. 
\n* __resolved\\_with\\_pedigree__ = the vulnerability has been remediated and evidence of the changes are provided in the affected components pedigree containing verifiable commit history and/or diff(s). \n* __exploitable__ = the vulnerability may be directly or indirectly exploitable. \n* __in\\_triage__ = the vulnerability is being investigated. \n* __false\\_positive__ = the vulnerability is not specific to the component or service and was falsely identified or associated. \n* __not\\_affected__ = the component or service is not affected by the vulnerability. Justification should be specified for all not_affected cases.", - "enum": [ - "resolved", - "resolved_with_pedigree", - "exploitable", - "in_triage", - "false_positive", - "not_affected" - ] - }, - "impactAnalysisJustification": { - "type": "string", - "title": "Impact Analysis Justification", - "description": "The rationale of why the impact analysis state was asserted. \n\n* __code\\_not\\_present__ = the code has been removed or tree-shaked. \n* __code\\_not\\_reachable__ = the vulnerable code is not invoked at runtime. \n* __requires\\_configuration__ = exploitability requires a configurable option to be set/unset. \n* __requires\\_dependency__ = exploitability requires a dependency that is not present. \n* __requires\\_environment__ = exploitability requires a certain environment which is not present. \n* __protected\\_by\\_compiler__ = exploitability requires a compiler flag to be set/unset. \n* __protected\\_at\\_runtime__ = exploits are prevented at runtime. \n* __protected\\_at\\_perimeter__ = attacks are blocked at physical, logical, or network perimeter. \n* __protected\\_by\\_mitigating\\_control__ = preventative measures have been implemented that reduce the likelihood and/or impact of the vulnerability.", - "enum": [ - "code_not_present", - "code_not_reachable", - "requires_configuration", - "requires_dependency", - "requires_environment", - "protected_by_compiler", - "protected_at_runtime", - "protected_at_perimeter", - "protected_by_mitigating_control" - ] - }, - "rating": { - "type": "object", - "title": "Rating", - "description": "Defines the severity or risk ratings of a vulnerability.", - "additionalProperties": false, - "properties": { - "source": { - "$ref": "#/definitions/vulnerabilitySource", - "description": "The source that calculated the severity or risk rating of the vulnerability." - }, - "score": { - "type": "number", - "title": "Score", - "description": "The numerical score of the rating." - }, - "severity": { - "$ref": "#/definitions/severity", - "description": "Textual representation of the severity that corresponds to the numerical score of the rating." - }, - "method": { - "$ref": "#/definitions/scoreMethod" - }, - "vector": { - "type": "string", - "title": "Vector", - "description": "Textual representation of the metric values used to score the vulnerability" - }, - "justification": { - "type": "string", - "title": "Justification", - "description": "An optional reason for rating the vulnerability as it was" - } - } - }, - "vulnerabilitySource": { - "type": "object", - "title": "Source", - "description": "The source of vulnerability information. 
This is often the organization that published the vulnerability.", - "additionalProperties": false, - "properties": { - "url": { - "type": "string", - "title": "URL", - "description": "The url of the vulnerability documentation as provided by the source.", - "examples": [ - "https://nvd.nist.gov/vuln/detail/CVE-2021-39182" - ] - }, - "name": { - "type": "string", - "title": "Name", - "description": "The name of the source.", - "examples": [ - "NVD", - "National Vulnerability Database", - "OSS Index", - "VulnDB", - "GitHub Advisories" - ] - } - } - }, - "vulnerability": { - "type": "object", - "title": "Vulnerability", - "description": "Defines a weakness in a component or service that could be exploited or triggered by a threat source.", - "additionalProperties": false, - "properties": { - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the vulnerability elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." - }, - "id": { - "type": "string", - "title": "ID", - "description": "The identifier that uniquely identifies the vulnerability.", - "examples": [ - "CVE-2021-39182", - "GHSA-35m5-8cvj-8783", - "SNYK-PYTHON-ENROCRYPT-1912876" - ] - }, - "source": { - "$ref": "#/definitions/vulnerabilitySource", - "description": "The source that published the vulnerability." - }, - "references": { - "type": "array", - "title": "References", - "description": "Zero or more pointers to vulnerabilities that are the equivalent of the vulnerability specified. Often times, the same vulnerability may exist in multiple sources of vulnerability intelligence, but have different identifiers. References provide a way to correlate vulnerabilities across multiple sources of vulnerability intelligence.", - "items": { - "type": "object", - "required": [ - "id", - "source" - ], - "additionalProperties": false, - "properties": { - "id": { - "type": "string", - "title": "ID", - "description": "An identifier that uniquely identifies the vulnerability.", - "examples": [ - "CVE-2021-39182", - "GHSA-35m5-8cvj-8783", - "SNYK-PYTHON-ENROCRYPT-1912876" - ] - }, - "source": { - "$ref": "#/definitions/vulnerabilitySource", - "description": "The source that published the vulnerability." - } - } - } - }, - "ratings": { - "type": "array", - "title": "Ratings", - "description": "List of vulnerability ratings", - "items": { - "$ref": "#/definitions/rating" - } - }, - "cwes": { - "type": "array", - "title": "CWEs", - "description": "List of Common Weaknesses Enumerations (CWEs) codes that describes this vulnerability. For example 399 (of https://cwe.mitre.org/data/definitions/399.html)", - "examples": [399], - "items": { - "$ref": "#/definitions/cwe" - } - }, - "description": { - "type": "string", - "title": "Description", - "description": "A description of the vulnerability as provided by the source." - }, - "detail": { - "type": "string", - "title": "Details", - "description": "If available, an in-depth description of the vulnerability as provided by the source organization. Details often include information useful in understanding root cause." - }, - "recommendation": { - "type": "string", - "title": "Recommendation", - "description": "Recommendations of how the vulnerability can be remediated or mitigated." - }, - "workaround": { - "type": "string", - "title": "Workarounds", - "description": "A bypass, usually temporary, of the vulnerability that reduces its likelihood and/or impact. 
Workarounds often involve changes to configuration or deployments." - }, - "proofOfConcept": { - "type": "object", - "title": "Proof of Concept", - "description": "Evidence used to reproduce the vulnerability.", - "properties": { - "reproductionSteps": { - "type": "string", - "title": "Steps to Reproduce", - "description": "Precise steps to reproduce the vulnerability." - }, - "environment": { - "type": "string", - "title": "Environment", - "description": "A description of the environment in which reproduction was possible." - }, - "supportingMaterial": { - "type": "array", - "title": "Supporting Material", - "description": "Supporting material that helps in reproducing or understanding how reproduction is possible. This may include screenshots, payloads, and PoC exploit code.", - "items": { "$ref": "#/definitions/attachment" } - } - } - }, - "advisories": { - "type": "array", - "title": "Advisories", - "description": "Published advisories of the vulnerability if provided.", - "items": { - "$ref": "#/definitions/advisory" - } - }, - "created": { - "type": "string", - "format": "date-time", - "title": "Created", - "description": "The date and time (timestamp) when the vulnerability record was created in the vulnerability database." - }, - "published": { - "type": "string", - "format": "date-time", - "title": "Published", - "description": "The date and time (timestamp) when the vulnerability record was first published." - }, - "updated": { - "type": "string", - "format": "date-time", - "title": "Updated", - "description": "The date and time (timestamp) when the vulnerability record was last updated." - }, - "rejected": { - "type": "string", - "format": "date-time", - "title": "Rejected", - "description": "The date and time (timestamp) when the vulnerability record was rejected (if applicable)." - }, - "credits": { - "type": "object", - "title": "Credits", - "description": "Individuals or organizations credited with the discovery of the vulnerability.", - "additionalProperties": false, - "properties": { - "organizations": { - "type": "array", - "title": "Organizations", - "description": "The organizations credited with vulnerability discovery.", - "items": { - "$ref": "#/definitions/organizationalEntity" - } - }, - "individuals": { - "type": "array", - "title": "Individuals", - "description": "The individuals, not associated with organizations, that are credited with vulnerability discovery.", - "items": { - "$ref": "#/definitions/organizationalContact" - } - } - } - }, - "tools": { - "oneOf": [ - { - "type": "object", - "title": "Tools", - "description": "The tool(s) used to identify, confirm, or score the vulnerability.", - "additionalProperties": false, - "properties": { - "components": { - "type": "array", - "items": {"$ref": "#/definitions/component"}, - "uniqueItems": true, - "title": "Components", - "description": "A list of software and hardware components used as tools" - }, - "services": { - "type": "array", - "items": {"$ref": "#/definitions/service"}, - "uniqueItems": true, - "title": "Services", - "description": "A list of services used as tools. This may include microservices, function-as-a-service, and other types of network or intra-process services." 
- } - } - }, - { - "type": "array", - "title": "Tools (legacy)", - "description": "[Deprecated] The tool(s) used to identify, confirm, or score the vulnerability.", - "items": {"$ref": "#/definitions/tool"} - } - ] - }, - "analysis": { - "type": "object", - "title": "Impact Analysis", - "description": "An assessment of the impact and exploitability of the vulnerability.", - "additionalProperties": false, - "properties": { - "state": { - "$ref": "#/definitions/impactAnalysisState" - }, - "justification": { - "$ref": "#/definitions/impactAnalysisJustification" - }, - "response": { - "type": "array", - "title": "Response", - "description": "A response to the vulnerability by the manufacturer, supplier, or project responsible for the affected component or service. More than one response is allowed. Responses are strongly encouraged for vulnerabilities where the analysis state is exploitable.", - "items": { - "type": "string", - "enum": [ - "can_not_fix", - "will_not_fix", - "update", - "rollback", - "workaround_available" - ] - } - }, - "detail": { - "type": "string", - "title": "Detail", - "description": "Detailed description of the impact including methods used during assessment. If a vulnerability is not exploitable, this field should include specific details on why the component or service is not impacted by this vulnerability." - }, - "firstIssued": { - "type": "string", - "format": "date-time", - "title": "First Issued", - "description": "The date and time (timestamp) when the analysis was first issued." - }, - "lastUpdated": { - "type": "string", - "format": "date-time", - "title": "Last Updated", - "description": "The date and time (timestamp) when the analysis was last updated." - } - } - }, - "affects": { - "type": "array", - "uniqueItems": true, - "items": { - "type": "object", - "required": [ - "ref" - ], - "additionalProperties": false, - "properties": { - "ref": { - "anyOf": [ - { - "title": "Ref", - "$ref": "#/definitions/refLinkType" - }, - { - "title": "BOM-Link Element", - "$ref": "#/definitions/bomLinkElementType" - } - ], - "title": "Reference", - "description": "References a component or service by the objects bom-ref" - }, - "versions": { - "type": "array", - "title": "Versions", - "description": "Zero or more individual versions or range of versions.", - "items": { - "type": "object", - "oneOf": [ - { - "required": ["version"] - }, - { - "required": ["range"] - } - ], - "additionalProperties": false, - "properties": { - "version": { - "description": "A single version of a component or service.", - "$ref": "#/definitions/version" - }, - "range": { - "description": "A version range specified in Package URL Version Range syntax (vers) which is defined at https://github.com/package-url/purl-spec/VERSION-RANGE-SPEC.rst", - "$ref": "#/definitions/range" - }, - "status": { - "description": "The vulnerability status for the version or range of versions.", - "$ref": "#/definitions/affectedStatus", - "default": "affected" - } - } - } - } - } - }, - "title": "Affects", - "description": "The components or services that are affected by the vulnerability." - }, - "properties": { - "type": "array", - "title": "Properties", - "description": "Provides the ability to document properties in a name-value store. This provides flexibility to include data not officially supported in the standard without having to use additional namespaces or create extensions. Unlike key-value stores, properties support duplicate names, each potentially having different values. 
Property names of interest to the general public are encouraged to be registered in the [CycloneDX Property Taxonomy](https://github.com/CycloneDX/cyclonedx-property-taxonomy). Formal registration is OPTIONAL.", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "affectedStatus": { - "description": "The vulnerability status of a given version or range of versions of a product. The statuses 'affected' and 'unaffected' indicate that the version is affected or unaffected by the vulnerability. The status 'unknown' indicates that it is unknown or unspecified whether the given version is affected. There can be many reasons for an 'unknown' status, including that an investigation has not been undertaken or that a vendor has not disclosed the status.", - "type": "string", - "enum": [ - "affected", - "unaffected", - "unknown" - ] - }, - "version": { - "description": "A single version of a component or service.", - "type": "string", - "minLength": 1, - "maxLength": 1024 - }, - "range": { - "description": "A version range specified in Package URL Version Range syntax (vers) which is defined at https://github.com/package-url/purl-spec/VERSION-RANGE-SPEC.rst", - "type": "string", - "minLength": 1, - "maxLength": 1024 - }, - "annotations": { - "type": "object", - "title": "Annotations", - "description": "A comment, note, explanation, or similar textual content which provides additional context to the object(s) being annotated.", - "required": [ - "subjects", - "annotator", - "timestamp", - "text" - ], - "additionalProperties": false, - "properties": { - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the annotation elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." - }, - "subjects": { - "type": "array", - "uniqueItems": true, - "items": { - "anyOf": [ - { - "title": "Ref", - "$ref": "#/definitions/refLinkType" - }, - { - "title": "BOM-Link Element", - "$ref": "#/definitions/bomLinkElementType" - } - ] - }, - "title": "BOM References", - "description": "The object in the BOM identified by its bom-ref. This is often a component or service, but may be any object type supporting bom-refs." - }, - "annotator": { - "type": "object", - "title": "Annotator", - "description": "The organization, person, component, or service which created the textual content of the annotation.", - "oneOf": [ - { - "required": [ - "organization" - ] - }, - { - "required": [ - "individual" - ] - }, - { - "required": [ - "component" - ] - }, - { - "required": [ - "service" - ] - } - ], - "additionalProperties": false, - "properties": { - "organization": { - "description": "The organization that created the annotation", - "$ref": "#/definitions/organizationalEntity" - }, - "individual": { - "description": "The person that created the annotation", - "$ref": "#/definitions/organizationalContact" - }, - "component": { - "description": "The tool or component that created the annotation", - "$ref": "#/definitions/component" - }, - "service": { - "description": "The service that created the annotation", - "$ref": "#/definitions/service" - } - } - }, - "timestamp": { - "type": "string", - "format": "date-time", - "title": "Timestamp", - "description": "The date and time (timestamp) when the annotation was created." - }, - "text": { - "type": "string", - "title": "Text", - "description": "The textual content of the annotation." 
- }, - "signature": { - "$ref": "#/definitions/signature", - "title": "Signature", - "description": "Enveloped signature in [JSON Signature Format (JSF)](https://cyberphone.github.io/doc/security/jsf.html)." - } - } - }, - "modelCard": { - "$comment": "Model card support in CycloneDX is derived from TensorFlow Model Card Toolkit released under the Apache 2.0 license and available from https://github.com/tensorflow/model-card-toolkit/blob/main/model_card_toolkit/schema/v0.0.2/model_card.schema.json. In addition, CycloneDX model card support includes portions of VerifyML, also released under the Apache 2.0 license and available from https://github.com/cylynx/verifyml/blob/main/verifyml/model_card_toolkit/schema/v0.0.4/model_card.schema.json.", - "type": "object", - "title": "Model Card", - "description": "A model card describes the intended uses of a machine learning model and potential limitations, including biases and ethical considerations. Model cards typically contain the training parameters, which datasets were used to train the model, performance metrics, and other relevant data useful for ML transparency. This object SHOULD be specified for any component of type `machine-learning-model` and MUST NOT be specified for other component types.", - "additionalProperties": false, - "properties": { - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the model card elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." - }, - "modelParameters": { - "type": "object", - "title": "Model Parameters", - "description": "Hyper-parameters for construction of the model.", - "additionalProperties": false, - "properties": { - "approach": { - "type": "object", - "title": "Approach", - "description": "The overall approach to learning used by the model for problem solving.", - "additionalProperties": false, - "properties": { - "type": { - "type": "string", - "title": "Learning Type", - "description": "Learning types describing the learning problem or hybrid learning problem.", - "enum": [ - "supervised", - "unsupervised", - "reinforcement-learning", - "semi-supervised", - "self-supervised" - ] - } - } - }, - "task": { - "type": "string", - "title": "Task", - "description": "Directly influences the input and/or output. Examples include classification, regression, clustering, etc." - }, - "architectureFamily": { - "type": "string", - "title": "Architecture Family", - "description": "The model architecture family such as transformer network, convolutional neural network, residual neural network, LSTM neural network, etc." - }, - "modelArchitecture": { - "type": "string", - "title": "Model Architecture", - "description": "The specific architecture of the model such as GPT-1, ResNet-50, YOLOv3, etc." 
- }, - "datasets": { - "type": "array", - "title": "Datasets", - "description": "The datasets used to train and evaluate the model.", - "items" : { - "oneOf" : [ - { - "title": "Inline Component Data", - "$ref": "#/definitions/componentData" - }, - { - "type": "object", - "title": "Data Component Reference", - "additionalProperties": false, - "properties": { - "ref": { - "anyOf": [ - { - "title": "Ref", - "$ref": "#/definitions/refLinkType" - }, - { - "title": "BOM-Link Element", - "$ref": "#/definitions/bomLinkElementType" - } - ], - "title": "Reference", - "description": "References a data component by the components bom-ref attribute" - } - } - } - ] - } - }, - "inputs": { - "type": "array", - "title": "Inputs", - "description": "The input format(s) of the model", - "items": { "$ref": "#/definitions/inputOutputMLParameters" } - }, - "outputs": { - "type": "array", - "title": "Outputs", - "description": "The output format(s) from the model", - "items": { "$ref": "#/definitions/inputOutputMLParameters" } - } - } - }, - "quantitativeAnalysis": { - "type": "object", - "title": "Quantitative Analysis", - "description": "A quantitative analysis of the model", - "additionalProperties": false, - "properties": { - "performanceMetrics": { - "type": "array", - "title": "Performance Metrics", - "description": "The model performance metrics being reported. Examples may include accuracy, F1 score, precision, top-3 error rates, MSC, etc.", - "items": { "$ref": "#/definitions/performanceMetric" } - }, - "graphics": { "$ref": "#/definitions/graphicsCollection" } - } - }, - "considerations": { - "type": "object", - "title": "Considerations", - "description": "What considerations should be taken into account regarding the model's construction, training, and application?", - "additionalProperties": false, - "properties": { - "users": { - "type": "array", - "title": "Users", - "description": "Who are the intended users of the model?", - "items": { - "type": "string" - } - }, - "useCases": { - "type": "array", - "title": "Use Cases", - "description": "What are the intended use cases of the model?", - "items": { - "type": "string" - } - }, - "technicalLimitations": { - "type": "array", - "title": "Technical Limitations", - "description": "What are the known technical limitations of the model? E.g. What kind(s) of data should the model be expected not to perform well on? What are the factors that might degrade model performance?", - "items": { - "type": "string" - } - }, - "performanceTradeoffs": { - "type": "array", - "title": "Performance Tradeoffs", - "description": "What are the known tradeoffs in accuracy/performance of the model?", - "items": { - "type": "string" - } - }, - "ethicalConsiderations": { - "type": "array", - "title": "Ethical Considerations", - "description": "What are the ethical (or environmental) risks involved in the application of this model?", - "items": { "$ref": "#/definitions/risk" } - }, - "fairnessAssessments": { - "type": "array", - "title": "Fairness Assessments", - "description": "How does the model affect groups at risk of being systematically disadvantaged? What are the harms and benefits to the various affected groups?", - "items": { - "$ref": "#/definitions/fairnessAssessment" - } - } - } - }, - "properties": { - "type": "array", - "title": "Properties", - "description": "Provides the ability to document properties in a name-value store. 
This provides flexibility to include data not officially supported in the standard without having to use additional namespaces or create extensions. Unlike key-value stores, properties support duplicate names, each potentially having different values. Property names of interest to the general public are encouraged to be registered in the [CycloneDX Property Taxonomy](https://github.com/CycloneDX/cyclonedx-property-taxonomy). Formal registration is OPTIONAL.", - "items": {"$ref": "#/definitions/property"} - } - } - }, - "inputOutputMLParameters": { - "type": "object", - "title": "Input and Output Parameters", - "additionalProperties": false, - "properties": { - "format": { - "description": "The data format for input/output to the model. Example formats include string, image, time-series", - "type": "string" - } - } - }, - "componentData": { - "type": "object", - "additionalProperties": false, - "required": [ - "type" - ], - "properties": { - "bom-ref": { - "$ref": "#/definitions/refType", - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the dataset elsewhere in the BOM. Every bom-ref MUST be unique within the BOM." - }, - "type": { - "type": "string", - "title": "Type of Data", - "description": "The general theme or subject matter of the data being specified.\n\n* __source-code__ = Any type of code, code snippet, or data-as-code.\n* __configuration__ = Parameters or settings that may be used by other components.\n* __dataset__ = A collection of data.\n* __definition__ = Data that can be used to create new instances of what the definition defines.\n* __other__ = Any other type of data that does not fit into existing definitions.", - "enum": [ - "source-code", - "configuration", - "dataset", - "definition", - "other" - ] - }, - "name": { - "description": "The name of the dataset.", - "type": "string" - }, - "contents": { - "type": "object", - "title": "Data Contents", - "description": "The contents or references to the contents of the data being described.", - "additionalProperties": false, - "properties": { - "attachment": { - "title": "Data Attachment", - "description": "An optional way to include textual or encoded data.", - "$ref": "#/definitions/attachment" - }, - "url": { - "type": "string", - "title": "Data URL", - "description": "The URL to where the data can be retrieved.", - "format": "iri-reference" - }, - "properties": { - "type": "array", - "title": "Configuration Properties", - "description": "Provides the ability to document name-value parameters used for configuration.", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "classification": { - "$ref": "#/definitions/dataClassification" - }, - "sensitiveData": { - "type": "array", - "description": "A description of any sensitive data in a dataset.", - "items": { - "type": "string" - } - }, - "graphics": { "$ref": "#/definitions/graphicsCollection" }, - "description": { - "description": "A description of the dataset. 
Can describe size of dataset, whether it's used for source code, training, testing, or validation, etc.", - "type": "string" - }, - "governance": { - "type": "object", - "title": "Data Governance", - "$ref": "#/definitions/dataGovernance" - } - } - }, - "dataGovernance": { - "type": "object", - "title": "Data Governance", - "additionalProperties": false, - "properties": { - "custodians": { - "type": "array", - "title": "Data Custodians", - "description": "Data custodians are responsible for the safe custody, transport, and storage of data.", - "items": { "$ref": "#/definitions/dataGovernanceResponsibleParty" } - }, - "stewards": { - "type": "array", - "title": "Data Stewards", - "description": "Data stewards are responsible for data content, context, and associated business rules.", - "items": { "$ref": "#/definitions/dataGovernanceResponsibleParty" } - }, - "owners": { - "type": "array", - "title": "Data Owners", - "description": "Data owners are concerned with risk and appropriate access to data.", - "items": { "$ref": "#/definitions/dataGovernanceResponsibleParty" } - } - } - }, - "dataGovernanceResponsibleParty": { - "type": "object", - "additionalProperties": false, - "properties": { - "organization": { - "title": "Organization", - "$ref": "#/definitions/organizationalEntity" - }, - "contact": { - "title": "Individual", - "$ref": "#/definitions/organizationalContact" - } - }, - "oneOf":[ - { - "required": ["organization"] - }, - { - "required": ["contact"] - } - ] - }, - "graphicsCollection": { - "type": "object", - "title": "Graphics Collection", - "description": "A collection of graphics that represent various measurements.", - "additionalProperties": false, - "properties": { - "description": { - "description": "A description of this collection of graphics.", - "type": "string" - }, - "collection": { - "description": "A collection of graphics.", - "type": "array", - "items": { "$ref": "#/definitions/graphic" } - } - } - }, - "graphic": { - "type": "object", - "additionalProperties": false, - "properties": { - "name": { - "description": "The name of the graphic.", - "type": "string" - }, - "image": { - "title": "Graphic Image", - "description": "The graphic (vector or raster). Base64 encoding MUST be specified for binary images.", - "$ref": "#/definitions/attachment" - } - } - }, - "performanceMetric": { - "type": "object", - "additionalProperties": false, - "properties": { - "type": { - "description": "The type of performance metric.", - "type": "string" - }, - "value": { - "description": "The value of the performance metric.", - "type": "string" - }, - "slice": { - "description": "The name of the slice this metric was computed on. 
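As a rough sketch of a componentData entry with governance attached: the field names are taken from the componentData and dataGovernance definitions above, the values are invented, and it is assumed here that the organizationalEntity definition (declared elsewhere in this schema, not shown in this hunk) accepts a plain "name" field.

import json

# Invented values; the organization shape is an assumption (see note above).
dataset = {
    "bom-ref": "dataset-train-2024",
    "type": "dataset",                       # one of the enumerated data types
    "name": "training-transactions",
    "contents": {"url": "https://example.com/datasets/train.parquet"},
    "classification": "internal",
    "sensitiveData": ["cardholder country"],
    "governance": {
        "custodians": [
            {"organization": {"name": "Example Data Platform Team"}}  # assumed organizationalEntity shape
        ]
    }
}

print(json.dumps(dataset, indent=2))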
By default, assume this metric is not sliced.", - "type": "string" - }, - "confidenceInterval": { - "description": "The confidence interval of the metric.", - "type": "object", - "additionalProperties": false, - "properties": { - "lowerBound": { - "description": "The lower bound of the confidence interval.", - "type": "string" - }, - "upperBound": { - "description": "The upper bound of the confidence interval.", - "type": "string" - } - } - } - } - }, - "risk": { - "type": "object", - "additionalProperties": false, - "properties": { - "name": { - "description": "The name of the risk.", - "type": "string" - }, - "mitigationStrategy": { - "description": "Strategy used to address this risk.", - "type": "string" - } - } - }, - "fairnessAssessment": { - "type": "object", - "title": "Fairness Assessment", - "description": "Information about the benefits and harms of the model to an identified at risk group.", - "additionalProperties": false, - "properties": { - "groupAtRisk": { - "type": "string", - "description": "The groups or individuals at risk of being systematically disadvantaged by the model." - }, - "benefits": { - "type": "string", - "description": "Expected benefits to the identified groups." - }, - "harms": { - "type": "string", - "description": "Expected harms to the identified groups." - }, - "mitigationStrategy": { - "type": "string", - "description": "With respect to the benefits and harms outlined, please describe any mitigation strategy implemented." - } - } - }, - "dataClassification": { - "type": "string", - "title": "Data Classification", - "description": "Data classification tags data according to its type, sensitivity, and value if altered, stolen, or destroyed." - }, - "formula": { - "title": "Formula", - "description": "Describes workflows and resources that captures rules and other aspects of how the associated BOM component or service was formed.", - "type": "object", - "additionalProperties": false, - "properties": { - "bom-ref": { - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the formula elsewhere in the BOM. Every bom-ref MUST be unique within the BOM.", - "$ref": "#/definitions/refType" - }, - "components": { - "title": "Components", - "description": "Transient components that are used in tasks that constitute one or more of this formula's workflows", - "type": "array", - "items": { - "$ref": "#/definitions/component" - }, - "uniqueItems": true - }, - "services": { - "title": "Services", - "description": "Transient services that are used in tasks that constitute one or more of this formula's workflows", - "type": "array", - "items": { - "$ref": "#/definitions/service" - }, - "uniqueItems": true - }, - "workflows": { - "title": "Workflows", - "description": "List of workflows that can be declared to accomplish specific orchestrated goals and independently triggered.", - "$comment": "Different workflows can be designed to work together to perform end-to-end CI/CD builds and deployments.", - "type": "array", - "items": { - "$ref": "#/definitions/workflow" - }, - "uniqueItems": true - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "workflow": { - "title": "Workflow", - "description": "A specialized orchestration task.", - "$comment": "Workflow are as task themselves and can trigger other workflow tasks. 
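The performanceMetric definition above types the value and both confidence bounds as strings, so a conforming instance would look like this sketch (all values invented):

import json

# Note: "value", "lowerBound", and "upperBound" are strings per the schema.
metric = {
    "type": "accuracy",
    "value": "0.94",
    "slice": "transactions from new accounts",
    "confidenceInterval": {"lowerBound": "0.91", "upperBound": "0.97"}
}

print(json.dumps(metric, indent=2))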
These relationships can be modeled in the taskDependencies graph.", - "type": "object", - "required": [ - "bom-ref", - "uid", - "taskTypes" - ], - "additionalProperties": false, - "properties": { - "bom-ref": { - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the workflow elsewhere in the BOM. Every bom-ref MUST be unique within the BOM.", - "$ref": "#/definitions/refType" - }, - "uid": { - "title": "Unique Identifier (UID)", - "description": "The unique identifier for the resource instance within its deployment context.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "The name of the resource instance.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "A description of the resource instance.", - "type": "string" - }, - "resourceReferences": { - "title": "Resource references", - "description": "References to component or service resources that are used to realize the resource instance.", - "type": "array", - "uniqueItems": true, - "items": { - "$ref": "#/definitions/resourceReferenceChoice" - } - }, - "tasks": { - "title": "Tasks", - "description": "The tasks that comprise the workflow.", - "$comment": "Note that tasks can appear more than once as different instances (by name or UID).", - "type": "array", - "uniqueItems": true, - "items": { - "$ref": "#/definitions/task" - } - }, - "taskDependencies": { - "title": "Task dependency graph", - "description": "The graph of dependencies between tasks within the workflow.", - "type": "array", - "uniqueItems": true, - "items": { - "$ref": "#/definitions/dependency" - } - }, - "taskTypes": { - "title": "Task types", - "description": "Indicates the types of activities performed by the set of workflow tasks.", - "$comment": "Currently, these types reflect common CI/CD actions.", - "type": "array", - "items": { - "$ref": "#/definitions/taskType" - } - }, - "trigger": { - "title": "Trigger", - "description": "The trigger that initiated the task.", - "$ref": "#/definitions/trigger" - }, - "steps": { - "title": "Steps", - "description": "The sequence of steps for the task.", - "type": "array", - "items": { - "$ref": "#/definitions/step" - }, - "uniqueItems": true - }, - "inputs": { - "title": "Inputs", - "description": "Represents resources and data brought into a task at runtime by executor or task commands", - "examples": ["a `configuration` file which was declared as a local `component` or `externalReference`"], - "type": "array", - "items": { - "$ref": "#/definitions/inputType" - }, - "uniqueItems": true - }, - "outputs": { - "title": "Outputs", - "description": "Represents resources and data output from a task at runtime by executor or task commands", - "examples": ["a log file or metrics data produced by the task"], - "type": "array", - "items": { - "$ref": "#/definitions/outputType" - }, - "uniqueItems": true - }, - "timeStart": { - "title": "Time start", - "description": "The date and time (timestamp) when the task started.", - "type": "string", - "format": "date-time" - }, - "timeEnd": { - "title": "Time end", - "description": "The date and time (timestamp) when the task ended.", - "type": "string", - "format": "date-time" - }, - "workspaces": { - "title": "Workspaces", - "description": "A set of named filesystem or data resource shareable by workflow tasks.", - "type": "array", - "uniqueItems": true, - "items": { - "$ref": "#/definitions/workspace" - } - }, - "runtimeTopology": { - "title": "Runtime topology", - "description": "A 
graph of the component runtime topology for workflow's instance.", - "$comment": "A description of the runtime component and service topology. This can describe a partial or complete topology used to host and execute the task (e.g., hardware, operating systems, configurations, etc.),", - "type": "array", - "uniqueItems": true, - "items": { - "$ref": "#/definitions/dependency" - } - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "task": { - "title": "Task", - "description": "Describes the inputs, sequence of steps and resources used to accomplish a task and its output.", - "$comment": "Tasks are building blocks for constructing assemble CI/CD workflows or pipelines.", - "type": "object", - "required": [ - "bom-ref", - "uid", - "taskTypes" - ], - "additionalProperties": false, - "properties": { - "bom-ref": { - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the task elsewhere in the BOM. Every bom-ref MUST be unique within the BOM.", - "$ref": "#/definitions/refType" - }, - "uid": { - "title": "Unique Identifier (UID)", - "description": "The unique identifier for the resource instance within its deployment context.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "The name of the resource instance.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "A description of the resource instance.", - "type": "string" - }, - "resourceReferences": { - "title": "Resource references", - "description": "References to component or service resources that are used to realize the resource instance.", - "type": "array", - "uniqueItems": true, - "items": { - "$ref": "#/definitions/resourceReferenceChoice" - } - }, - "taskTypes": { - "title": "Task types", - "description": "Indicates the types of activities performed by the set of workflow tasks.", - "$comment": "Currently, these types reflect common CI/CD actions.", - "type": "array", - "items": { - "$ref": "#/definitions/taskType" - } - }, - "trigger": { - "title": "Trigger", - "description": "The trigger that initiated the task.", - "$ref": "#/definitions/trigger" - }, - "steps": { - "title": "Steps", - "description": "The sequence of steps for the task.", - "type": "array", - "items": { - "$ref": "#/definitions/step" - }, - "uniqueItems": true - }, - "inputs": { - "title": "Inputs", - "description": "Represents resources and data brought into a task at runtime by executor or task commands", - "examples": ["a `configuration` file which was declared as a local `component` or `externalReference`"], - "type": "array", - "items": { - "$ref": "#/definitions/inputType" - }, - "uniqueItems": true - }, - "outputs": { - "title": "Outputs", - "description": "Represents resources and data output from a task at runtime by executor or task commands", - "examples": ["a log file or metrics data produced by the task"], - "type": "array", - "items": { - "$ref": "#/definitions/outputType" - }, - "uniqueItems": true - }, - "timeStart": { - "title": "Time start", - "description": "The date and time (timestamp) when the task started.", - "type": "string", - "format": "date-time" - }, - "timeEnd": { - "title": "Time end", - "description": "The date and time (timestamp) when the task ended.", - "type": "string", - "format": "date-time" - }, - "workspaces": { - "title": "Workspaces", - "description": "A set of named filesystem or data resource shareable by workflow tasks.", - "type": 
"array", - "items": { - "$ref": "#/definitions/workspace" - }, - "uniqueItems": true - }, - "runtimeTopology": { - "title": "Runtime topology", - "description": "A graph of the component runtime topology for task's instance.", - "$comment": "A description of the runtime component and service topology. This can describe a partial or complete topology used to host and execute the task (e.g., hardware, operating systems, configurations, etc.),", - "type": "array", - "items": { - "$ref": "#/definitions/dependency" - }, - "uniqueItems": true - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "step": { - "type": "object", - "description": "Executes specific commands or tools in order to accomplish its owning task as part of a sequence.", - "additionalProperties": false, - "properties": { - "name": { - "title": "Name", - "description": "A name for the step.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "A description of the step.", - "type": "string" - }, - "commands": { - "title": "Commands", - "description": "Ordered list of commands or directives for the step", - "type": "array", - "items": { - "$ref": "#/definitions/command" - } - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "command": { - "type": "object", - "additionalProperties": false, - "properties": { - "executed": { - "title": "Executed", - "description": "A text representation of the executed command.", - "type": "string" - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "workspace": { - "title": "Workspace", - "description": "A named filesystem or data resource shareable by workflow tasks.", - "type": "object", - "required": [ - "bom-ref", - "uid" - ], - "additionalProperties": false, - "properties": { - "bom-ref": { - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the workspace elsewhere in the BOM. Every bom-ref MUST be unique within the BOM.", - "$ref": "#/definitions/refType" - }, - "uid": { - "title": "Unique Identifier (UID)", - "description": "The unique identifier for the resource instance within its deployment context.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "The name of the resource instance.", - "type": "string" - }, - "aliases": { - "title": "Aliases", - "description": "The names for the workspace as referenced by other workflow tasks. 
Effectively, a name mapping so other tasks can use their own local name in their steps.", - "type": "array", - "items": {"type": "string"} - }, - "description": { - "title": "Description", - "description": "A description of the resource instance.", - "type": "string" - }, - "resourceReferences": { - "title": "Resource references", - "description": "References to component or service resources that are used to realize the resource instance.", - "type": "array", - "uniqueItems": true, - "items": { - "$ref": "#/definitions/resourceReferenceChoice" - } - }, - "accessMode": { - "title": "Access mode", - "description": "Describes the read-write access control for the workspace relative to the owning resource instance.", - "type": "string", - "enum": [ - "read-only", - "read-write", - "read-write-once", - "write-once", - "write-only" - ] - }, - "mountPath": { - "title": "Mount path", - "description": "A path to a location on disk where the workspace will be available to the associated task's steps.", - "type": "string" - }, - "managedDataType": { - "title": "Managed data type", - "description": "The name of a domain-specific data type the workspace represents.", - "$comment": "This property is for CI/CD frameworks that are able to provide access to structured, managed data at a more granular level than a filesystem.", - "examples": ["ConfigMap","Secret"], - "type": "string" - }, - "volumeRequest": { - "title": "Volume request", - "description": "Identifies the reference to the request for a specific volume type and parameters.", - "examples": ["a kubernetes Persistent Volume Claim (PVC) name"], - "type": "string" - }, - "volume": { - "title": "Volume", - "description": "Information about the actual volume instance allocated to the workspace.", - "$comment": "The actual volume allocated may be different than the request.", - "examples": ["see https://kubernetes.io/docs/concepts/storage/persistent-volumes/"], - "$ref": "#/definitions/volume" - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "volume": { - "title": "Volume", - "description": "An identifiable, logical unit of data storage tied to a physical device.", - "type": "object", - "additionalProperties": false, - "properties": { - "uid": { - "title": "Unique Identifier (UID)", - "description": "The unique identifier for the volume instance within its deployment context.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "The name of the volume instance", - "type": "string" - }, - "mode": { - "title": "Mode", - "description": "The mode for the volume instance.", - "type": "string", - "enum": [ - "filesystem", "block" - ], - "default": "filesystem" - }, - "path": { - "title": "Path", - "description": "The underlying path created from the actual volume.", - "type": "string" - }, - "sizeAllocated": { - "title": "Size allocated", - "description": "The allocated size of the volume accessible to the associated workspace. 
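And a small workspace sketch tying together the fields defined above; accessMode must be one of the enumerated values, and the names, mount path, and PVC reference are invented (the PVC usage mirrors the volumeRequest example given in the schema).

import json

# Required fields: bom-ref, uid. accessMode is constrained to the enum above.
workspace = {
    "bom-ref": "workspace-cache-1",
    "uid": "ws-0001",
    "name": "build-cache",
    "aliases": ["cache"],
    "accessMode": "read-write",
    "mountPath": "/workspace/cache",
    "volumeRequest": "build-cache-pvc"   # e.g., a Kubernetes PVC name
}

print(json.dumps(workspace, indent=2))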
This should include the scalar size as well as IEC standard unit in either decimal or binary form.", - "examples": ["10GB", "2Ti", "1Pi"], - "type": "string" - }, - "persistent": { - "title": "Persistent", - "description": "Indicates if the volume persists beyond the life of the resource it is associated with.", - "type": "boolean" - }, - "remote": { - "title": "Remote", - "description": "Indicates if the volume is remotely (i.e., network) attached.", - "type": "boolean" - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "trigger": { - "title": "Trigger", - "description": "Represents a resource that can conditionally activate (or fire) tasks based upon associated events and their data.", - "type": "object", - "additionalProperties": false, - "required": [ - "type", - "bom-ref", - "uid" - ], - "properties": { - "bom-ref": { - "title": "BOM Reference", - "description": "An optional identifier which can be used to reference the trigger elsewhere in the BOM. Every bom-ref MUST be unique within the BOM.", - "$ref": "#/definitions/refType" - }, - "uid": { - "title": "Unique Identifier (UID)", - "description": "The unique identifier for the resource instance within its deployment context.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "The name of the resource instance.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "A description of the resource instance.", - "type": "string" - }, - "resourceReferences": { - "title": "Resource references", - "description": "References to component or service resources that are used to realize the resource instance.", - "type": "array", - "uniqueItems": true, - "items": { - "$ref": "#/definitions/resourceReferenceChoice" - } - }, - "type": { - "title": "Type", - "description": "The source type of event which caused the trigger to fire.", - "type": "string", - "enum": [ - "manual", - "api", - "webhook", - "scheduled" - ] - }, - "event": { - "title": "Event", - "description": "The event data that caused the associated trigger to activate.", - "$ref": "#/definitions/event" - }, - "conditions": { - "type": "array", - "uniqueItems": true, - "items": { - "$ref": "#/definitions/condition" - } - }, - "timeActivated": { - "title": "Time activated", - "description": "The date and time (timestamp) when the trigger was activated.", - "type": "string", - "format": "date-time" - }, - "inputs": { - "title": "Inputs", - "description": "Represents resources and data brought into a task at runtime by executor or task commands", - "examples": ["a `configuration` file which was declared as a local `component` or `externalReference`"], - "type": "array", - "items": { - "$ref": "#/definitions/inputType" - }, - "uniqueItems": true - }, - "outputs": { - "title": "Outputs", - "description": "Represents resources and data output from a task at runtime by executor or task commands", - "examples": ["a log file or metrics data produced by the task"], - "type": "array", - "items": { - "$ref": "#/definitions/outputType" - }, - "uniqueItems": true - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "event": { - "title": "Event", - "description": "Represents something that happened that may trigger a response.", - "type": "object", - "additionalProperties": false, - "properties": { - "uid": { - "title": "Unique Identifier (UID)", - "description": "The unique 
identifier of the event.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "A description of the event.", - "type": "string" - }, - "timeReceived": { - "title": "Time Received", - "description": "The date and time (timestamp) when the event was received.", - "type": "string", - "format": "date-time" - }, - "data": { - "title": "Data", - "description": "Encoding of the raw event data.", - "$ref": "#/definitions/attachment" - }, - "source": { - "title": "Source", - "description": "References the component or service that was the source of the event", - "$ref": "#/definitions/resourceReferenceChoice" - }, - "target": { - "title": "Target", - "description": "References the component or service that was the target of the event", - "$ref": "#/definitions/resourceReferenceChoice" - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "inputType": { - "title": "Input type", - "description": "Type that represents various input data types and formats.", - "type": "object", - "oneOf": [ - { - "required": [ - "resource" - ] - }, - { - "required": [ - "parameters" - ] - }, - { - "required": [ - "environmentVars" - ] - }, - { - "required": [ - "data" - ] - } - ], - "additionalProperties": false, - "properties": { - "source": { - "title": "Source", - "description": "A references to the component or service that provided the input to the task (e.g., reference to a service with data flow value of `inbound`)", - "examples": [ - "source code repository", - "database" - ], - "$ref": "#/definitions/resourceReferenceChoice" - }, - "target": { - "title": "Target", - "description": "A reference to the component or service that received or stored the input if not the task itself (e.g., a local, named storage workspace)", - "examples": [ - "workspace", - "directory" - ], - "$ref": "#/definitions/resourceReferenceChoice" - }, - "resource": { - "title": "Resource", - "description": "A reference to an independent resource provided as an input to a task by the workflow runtime.", - "examples": [ - "reference to a configuration file in a repository (i.e., a bom-ref)", - "reference to a scanning service used in a task (i.e., a bom-ref)" - ], - "$ref": "#/definitions/resourceReferenceChoice" - }, - "parameters": { - "title": "Parameters", - "description": "Inputs that have the form of parameters with names and values.", - "type": "array", - "uniqueItems": true, - "items": { - "$ref": "#/definitions/parameter" - } - }, - "environmentVars": { - "title": "Environment variables", - "description": "Inputs that have the form of parameters with names and values.", - "type": "array", - "uniqueItems": true, - "items": { - "oneOf": [ - { - "$ref": "#/definitions/property" - }, - { - "type": "string" - } - ] - } - }, - "data": { - "title": "Data", - "description": "Inputs that have the form of data.", - "$ref": "#/definitions/attachment" - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "outputType": { - "type": "object", - "oneOf": [ - { - "required": [ - "resource" - ] - }, - { - "required": [ - "environmentVars" - ] - }, - { - "required": [ - "data" - ] - } - ], - "additionalProperties": false, - "properties": { - "type": { - "title": "Type", - "description": "Describes the type of data output.", - "type": "string", - "enum": [ - "artifact", - "attestation", - "log", - "evidence", - "metrics", - "other" - ] - }, - "source": { - 
"title": "Source", - "description": "Component or service that generated or provided the output from the task (e.g., a build tool)", - "$ref": "#/definitions/resourceReferenceChoice" - }, - "target": { - "title": "Target", - "description": "Component or service that received the output from the task (e.g., reference to an artifactory service with data flow value of `outbound`)", - "examples": ["a log file described as an `externalReference` within its target domain."], - "$ref": "#/definitions/resourceReferenceChoice" - }, - "resource": { - "title": "Resource", - "description": "A reference to an independent resource generated as output by the task.", - "examples": [ - "configuration file", - "source code", - "scanning service" - ], - "$ref": "#/definitions/resourceReferenceChoice" - }, - "data": { - "title": "Data", - "description": "Outputs that have the form of data.", - "$ref": "#/definitions/attachment" - }, - "environmentVars": { - "title": "Environment variables", - "description": "Outputs that have the form of environment variables.", - "type": "array", - "items": { - "oneOf": [ - { - "$ref": "#/definitions/property" - }, - { - "type": "string" - } - ] - }, - "uniqueItems": true - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "resourceReferenceChoice": { - "title": "Resource reference choice", - "description": "A reference to a locally defined resource (e.g., a bom-ref) or an externally accessible resource.", - "$comment": "Enables reference to a resource that participates in a workflow; using either internal (bom-ref) or external (externalReference) types.", - "type": "object", - "additionalProperties": false, - "properties": { - "ref": { - "title": "BOM Reference", - "description": "References an object by its bom-ref attribute", - "anyOf": [ - { - "title": "Ref", - "$ref": "#/definitions/refLinkType" - }, - { - "title": "BOM-Link Element", - "$ref": "#/definitions/bomLinkElementType" - } - ] - }, - "externalReference": { - "title": "External reference", - "description": "Reference to an externally accessible resource.", - "$ref": "#/definitions/externalReference" - } - }, - "oneOf": [ - { - "required": [ - "ref" - ] - }, - { - "required": [ - "externalReference" - ] - } - ] - }, - "condition": { - "title": "Condition", - "description": "A condition that was used to determine a trigger should be activated.", - "type": "object", - "additionalProperties": false, - "properties": { - "description": { - "title": "Description", - "description": "Describes the set of conditions which cause the trigger to activate.", - "type": "string" - }, - "expression": { - "title": "Expression", - "description": "The logical expression that was evaluated that determined the trigger should be fired.", - "type": "string" - }, - "properties": { - "type": "array", - "title": "Properties", - "items": { - "$ref": "#/definitions/property" - } - } - } - }, - "taskType": { - "type": "string", - "enum": [ - "copy", - "clone", - "lint", - "scan", - "merge", - "build", - "test", - "deliver", - "deploy", - "release", - "clean", - "other" - ] - }, - "parameter": { - "title": "Parameter", - "description": "A representation of a functional parameter.", - "type": "object", - "additionalProperties": false, - "properties": { - "name": { - "title": "Name", - "description": "The name of the parameter.", - "type": "string" - }, - "value": { - "title": "Value", - "description": "The value of the parameter.", - "type": "string" - }, - 
"dataType": { - "title": "Data type", - "description": "The data type of the parameter.", - "type": "string" - } - } - }, - "signature": { - "$ref": "jsf-0.82.schema.json#/definitions/signature", - "title": "Signature", - "description": "Enveloped signature in [JSON Signature Format (JSF)](https://cyberphone.github.io/doc/security/jsf.html)." - } - } -} diff --git a/schema/cyclonedx/cyclonedx.xsd b/schema/cyclonedx/cyclonedx.xsd deleted file mode 100644 index 23dc620a095..00000000000 --- a/schema/cyclonedx/cyclonedx.xsd +++ /dev/null @@ -1,8290 +0,0 @@ - - - - - - - - - CycloneDX Bill of Materials Standard - https://cyclonedx.org/ - Apache License, Version 2.0 - - - - - - Identifier for referable and therefore interlink-able elements. - - - - - - - - - - Descriptor for an element identified by the attribute "bom-ref" in the same BOM document. - In contrast to `bomLinkElementType`. - - - - - - - - - - - - - - - - - =2.0.0|<5.0.0" - - "vers:pypi/0.0.0|0.0.1|0.0.2|0.0.3|1.0|2.0pre1" - - "vers:tomee/>=1.0.0-beta1|<=1.7.5|>=7.0.0-M1|<=7.0.7|>=7.1.0|<=7.1.2|>=8.0.0-M1|<=8.0.1" - - "vers:gem/>=2.2.0|!= 2.2.1|<2.3.0" - ]]> - - - - - - - - - - - - Descriptor for another BOM document. - See https://cyclonedx.org/capabilities/bomlink/ - - - - - - - - - - - Descriptor for an element in another BOM document. - See https://cyclonedx.org/capabilities/bomlink/ - - - - - - - - - - - - - - - - The date and time (timestamp) when the BOM was created. - - - - - - Lifecycles communicate the stage(s) in which data in the BOM was captured. Different types of data may be available at various phases of a lifecycle, such as the Software Development Lifecycle (SDLC), IT Asset Management (ITAM), and Software Asset Management (SAM). Thus, a BOM may include data specific to or only obtainable in a given lifecycle. - - - - - - - - - - - - A pre-defined phase in the product lifecycle. - - - - - - - - - The name of the lifecycle phase - - - - - - - The description of the lifecycle phase - - - - - - - - - - - - - The tool(s) used in the creation of the BOM. - - - - - - - DEPRECATED. Use tools\components or tools\services instead. - - - - - - - A list of software and hardware components used as tools. - - - - - A list of services used as tools. - - - - - - - - - - The person(s) who created the BOM. - Authors are common in BOMs created through manual processes. BOMs created through automated means may have './manufacturer' instead. - - - - - - - - - - - The component that the BOM describes. - - - - - - The organization that created the BOM. - Manufacturer is common in BOMs created through automated processes. BOMs created through manual means may have './authors' instead. - - - - - - - DEPRECATED - DO NOT USE. This will be removed in a future version. Use the `./component/manufacturer` instead. - The organization that manufactured the component that the BOM describes. - - - - - - The organization that supplied the component that the BOM describes. The - supplier may often be the manufacturer, but may also be a distributor or repackager. - - - - - - The license information for the BOM document. - This may be different from the license(s) of the component(s) that the BOM describes. - - - - - - Provides the ability to document properties in a name/value store. - This provides flexibility to include data not officially supported in the standard - without having to use additional namespaces or create extensions. 
Property names - of interest to the general public are encouraged to be registered in the - CycloneDX Property Taxonomy - https://github.com/CycloneDX/cyclonedx-property-taxonomy. - Formal registration is OPTIONAL. - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - - - - BOM produced early in the development lifecycle containing inventory of components and services - that are proposed or planned to be used. The inventory may need to be procured, retrieved, - or resourced prior to use. - - - - - - - BOM consisting of information obtained prior to a build process and may contain source files - and development artifacts and manifests. The inventory may need to be resolved and retrieved - prior to use. - - - - - - - BOM consisting of information obtained during a build process where component inventory is - available for use. The precise versions of resolved components are usually available at this - time as well as the provenance of where the components were retrieved from. - - - - - - - BOM consisting of information obtained after a build process has completed and the resulting - components(s) are available for further analysis. Built components may exist as the result of a - CI/CD process, may have been installed or deployed to a system or device, and may need to be - retrieved or extracted from the system or device. - - - - - - - BOM produced that represents inventory that is running and operational. This may include staging - or production environments and will generally encompass multiple SBOMs describing the applications - and operating system, along with HBOMs describing the hardware that makes up the system. Operations - Bill of Materials (OBOM) can provide full-stack inventory of runtime environments, configurations, - and additional dependencies. - - - - - - - BOM consisting of information observed through network discovery providing point-in-time - enumeration of embedded, on-premise, and cloud-native services such as server applications, - connected devices, microservices, and serverless functions. - - - - - - - BOM containing inventory that will be, or has been retired from operations. - - - - - - - - - - - The name of the organization - - - - - The physical address (location) of the organization. - - - - - The URL of the organization. Multiple URLs are allowed. - - - - - A contact person at the organization. Multiple contacts are allowed. - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - - An optional identifier which can be used to reference the object elsewhere in the BOM. - Uniqueness is enforced within all elements and children of the root-level bom element. - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - Information about the automated or manual tool used - - - - - The name of the vendor who created the tool - - - - - The name of the tool - - - - - The version of the tool - - - - - - - - - - - - Provides the ability to document external references related to the tool. - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. 
- - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - - - The name of the contact - - - - - The email address of the contact. - - - - - The phone number of the contact. - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - - An optional identifier which can be used to reference the object elsewhere in the BOM. - Uniqueness is enforced within all elements and children of the root-level bom element. - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - - - The organization that supplied the component. The supplier may often - be the manufacturer, but may also be a distributor or repackager. - - - - - - The organization that created the component. - Manufacturer is common in components created through automated processes. Components created through manual means may have './authors' instead. - - - - - - - The person(s) who created the component. - Authors are common in components created through manual processes. Components created through automated means may have `./manufacturer` instead. - - - - - - - - - - - - DEPRECATED - DO NOT USE. This will be removed in a future version. Use `./authors` or `./manufacturer` instead. - The person(s) or organization(s) that authored the component. - - - - - - The person(s) or organization(s) that published the component - - - - - The grouping name or identifier. This will often be a shortened, single - name of the company or project that produced the component, or the source package or - domain name. Whitespace and special characters should be avoided. Examples include: - apache, org.apache.commons, and apache.org. - - - - - The name of the component. This will often be a shortened, single name - of the component. Examples: commons-lang3 and jquery - - - - - The component version. The version should ideally comply with semantic versioning - but is not enforced. - - - - - Specifies a description for the component - - - - - Specifies the scope of the component. If scope is not specified, 'required' - scope SHOULD be assumed by the consumer of the BOM. - - - - - - - - - - - - - A copyright notice informing users of the underlying claims to - copyright ownership in a published work. - - - - - - Specifies a well-formed CPE name that conforms to the CPE 2.2 or 2.3 specification. See https://nvd.nist.gov/products/cpe - - - - - - - Specifies the package-url (purl). The purl, if specified, MUST be valid and conform - to the specification defined at: https://github.com/package-url/purl-spec - - - - - - - Specifies the OmniBOR Artifact ID. The OmniBOR, if specified, MUST be valid and conform - to the specification defined at: https://www.iana.org/assignments/uri-schemes/prov/gitoid - - - - - - - Specifies the Software Heritage persistent identifier (SWHID). 
The SWHID, if specified, MUST - be valid and conform to the specification defined at: - https://docs.softwareheritage.org/devel/swh-model/persistent-identifiers.html - - - - - - - Specifies metadata and content for ISO-IEC 19770-2 Software Identification (SWID) Tags. - - - - - - - DEPRECATED - DO NOT USE. This will be removed in a future version. Use the pedigree - element instead to supply information on exactly how the component was modified. - A boolean value indicating if the component has been modified from the original. - A value of true indicates the component is a derivative of the original. - A value of false indicates the component has not been modified from the original. - - - - - - - Component pedigree is a way to document complex supply chain scenarios where components are - created, distributed, modified, redistributed, combined with other components, etc. - - - - - - Provides the ability to document external references related to the - component or to the project the component describes. - - - - - Provides the ability to document properties in a name/value store. - This provides flexibility to include data not officially supported in the standard - without having to use additional namespaces or create extensions. Property names - of interest to the general public are encouraged to be registered in the - CycloneDX Property Taxonomy - https://github.com/CycloneDX/cyclonedx-property-taxonomy. - Formal registration is OPTIONAL. - - - - - - A list of software and hardware components included in the parent component. This is not a - dependency tree. It provides a way to specify a hierarchical representation of component - assemblies, similar to system -> subsystem -> parts assembly in physical supply chains. - - - - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - - - Provides the ability to document evidence collected through various forms of extraction or analysis. - - - - - Specifies optional release notes. - - - - - A model card describes the intended uses of a machine learning model and potential - limitations, including biases and ethical considerations. Model cards typically contain the - training parameters, which datasets were used to train the model, performance metrics, and other - relevant data useful for ML transparency. This object SHOULD be specified for any component of - type `machine-learning-model` and MUST NOT be specified for other component types. - - - - - This object SHOULD be specified for any component of type `data` and MUST NOT be - specified for other component types. - - - - - - Cryptographic assets have properties that uniquely define them and that make them actionable - for further reasoning. As an example, it makes a difference if one knows the algorithm family - (e.g. AES) or the specific variant or instantiation (e.g. AES-128-GCM). This is because the - security level and the algorithm primitive (authenticated encryption) is only defined by the - definition of the algorithm variant. The presence of a weak cryptographic algorithm like SHA1 - vs. HMAC-SHA1 also makes a difference. - - - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - - Specifies the type of component. For software components, classify as application if no more - specific appropriate classification is available or cannot be determined for the component. - - - - - - - The OPTIONAL mime-type of the component. 
When used on file components, the mime-type - can provide additional context about the kind of file being represented such as an image, - font, or executable. Some library or framework components may also have an associated mime-type. - - - - - - - An optional identifier which can be used to reference the component elsewhere in the BOM. - Uniqueness is enforced within all elements and children of the root-level bom element. - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - - - - A valid SPDX license ID - - - - - If SPDX does not define the license used, this field may be used to provide the license name - - - - - - Specifies the optional full text of the attachment - - - - - The URL to the attachment file. If the attachment is a license or BOM, - an externalReference should also be specified for completeness. - - - - - Licensing details describing the licensor/licensee, license type, renewal and - expiration dates, and other important metadata - - - - - - License identifiers that may be used to manage licenses and - their lifecycle - - - - - - - - - - The individual or organization that grants a license to another - individual or organization - - - - - - - The organization that granted the license - - - - - The individual, not associated with an organization, - that granted the license - - - - - - - - - The individual or organization for which a license was granted to - - - - - - - The organization that was granted the license - - - - - The individual, not associated with an organization, - that was granted the license - - - - - - - - - The individual or organization that purchased the license - - - - - - - The organization that purchased the license - - - - - The individual, not associated with an organization, - that purchased the license - - - - - - - - - The purchase order identifier the purchaser sent to a supplier or - vendor to authorize a purchase - - - - - The type of license(s) that was granted to the licensee - - - - - - - - - - The timestamp indicating when the license was last - renewed. For new purchases, this is often the purchase or acquisition date. - For non-perpetual licenses or subscriptions, this is the timestamp of when the - license was last renewed. - - - - - The timestamp indicating when the current license - expires (if applicable). - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - - - Provides the ability to document properties in a name/value store. - This provides flexibility to include data not officially supported in the standard - without having to use additional namespaces or create extensions. Property names - of interest to the general public are encouraged to be registered in the - CycloneDX Property Taxonomy - https://github.com/CycloneDX/cyclonedx-property-taxonomy. - Formal registration is OPTIONAL. - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - - An optional identifier which can be used to reference the license elsewhere in the BOM. - Uniqueness is enforced within all elements and children of the root-level bom element. - - - - - - - Declared licenses and concluded licenses represent two different stages in the - licensing process within software development. Declared licenses refer to the - initial intention of the software authors regarding the licensing terms under - which their code is released. 
On the other hand, concluded licenses are the - result of a comprehensive analysis of the project's codebase to identify and - confirm the actual licenses of the components used, which may differ from the - initially declared licenses. While declared licenses provide an upfront indication - of the licensing intentions, concluded licenses offer a more thorough understanding - of the actual licensing within a project, facilitating proper compliance and risk - management. Observed licenses are defined in `evidence.licenses`. Observed licenses - form the evidence necessary to substantiate a concluded license. - - - - - - - - - - The attachment data. Proactive controls such as input validation and sanitization should be employed to prevent misuse of attachment text. - - - - Specifies the content type of the text. Defaults to text/plain - if not specified. - - - - - - Specifies the optional encoding the text is represented in - - - - - - - - - - Specifies the file hash of the component - - - - - - Specifies the algorithm used to create the hash - - - - - - - - - - - The component is required for runtime - - - - - The component is optional at runtime. Optional components are components that - are not capable of being called due to them not be installed or otherwise accessible by any means. - Components that are installed but due to configuration or other restrictions are prohibited from - being called must be scoped as 'required'. - - - - - Components that are excluded provide the ability to document component usage - for test and other non-runtime purposes. Excluded components are not reachable within a call - graph at runtime. - - - - - - - - - - A software application. Refer to https://en.wikipedia.org/wiki/Application_software - for information about applications. - - - - - A software framework. Refer to https://en.wikipedia.org/wiki/Software_framework - for information on how frameworks vary slightly from libraries. - - - - - A software library. Refer to https://en.wikipedia.org/wiki/Library_(computing) - for information about libraries. All third-party and open source reusable components will likely - be a library. If the library also has key features of a framework, then it should be classified - as a framework. If not, or is unknown, then specifying library is recommended. - - - - - A packaging and/or runtime format, not specific to any particular technology, - which isolates software inside the container from software outside of a container through - virtualization technology. Refer to https://en.wikipedia.org/wiki/OS-level_virtualization - - - - - A runtime environment which interprets or executes software. This may include - runtimes such as those that execute bytecode or low-code/no-code application platforms. - - - - - A software operating system without regard to deployment model - (i.e. installed on physical hardware, virtual machine, image, etc) Refer to - https://en.wikipedia.org/wiki/Operating_system - - - - - A hardware device such as a processor, or chip-set. A hardware device - containing firmware SHOULD include a component for the physical hardware itself, and another - component of type 'firmware' or 'operating-system' (whichever is relevant), describing - information about the software running on the device. - See also the list of known device properties: https://github.com/CycloneDX/cyclonedx-property-taxonomy/blob/main/cdx/device.md - - - - - - A special type of software that operates or controls a particular type of device. 
- Refer to https://en.wikipedia.org/wiki/Device_driver - - - - - A special type of software that provides low-level control over a devices - hardware. Refer to https://en.wikipedia.org/wiki/Firmware - - - - - A computer file. Refer to https://en.wikipedia.org/wiki/Computer_file - for information about files. - - - - - A model based on training data that can make predictions or decisions without - being explicitly programmed to do so. - - - - - A collection of discrete values that convey information. - - - - - A cryptographic asset including algorithms, protocols, certificates, keys, tokens, and secrets. - - - - - - - - - - - - - - - - - - - - - - - - - - - A license that grants use of software solely for the purpose - of education or research. - - - - - A license covering use of software embedded in a specific - piece of hardware. - - - - - A Client Access License (CAL) allows client computers to access - services provided by server software. - - - - - A Concurrent User license (aka floating license) limits the - number of licenses for a software application and licenses are shared among - a larger number of users. - - - - - A license where the core of a computer's processor is assigned - a specific number of points. - - - - - A license for which consumption is measured by non-standard - metrics. - - - - - A license that covers a defined number of installations on - computers and other types of devices. - - - - - A license that grants permission to install and use software - for trial purposes. - - - - - A license that grants access to the software to one or more - pre-defined users. - - - - - A license that grants access to the software on one or more - pre-defined computers or devices. - - - - - An Original Equipment Manufacturer license that is delivered - with hardware, cannot be transferred to other hardware, and is valid for the - life of the hardware. - - - - - A license where the software is sold on a one-time basis and - the licensee can use a copy of the software indefinitely. - - - - - A license where each installation consumes points per - processor. - - - - - A license where the licensee pays a fee to use the software - or service. - - - - - A license that grants access to the software or service by a - specified number of users. - - - - - Another license type. - - - - - - - - - - - - - - - - - - - - - - - - - - - Define the format for acceptable CPE URIs. Supports CPE 2.2 and CPE 2.3 formats. - Refer to https://nvd.nist.gov/products/cpe for official specification. - - - - - - - - - - - - Specifies the full content of the SWID tag. - - - - - The URL to the SWID file. - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - Maps to the tagId of a SoftwareIdentity. - - - - - Maps to the name of a SoftwareIdentity. - - - - - Maps to the version of a SoftwareIdentity. - - - - - Maps to the tagVersion of a SoftwareIdentity. - - - - - Maps to the patch of a SoftwareIdentity. - - - - - - - - Defines a string representation of a UUID conforming to RFC 4122. 
- - - - - - - - - - - - Version Control System - - - - - Issue or defect tracking system, or an Application Lifecycle Management (ALM) system - - - - - Website - - - - - Security advisories - - - - - Bill-of-materials (SBOM, OBOM, HBOM, SaaSBOM, etc) - - - - - Mailing list or discussion group - - - - - Social media account - - - - - Real-time chat platform - - - - - Documentation, guides, or how-to instructions - - - - - Community or commercial support - - - - - The location where the source code distributable can be obtained. This is often an archive format such as zip or tgz. The source-distribution type complements use of the version control (vcs) type. - - - - - Direct or repository download location - - - - - The location where a component was published to. This is often the same as "distribution" but may also include specialized publishing processes that act as an intermediary - - - - - The URL to the license file. If a license URL has been defined in the license - node, it should also be defined as an external reference for completeness - - - - - Build-system specific meta file (i.e. pom.xml, package.json, .nuspec, etc) - - - - - URL to an automated build system - - - - - URL to release notes - - - - - Specifies a way to contact the maintainer, supplier, or provider in the event of a security incident. Common URIs include links to a disclosure procedure, a mailto (RFC-2368) that specifies an email address, a tel (RFC-3966) that specifies a phone number, or dns (RFC-4501) that specifies the records containing DNS Security TXT. - - - - - A model card describes the intended uses of a machine learning model, potential - limitations, biases, ethical considerations, training parameters, datasets used to train the - model, performance metrics, and other relevant data useful for ML transparency. - - - - - A record of events that occurred in a computer system or application, such as problems, errors, or information on current operations. - - - - - Parameters or settings that may be used by other components or services. - - - - - Information used to substantiate a claim. - - - - - Describes how a component or service was manufactured or deployed. - - - - - Human or machine-readable statements containing facts, evidence, or testimony - - - - - An enumeration of identified weaknesses, threats, and countermeasures, dataflow diagram (DFD), attack tree, and other supporting documentation in human-readable or machine-readable format - - - - - The defined assumptions, goals, and capabilities of an adversary. - - - - - Identifies and analyzes the potential of future events that may negatively impact individuals, assets, and/or the environment. Risk assessments may also include judgments on the tolerability of each risk. - - - - - A Vulnerability Disclosure Report (VDR) which asserts the known and previously unknown vulnerabilities that affect a component, service, or product including the analysis and findings describing the impact (or lack of impact) that the reported vulnerability has on a component, service, or product. - - - - - A Vulnerability Exploitability eXchange (VEX) which asserts the known vulnerabilities that do not affect a product, product family, or organization, and optionally the ones that do. The VEX should include the analysis and findings describing the impact (or lack of impact) that the reported vulnerability has on the product, product family, or organization. 
- - - - - Results from an authorized simulated cyberattack on a component or service, otherwise known as a penetration test - - - - - SARIF or proprietary machine or human-readable report for which static analysis has identified code quality, security, and other potential issues with the source code - - - - - Dynamic analysis report that has identified issues such as vulnerabilities and misconfigurations - - - - - Report generated by analyzing the call stack of a running application - - - - - Report generated by Software Composition Analysis (SCA), container analysis, or other forms of component analysis - - - - - Report containing a formal assessment of an organization, business unit, or team against a maturity model - - - - - Industry, regulatory, or other certification from an accredited (if applicable) certification body - - - - - Report or system in which quality metrics can be obtained - - - - - Code or configuration that defines and provisions virtualized infrastructure, commonly referred to as Infrastructure as Code (IaC) - - - - - Plans of Action and Milestones (POAM) complement an "attestation" external reference. POAM is defined by NIST as a "document that identifies tasks needing to be accomplished. It details resources required to accomplish the elements of the plan, any milestones in meeting the tasks and scheduled completion dates for the milestones". - - - - - An e-signature is commonly a scanned representation of a written signature or a stylized script of the persons name. - - - - - A signature that leverages cryptography, typically public/private key pairs, which provides strong authenticity verification. - - - - - Document that complies with RFC-9116 (A File Format to Aid in Security Vulnerability Disclosure) - - - - - Use this if no other types accurately describe the purpose of the external reference - - - - - - - - - External references provide a way to document systems, sites, and information that may be - relevant, but are not included with the BOM. They may also establish specific relationships - within or external to the BOM. - - - - - - Zero or more external references can be defined - - - - - - - - - - The URI (URL or URN) to the external reference. External references - are URIs and therefore can accept any URL scheme including https, mailto, tel, and dns. - External references may also include formally registered URNs such as CycloneDX BOM-Link to - reference CycloneDX BOMs or any object within a BOM. BOM-Link transforms applicable external - references into relationships that can be expressed in a BOM or across BOMs. Refer to: - https://cyclonedx.org/capabilities/bomlink/ - - - - - - - - An optional comment describing the external reference - - - - - - - - - - - - - Specifies the type of external reference. There are built-in types to describe common - references. If a type does not exist for the reference being referred to, use the "other" type. - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - Zero or more commits can be specified. - - - - - Specifies an individual commit. - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - - - - - A unique identifier of the commit. This may be version control - specific. For example, Subversion uses revision numbers whereas git uses commit hashes. - - - - - - The URL to the commit. 
Commits: zero or more commits can be specified, each describing an individual commit. A commit records a unique identifier (version-control specific; Subversion uses revision numbers whereas git uses commit hashes), the URL to the commit (typically pointing into a version control system), the author who created the changes, the person who committed or pushed them, and the text description (message) of the commit's contents.

Patches: zero or more patches can be specified, each describing an individual patch. A patch records the patch file (diff) that shows the changes (see https://en.wikipedia.org/wiki/Diff), given as optional inline text and/or a URL, and the purpose of the patch: resolution of defects, security issues, or new behavior or functionality. Patch types are: unofficial (not developed by the creators or maintainers of the software being patched; see https://en.wikipedia.org/wiki/Unofficial_patch), monkey (dynamically modifies runtime behavior; see https://en.wikipedia.org/wiki/Monkey_patch), backport (takes code from a newer version and applies it to older versions of the same software; see https://en.wikipedia.org/wiki/Backporting), and cherry-pick (created by selectively applying commits from other versions or branches of the same software). A patch can also list the issues it resolves. An issue has a type (defect: a fault, flaw, or bug; enhancement: a new feature or behavior; security: a special type of defect which impacts security), an identifier assigned by its source, a name, a description, and the source where it is documented (a source name such as "National Vulnerability Database", "NVD", or "Apache", plus the URL of the issue documentation). Author and committer entries record the timestamp when the action occurred, and the name and email address of the individual who performed it.
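A minimal sketch, assuming the element names suggested by the descriptions above (the identifiers, URLs, and resolved issue are invented for illustration; both structures appear inside the component pedigree described next):

  <commits>
    <commit>
      <uid>7638417db6d59f3c431d3e1f261cc637155684cd</uid>
      <url>https://github.com/example/example-project/commit/7638417</url>
      <author>
        <timestamp>2024-01-15T10:00:00Z</timestamp>
        <name>Jane Developer</name>
        <email>jane@example.com</email>
      </author>
      <message>Fix boundary check in parser</message>
    </commit>
  </commits>
  <patches>
    <patch type="backport">
      <diff>
        <url>https://example.com/patches/fix-parser.diff</url>
      </diff>
      <resolves>
        <issue type="defect">
          <id>PROJ-1234</id>
          <name>Parser crash on malformed input</name>
        </issue>
      </resolves>
    </patch>
  </patches>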
Pedigree: component pedigree documents complex supply chain scenarios where components are created, distributed, modified, redistributed, combined with other components, and so on. It supports viewing this chain from the beginning, the end, or anywhere in the middle, and provides a way to document variants where the exact relation may not be known. Ancestors describe the components a component is derived from; this is commonly used for forks, where the forked component documented in the BOM carries a pedigree with a single ancestor pointing at the original component it was forked from. Descendants are the exact opposite: a way to document all forks (and their forks) of an original or root component. Variants describe relations where the relationship between the components is not known; for example, two components containing nearly identical code are related, but it may be unclear whether one is derived from the other or whether they share a common ancestor. A pedigree may also carry a list of commits providing a trail of how the component deviates from an ancestor, descendant, or variant; a list of patches, which may complement commits or be used in their place; and free-form notes, observations, and other non-structured commentary describing the pedigree.

Dependencies: a dependency entry references a component or service by its bom-ref attribute and lists the components or services it depends on, as well as the components or services defining a specification or standard which this object provides or implements (for example, a cryptographic library implementing a cryptographic algorithm; implementing another component does not imply the implementation is in use). The dependencies section defines the direct dependencies of a component or service. Components or services with no dependencies of their own MUST be declared as empty elements within the graph. Components or services not represented in the graph MAY have unknown dependencies; it is RECOMMENDED that implementations treat their absence as opaque rather than as an indicator of being dependency-free, and that compositions be used to indicate unknown dependency graphs. A minimal example follows.
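A dependency graph following the rules above might look like this (the purl-style bom-ref values are placeholders):

  <dependencies>
    <dependency ref="pkg:maven/com.example/app@1.0.0">
      <dependency ref="pkg:maven/com.example/library-a@2.3.1"/>
      <dependency ref="pkg:maven/com.example/library-b@4.0.0"/>
    </dependency>
    <!-- empty elements assert that these components have no known dependencies -->
    <dependency ref="pkg:maven/com.example/library-a@2.3.1"/>
    <dependency ref="pkg:maven/com.example/library-b@4.0.0"/>
  </dependencies>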
Services: a service records the organization that provides it; a group (often a shortened, single name of the company or project that produced the service, or a domain name, avoiding whitespace and special characters); a name (often a shortened, single name of the service); a version; a description; one or more endpoint URIs; whether the service requires authentication (true means authentication is required prior to use, false means it is not); whether use of the service crosses a trust zone or boundary (true means a trust boundary is crossed, false means it is not); and the name of the trust zone the service resides in. Service data describes classification and flow: the flat classification field is DEPRECATED as of v1.5 in favor of dataflow classification. Each dataflow entry carries a classification, the URI, URL, or BOM-Link of the components or services the data came in from and is sent to, a name for the defined data, and a short description of the data content and usage. A service may also carry external references; properties (a name/value store for data not officially supported by the standard, without additional namespaces or extensions; property names of interest to the general public are encouraged to be registered in the CycloneDX Property Taxonomy, https://github.com/CycloneDX/cyclonedx-property-taxonomy, though formal registration is OPTIONAL); nested services (a hierarchical representation of service assemblies, not a dependency tree); optional release notes; and an optional bom-ref identifier, unique within all elements and children of the root-level bom element, for referencing the service elsewhere in the BOM.

Data classification tags data according to its type, sensitivity, and value if altered, stolen, or destroyed, and each classification carries a flow direction. Valid directions are inbound (data enters the service), outbound (data leaves the service), bi-directional (data flows both ways), and unknown (the direction is not known); direction is relative to the service.

Licenses may be expressed as a valid SPDX license expression (see https://spdx.org/specifications for syntax requirements), for example "Apache-2.0 AND (MIT OR GPL-2.0-only)" or "GPL-3.0-only WITH Classpath-exception-2.0", with an optional identifier for referencing the license elsewhere in the BOM.
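Using one of the example expressions given above, a license declaration in expression form might look like this minimal sketch:

  <licenses>
    <expression>Apache-2.0 AND (MIT OR GPL-2.0-only)</expression>
  </licenses>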
Declared and concluded licenses represent two different stages in the licensing process within software development. Declared licenses refer to the initial intention of the software authors regarding the licensing terms under which their code is released. Concluded licenses, on the other hand, are the result of a comprehensive analysis of the project's codebase to identify and confirm the actual licenses of the components used, which may differ from the initially declared licenses. While declared licenses provide an upfront indication of the licensing intentions, concluded licenses offer a more thorough understanding of the actual licensing within a project, facilitating proper compliance and risk management. Observed licenses are defined in `evidence.licenses` and form the evidence necessary to substantiate a concluded license.

Evidence techniques: source-code analysis (examines the source code without executing it); binary analysis (examines a compiled binary through reverse engineering, typically via disassembly or bytecode reversal); manifest analysis (examines a package management system such as those used for building or installing software); AST fingerprint (examines the Abstract Syntax Tree of source code or a compiled binary); hash comparison (evaluates the cryptographic hash of a component against a set of pre-computed hashes of identified software); instrumentation (examines the call stack of running applications by intercepting and monitoring application logic without modifying the application); dynamic analysis (evaluates a running application); filename (evaluates the file name of a component against known file names of identified software); attestation (a testimony to the accuracy of the identity of a component made by an individual or entity); and any other technique.

Identity evidence substantiates the identity of a component. The identity may be a single object or an array of identity objects; support for a single object was introduced in CycloneDX v1.5, "unbounded" was introduced in v1.6, and it is RECOMMENDED that all implementations are aware of it. Identity evidence records the field of the component being described, the overall confidence from 0 to 1 (1 being 100% confidence), the value of the field (cpe, purl, etc.) concluded from the aggregate of all methods (if available), and the methods used to extract and/or analyze the evidence, each with its technique, an independent per-technique confidence from 0 to 1, and the value or contents of the evidence. Tools used for the analysis are referenced by bom-ref and should already be defined in the BOM, either in metadata/tools, components, or formulation.
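A sketch of identity evidence using the techniques enumerated above (the field choice, confidences, and values are illustrative only):

  <evidence>
    <identity>
      <field>purl</field>
      <confidence>0.8</confidence>
      <methods>
        <method>
          <technique>manifest-analysis</technique>
          <confidence>0.8</confidence>
          <value>./package-lock.json</value>
        </method>
        <method>
          <technique>filename</technique>
          <confidence>0.2</confidence>
          <value>example-1.0.0.jar</value>
        </method>
      </methods>
    </identity>
  </evidence>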
Occurrences record evidence of individual instances of a component spread across multiple locations: the location or path where the component was found, the line number and offset where it was found, the symbol name associated with it, any additional context of the detected component (e.g. a code snippet), and an optional bom-ref for referencing the occurrence elsewhere in the BOM (every bom-ref MUST be unique within the BOM). Callstack evidence documents the component's use through the call stack as a series of frames: the package (which organizes modules into namespaces), the module or class enclosing functions/methods and other code, the function (a block of code designed to perform a particular task), optional arguments passed to the module or function, the line and column of the code that is called, and the full path and filename of the module. As with identity evidence, the tools used for analysis are referenced by bom-ref and should already be defined in the BOM.

Compositions specify an aggregate type describing how complete a relationship is, together with the bom-ref identifiers of the constituents being described: assemblies (nested relationships whereby a constituent part may include other constituent parts; references do not cascade to child parts and are explicit for the specified constituent only), dependencies (relationships whereby an independent constituent part requires another independent constituent part; references do not cascade to transitive dependencies and are explicit for the specified dependency only), and vulnerabilities. A composition may carry an optional bom-ref identifier, unique within all elements and children of the root-level bom element.
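A composition asserting a fully known assembly and dependency graph could be sketched as follows (bom-ref values are placeholders):

  <compositions>
    <composition bom-ref="composition-1">
      <aggregate>complete</aggregate>
      <assemblies>
        <assembly ref="pkg:maven/com.example/app@1.0.0"/>
      </assemblies>
      <dependencies>
        <dependency ref="pkg:maven/com.example/app@1.0.0"/>
      </dependencies>
    </composition>
  </compositions>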
Aggregate types: complete (no further relationships, including constituent components, services, or dependencies, are known to exist); incomplete (additional relationships exist and may include constituent components, services, or dependencies); incomplete variants restricted to first-party relationships only, or to third-party relationships only (optionally limited specifically to proprietary or to opensource constituents); unknown (the relationship may be complete or incomplete; this usually signifies a best-effort attempt whose completeness is inconclusive); and not specified.

Locale: a two-character language code (ISO 639), optionally followed by a two-character country code (ISO 3166). The language code MUST be lower case, the country code MUST be upper case, and the two MUST be separated by a minus sign. Examples: en, en-US, fr, fr-CA.

Release type: it is RECOMMENDED to use one of major (may contain significant changes or introduce breaking changes), minor (an update, typically with a smaller number of changes than a major release), patch (typically unplanned, resolving defects or important security issues), pre-release (alpha, beta, or release candidates with limited support, providing a preview prior to general availability), or internal (not for public consumption, intended exclusively for the project or manufacturer that produced it). Representing all possible software release types is not practical, so standardizing on the recommended values whenever possible is strongly encouraged.

Release notes carry the title of the release; the URL to an image to display prominently with the release note; the URL to an image for use in social media messaging; a short description; the timestamp when the release note was created; one or more alternate names (aliases) the release may be referred to, including unofficial terms used by development and marketing teams such as code names; a collection of issues that have been resolved; and zero or more localized notes, each with the ISO 639 (or higher) language code plus optional ISO 3166 (or higher) country code (e.g. "en", "en-US", "fr", "fr-CA") and the full content of the note. Multiple notes may be specified to support release notes in a wide variety of languages.
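A small release notes entry combining the fields above might look like this (titles, issue identifiers, and URLs are invented for illustration):

  <releaseNotes>
    <type>patch</type>
    <title>Example 1.0.1</title>
    <description>Fixes a parser crash on malformed input.</description>
    <resolves>
      <issue type="defect">
        <id>PROJ-1234</id>
        <source>
          <name>Example Tracker</name>
          <url>https://issues.example.com/PROJ-1234</url>
        </source>
      </issue>
    </resolves>
    <notes>
      <note>
        <locale>en-US</locale>
        <text content-type="text/plain">Maintenance release.</text>
      </note>
    </notes>
  </releaseNotes>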
Properties provide a name/value store for documenting data not officially supported by the standard, without requiring additional namespaces or extensions. Property names of interest to the general public are encouraged to be registered in the CycloneDX Property Taxonomy (https://github.com/CycloneDX/cyclonedx-property-taxonomy); formal registration is OPTIONAL.

A model card describes the intended uses of a machine learning model and potential limitations, including biases and ethical considerations. Model cards typically contain the training parameters, the datasets used to train the model, performance metrics, and other relevant data useful for ML transparency. A model card SHOULD be specified for any component of type `machine-learning-model` and MUST NOT be specified for other component types. Model parameters cover the hyper-parameters used to construct the model: the overall learning approach used for problem solving, the learning types describing the learning problem or hybrid learning problem, the task that directly influences the input and/or output (e.g. classification, regression, clustering), the model architecture family (e.g. transformer network, convolutional neural network, residual neural network, LSTM neural network), the specific architecture (e.g. GPT-1, ResNet-50, YOLOv3), the datasets used to train and evaluate the model (referencing data components by their bom-ref), and the input and output formats of the model (e.g. string, image, time-series). A quantitative analysis of the model lists performance metrics, each with its type, value, the name of the slice the metric was computed on (by default the metric is not sliced), and a confidence interval with lower and upper bounds, along with a collection of graphics representing various measurements; each graphic has a name and an image (vector or raster, with Base64 encoding required for binary images), and the collection may carry a description.
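A minimal model card for a hypothetical image classifier, restricted to the model parameters described above (the values are invented and the element names are assumed from the field descriptions):

  <modelCard>
    <modelParameters>
      <approach>
        <type>supervised</type>
      </approach>
      <task>classification</task>
      <architectureFamily>convolutional neural network</architectureFamily>
      <modelArchitecture>ResNet-50</modelArchitecture>
    </modelParameters>
  </modelCard>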
Model card considerations cover what should be taken into account regarding the model's construction, training, and application: the intended users; the intended use cases; known technical limitations (what kinds of data the model should not be expected to perform well on, and factors that might degrade model performance); known tradeoffs in accuracy/performance; ethical risks involved in applying the model, each with the name of the risk and the strategy used to address it; the environmental impacts the model has exhibited across its lifecycle; and fairness assessments describing how the model affects groups at risk of being systematically disadvantaged, with the groups or individuals at risk, the expected benefits and harms to them, and any mitigation strategy implemented. A model card may carry an optional bom-ref, which MUST be unique within the BOM.

Environmental considerations describe environmental impact metrics: energy consumption incurred for one or more component lifecycle activities, plus properties. Each energy consumption entry identifies the lifecycle activity (design, including problem framing, goal definition, and algorithm selection; data acquisition, including search, selection, and transfer; data preparation, including cleaning, labeling, and conversion; training, including model building and generalized tuning; fine-tuning a trained model to produce desired outputs for a given problem space; validation, including model output evaluation and testing; deployment to a target hosting infrastructure; inference, generating an output response from a hosted model for a set of inputs; or another activity whose description does not match the defined values), the energy provider(s) for that activity, the total energy cost associated with it, the CO2 cost (debit) equivalent to that energy cost, the CO2 offset (credit), and properties.
An energy measure has a quantity and a unit, where kilowatt-hour (kWh) is the energy delivered by one kilowatt (kW) of power for one hour (h). A CO2 measure has a quantity and a unit of tonnes (t) of carbon dioxide (CO2) equivalent (eq). An energy provider describes the physical provider of energy used for model development or operations: a description, the provider organization, the energy source (coal; petroleum products, primarily crude oil and its derivative fuel oils; hydrocarbon gas liquids and natural gas, which occur as gases at atmospheric pressure and as liquids under higher pressures, including ethane and propane; nuclear fission or fusion; wind; solar radiation; geothermal heat within the earth; hydropower from flowing water; liquid biofuels produced from biomass feedstocks such as plants or animals; unknown; or another, unlisted source), the energy provided by that source for the associated activity, external references, and an optional bom-ref unique within all elements and children of the root-level bom element.

A postal address identifies a contactable location: the country name or two-letter ISO 3166-1 country code, the region or state (e.g. Texas), the locality or city (e.g. Austin), the post office box number (e.g. 901), the postal code (e.g. 78758), the street address (e.g. 100 Main Street), and an optional bom-ref.
- - - - - - - Reinforcement learning is a type of machine learning where an agent learns - to make decisions by interacting with an environment to maximize cumulative - rewards, through trial and error. - - - - - - - Semi-supervised machine learning utilizes a combination of labeled and - unlabeled data during training to improve model performance, leveraging - the benefits of both supervised and unsupervised learning techniques. - - - - - - - Self-supervised machine learning involves training models to predict parts - of the input data from other parts of the same data, without requiring - external labels, enabling learning from large amounts of unlabeled data. - - - - - - - - - - - - The general theme or subject matter of the data being specified. - - - - - - - The name of the dataset. - - - - - - - The contents or references to the contents of the data being described. - - - - - - - An optional way to include textual or encoded data. - - - - - The URL to where the data can be retrieved. - - - - - Provides the ability to document name-value parameters used for configuration. - - - - - - - - - Data classification tags data according to its type, sensitivity, and value if altered, stolen, or destroyed. - - - - - - - A description of any sensitive data in a dataset. - - - - - - - A collection of graphics that represent various measurements. - - - - - - - A description of the dataset. Can describe size of dataset, whether it's used for source code, - training, testing, or validation, etc. - - - - - - - - - An optional identifier which can be used to reference the dataset elsewhere in the BOM. - Every bom-ref MUST be unique within the BOM. - - - - - - - - - - - Data custodians are responsible for the safe custody, transport, and storage of data. - - - - - - - - - - - - Data stewards are responsible for data content, context, and associated business rules. - - - - - - - - - - - - Data owners are concerned with risk and appropriate access to data. - - - - - - - - - - - - - - - - - - - - - - A collection of graphics that represent various measurements. - - - - - - - A description of this collection of graphics. - - - - - - - A collection of graphics. - - - - - - - - - - - The name of the graphic. - - - - - - - The graphic (vector or raster). Base64 encoding MUST be specified for binary images. - - - - - - - - - - - - - - - - - Any type of code, code snippet, or data-as-code. - - - - - Parameters or settings that may be used by other components. - - - - - A collection of data. - - - - - Data that can be used to create new instances of what the definition defines. - - - - - Any other type of data that does not fit into existing definitions. - - - - - - - - - References a component or service by its bom-ref attribute - - - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - Specifies an individual property with a name and value. - - - - - - The name of the property. Duplicate names are allowed, each potentially having a different value. - - - - - - - - - - - Defines a weakness in a component or service that could be exploited or triggered by a threat source. 
Vulnerabilities: a vulnerability defines a weakness in a component or service that could be exploited or triggered by a threat source. Each vulnerability records: the identifier that uniquely identifies it (e.g. CVE-2021-39182, GHSA-35m5-8cvj-8783, SNYK-PYTHON-ENROCRYPT-1912876); the source that published it; zero or more references to equivalent vulnerabilities in other sources of vulnerability intelligence (the same vulnerability often exists in multiple sources under different identifiers, and references provide a way to correlate them), each with its own identifier and source; a list of ratings; a list of Common Weakness Enumeration (CWE) codes describing the vulnerability, e.g. 399 (https://cwe.mitre.org/data/definitions/399.html); a description as provided by the source; an in-depth detail field, often including information useful in understanding root cause; recommendations for how the vulnerability can be remediated or mitigated; a workaround (a usually temporary bypass that reduces likelihood and/or impact, often involving changes to configuration or deployments); proof-of-concept material used to reproduce the vulnerability (precise reproduction steps, a description of the environment in which reproduction was possible, and supporting material such as screenshots, payloads, or PoC exploit code); published advisories; the timestamps when the vulnerability record was created in the vulnerability database, first published, last updated, and rejected (if applicable); credits for the organizations and individuals credited with discovery; and the tool(s) used to identify, confirm, or score it (the flat tools list is DEPRECATED in favor of tools\components and tools\services).
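A sketch of a vulnerability entry using the identifier and source URL examples given above (the rating and CWE shown here are illustrative placeholders, not the actual published data for this CVE):

  <vulnerabilities>
    <vulnerability bom-ref="vuln-1">
      <id>CVE-2021-39182</id>
      <source>
        <name>NVD</name>
        <url>https://nvd.nist.gov/vuln/detail/CVE-2021-39182</url>
      </source>
      <ratings>
        <rating>
          <!-- illustrative rating, not the actual published score -->
          <score>5.9</score>
          <severity>medium</severity>
          <method>CVSSv31</method>
        </rating>
      </ratings>
      <cwes>
        <cwe>399</cwe>
      </cwes>
      <description>Example description as provided by the source.</description>
    </vulnerability>
  </vulnerabilities>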
Vulnerability analysis is an assessment of the impact and exploitability of the vulnerability: the current state of this occurrence after automated or manual analysis; the justification (rationale) for asserting that state; one or more responses by the manufacturer, supplier, or project responsible for the affected component or service (responses are strongly encouraged for vulnerabilities whose analysis state is exploitable); a detailed description of the impact, including methods used during assessment (when a vulnerability is not exploitable, this field should include specific details on why the component or service is not impacted); and the timestamps when the analysis was first issued and last updated.

The affects list identifies the components or services affected by the vulnerability, each referenced by its bom-ref, together with zero or more individual versions or version ranges: either a single version of the component or service, or a range expressed in Package URL Version Range syntax (vers, defined at https://github.com/package-url/purl-spec/VERSION-RANGE-SPEC.rst), plus the vulnerability status for that version or range. A vulnerability may also carry properties and an optional bom-ref, unique within all elements and children of the root-level bom element.

A vulnerability source has a name (e.g. NVD, National Vulnerability Database, OSS Index, VulnDB, GitHub Advisories) and the URL of the vulnerability documentation as provided by the source (e.g. https://nvd.nist.gov/vuln/detail/CVE-2021-39182). A rating records the source that calculated the severity or risk rating, the numerical score, the textual severity corresponding to that score, the risk scoring methodology/standard used, the textual representation of the metric values (vector) used to score the vulnerability, and an optional reason for rating the vulnerability as it was. An advisory has an optional name and the location where it can be obtained.

Annotations may be created by an organization, a person, a tool or component, or a service.
An annotation identifies its subjects (the objects in the BOM, identified by their bom-refs; these are often components or services, but may be any object type supporting bom-refs), the annotator (the organization, individual, component, or service that created the textual content), the timestamp when the annotation was created, the textual content itself, and an optional bom-ref unique within all elements and children of the root-level bom element.

Severity is a textual representation of the severity of the vulnerability adopted by the analysis method; if the analysis method uses values other than those provided, the user is expected to translate appropriately.

Impact analysis states: resolved (the vulnerability has been remediated); resolved_with_pedigree (remediated, with evidence of the changes provided in the affected component's pedigree as verifiable commit history and/or diffs); exploitable (may be directly or indirectly exploitable); in_triage (being investigated); false_positive (not specific to the component or service, falsely identified or associated); and not_affected (the component or service is not affected; a justification should be specified for all not_affected cases).

Impact analysis justifications (the rationale for the asserted state): the code has been removed or tree-shaken; the vulnerable code is not invoked at runtime; exploitability requires a configurable option to be set/unset; exploitability requires a dependency that is not present; exploitability requires a certain environment which is not present; exploitability requires a compiler flag to be set/unset; exploits are prevented at runtime; attacks are blocked at the physical, logical, or network perimeter; or preventative measures have been implemented that reduce the likelihood and/or impact of the vulnerability.

Scoring methodologies: CVSS v2.0 (https://www.first.org/cvss/v2/), CVSS v3.0 (https://www.first.org/cvss/v3-0/), CVSS v3.1 (https://www.first.org/cvss/v3-1/), CVSS v4.0 (https://www.first.org/cvss/v4-0/), OWASP Risk Rating (https://owasp.org/www-community/OWASP_Risk_Rating_Methodology), Stakeholder-Specific Vulnerability Categorization (SSVC, https://github.com/CERTCC/SSVC), or another severity or risk scoring methodology.
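Putting the analysis fields together, a not-affected assertion might be sketched as follows (the response and detail text are illustrative):

  <analysis>
    <state>not_affected</state>
    <justification>code_not_reachable</justification>
    <responses>
      <response>will_not_fix</response>
    </responses>
    <detail>The vulnerable function is never invoked by this component.</detail>
  </analysis>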
The vulnerability status of a given version or range of versions of a product: the statuses "affected" and "unaffected" indicate that the version is affected or unaffected by the vulnerability, while "unknown" indicates that it is unknown or unspecified whether the given version is affected. There can be many reasons for an unknown status, including that an investigation has not been undertaken or that a vendor has not disclosed the status.

Formulation describes how a component or service was manufactured or deployed. This is achieved through formulas, workflows, tasks, and steps, which declare the precise steps to reproduce, along with observed formulas describing the steps which transpired in the manufacturing process. A formula captures the rules and other aspects of how the associated BOM component or service was formed: transient components and services used in tasks that constitute one or more of the formula's workflows, the workflows that can be declared to accomplish specific orchestrated goals and be independently triggered, properties, and an optional bom-ref unique within all elements and children of the root-level bom element.

A workflow records the unique identifier for the resource instance within its deployment context, a name, a description, references to the component or service resources used to realize the instance, the tasks that comprise the workflow, the graph of dependencies between those tasks, the types of activities performed by the set of workflow tasks, the trigger that initiated it, the sequence of steps, the inputs (resources and data brought into a task at runtime by executor or task commands), and the outputs (resources and data output from a task at runtime).
A workflow also records the timestamps when it started and ended, a set of named filesystem or data resources (workspaces) shareable by workflow tasks, the runtime topology (a graph of the component and service topology used to host and execute the workflow instance, which may partially or completely describe the hardware, operating systems, configuration, and so on), properties, and an optional bom-ref unique within all elements and children of the root-level bom element.

A resource reference either references an object by its bom-ref attribute or points to an externally accessible resource.

A task records the unique identifier for the resource instance within its deployment context, a name, a description, references to the component or service resources used to realize it, the types of activities it performs, the trigger that initiated it, the sequence of steps, its inputs and outputs (resources and data brought into, and output from, the task at runtime by executor or task commands), the timestamps when it started and ended, workspaces, the runtime topology for the task's instance, properties, and an optional bom-ref for referencing the task elsewhere in the BOM.
Task types: copy (copies software or data used to accomplish other tasks in the workflow); clone (clones a software repository to retrieve its source code or data for use in a build step); lint (checks source code for programmatic and stylistic errors); scan (scans source code or built/deployed components and services, typically for security vulnerabilities or policy compliance); merge (merges changes or fixes into source code prior to a build step); build (builds the source code, dependencies, and/or data into an artifact that can be deployed to and executed on target systems); test (verifies the functionality of a component or service); deliver (delivers a built artifact to one or more target repositories or storage systems); deploy (deploys a built artifact for execution on one or more target systems); release (releases a built, versioned artifact to a target repository or distribution system); clean (removes unnecessary tools, build artifacts, and/or data from workflow storage); and other (a workflow task that does not match the defined types).

A workspace is a named filesystem or data resource shareable by workflow tasks. It records the unique identifier for the resource instance within its deployment context, a name, the aliases by which other workflow tasks refer to it (effectively a name mapping so other tasks can use their own local name in their steps), a description, references to the component or service resources used to realize it, the access mode (read-write access control for the workspace relative to the owning resource instance), the mount path (a location on disk where the workspace will be available to the associated task's steps), the managed data type (the name of a domain-specific data type the workspace represents, for CI/CD frameworks able to provide access to structured, managed data at a more granular level than a filesystem), the volume request (a reference to the request for a specific volume type and parameters), and information about the actual volume instance allocated to the workspace.
A workspace may also carry properties and an optional bom-ref for referencing it elsewhere in the BOM.

A volume is an identifiable, logical unit of data storage tied to a physical device. It records the unique identifier for the volume instance within its deployment context, a name, the mode, the underlying path created from the actual volume, the allocated size accessible to the associated workspace (the scalar size plus an IEC standard unit in either decimal or binary form), whether the volume persists beyond the life of the resource it is associated with, whether it is remotely (network) attached, and properties.

A step executes specific commands or tools in order to accomplish its owning task as part of a sequence. It records a name, a description, an ordered list of commands or directives (each with a text representation of the executed command and properties), and properties.
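A single build step with one command might be sketched as follows (element names are assumed from the descriptions above; the command itself is invented):

  <steps>
    <step>
      <name>build</name>
      <description>Compile the project into a distributable artifact</description>
      <commands>
        <command>
          <executed>make build</executed>
        </command>
      </commands>
    </step>
  </steps>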
- - - - - - - - - - The unique identifier for the resource instance within its deployment context. - - - - - - - The name of the resource instance. - - - - - - - The description of the resource instance. - - - - - - - References to component or service resources that are used to realize the resource instance. - - - - - - - The source type of event which caused the trigger to fire. - - - - - - - The event data that caused the associated trigger to activate. - - - - - - - - - - A condition that was used to determine a trigger should be activated. - - - - - - - - Describes the set of conditions which cause the trigger to activate. - - - - - - - The logical expression that was evaluated that determined the trigger should be fired. - - - - - - Provides the ability to document properties in a name/value store. - This provides flexibility to include data not officially supported in the standard - without having to use additional namespaces or create extensions. Property names - of interest to the general public are encouraged to be registered in the - CycloneDX Property Taxonomy - https://github.com/CycloneDX/cyclonedx-property-taxonomy. - Formal registration is OPTIONAL. - - - - - - - - - - - - The date and time (timestamp) when the trigger was activated. - - - - - - - Represents resources and data brought into a task at runtime by executor or task commands - - - - - - - - - - - - Represents resources and data output from a task at runtime by executor or task commands - - - - - - - - - - - Provides the ability to document properties in a name/value store. - This provides flexibility to include data not officially supported in the standard - without having to use additional namespaces or create extensions. Property names - of interest to the general public are encouraged to be registered in the - CycloneDX Property Taxonomy - https://github.com/CycloneDX/cyclonedx-property-taxonomy. - Formal registration is OPTIONAL. - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - - An optional identifier which can be used to reference the trigger elsewhere in the BOM. - Uniqueness is enforced within all elements and children of the root-level bom element. - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - - - - - - - - - - - - - The unique identifier of the event. - - - - - - - A description of the event. - - - - - - - The date and time (timestamp) when the event was received. - - - - - - - Encoding of the raw event data. - - - - - - - References the component or service that was the source of the event - - - - - - - References the component or service that was the target of the event - - - - - - Provides the ability to document properties in a name/value store. - This provides flexibility to include data not officially supported in the standard - without having to use additional namespaces or create extensions. Property names - of interest to the general public are encouraged to be registered in the - CycloneDX Property Taxonomy - https://github.com/CycloneDX/cyclonedx-property-taxonomy. - Formal registration is OPTIONAL. - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. 
- - - - - - - - Type that represents various input data types and formats. - - - - - - - - A reference to an independent resource provided as an input to a task by the workflow runtime. - - - - - - - Inputs that have the form of parameters with names and values. - - - - - - - Inputs that have the form of parameters with names and values. - - - - - - - - - - - - - - - - Inputs that have the form of data. - - - - - - - - A references to the component or service that provided the input to the task - (e.g., reference to a service with data flow value of inbound) - - - - - - - A reference to the component or service that received or stored the input if not the task - itself (e.g., a local, named storage workspace) - - - - - - Provides the ability to document properties in a name/value store. - This provides flexibility to include data not officially supported in the standard - without having to use additional namespaces or create extensions. Property names - of interest to the general public are encouraged to be registered in the - CycloneDX Property Taxonomy - https://github.com/CycloneDX/cyclonedx-property-taxonomy. - Formal registration is OPTIONAL. - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - - Represents resources and data output from a task at runtime by executor or task commands - - - - - - - - A reference to an independent resource generated as output by the task. - - - - - - - Outputs that have the form of environment variables. - - - - - - - - - - - - - - - - Outputs that have the form of data. - - - - - - - - Describes the type of data output. - - - - - - - Component or service that generated or provided the output from the task (e.g., a build tool) - - - - - - - Component or service that received the output from the task - (e.g., reference to an artifactory service with data flow value of outbound) - - - - - - Provides the ability to document properties in a name/value store. - This provides flexibility to include data not officially supported in the standard - without having to use additional namespaces or create extensions. Property names - of interest to the general public are encouraged to be registered in the - CycloneDX Property Taxonomy - https://github.com/CycloneDX/cyclonedx-property-taxonomy. - Formal registration is OPTIONAL. - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - - - - - - - - - - - - - - - - - - - A representation of a functional parameter. - - - - - - - The name of the parameter. - - - - - - - The value of the parameter. - - - - - - - The data type of the parameter. - - - - - - - Allows any undeclared elements as long as the elements are placed in a different namespace. - - - - - - - User-defined attributes may be used on this element as long as they - do not have the same name as an existing attribute used by the schema. - - - - - - - - Cryptographic assets have properties that uniquely define them and that make them actionable for - further reasoning. As an example, it makes a difference if one knows the algorithm family (e.g. AES) - or the specific variant or instantiation (e.g. AES-128-GCM). 
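To make the removed formulation machinery concrete, here is a rough sketch of a single workflow with one workspace and one step as it might appear in CycloneDX's JSON binding (the XSD deleted here defines the equivalent XML form). The identifiers, mount path, and command are invented for illustration, and the property names should be verified against the CycloneDX 1.6 specification rather than taken from this sketch.

    {
      "formulation": [
        {
          "bom-ref": "formula-1",
          "workflows": [
            {
              "bom-ref": "workflow-1",
              "uid": "build-and-test",
              "taskTypes": ["clone", "build", "test"],
              "workspaces": [
                {
                  "bom-ref": "workspace-1",
                  "uid": "shared-sources",
                  "name": "sources",
                  "accessMode": "read-write",
                  "mountPath": "/workspace/src"
                }
              ],
              "steps": [
                {
                  "name": "compile",
                  "commands": [
                    { "executed": "make build" }
                  ]
                }
              ]
            }
          ]
        }
      ]
    }

The "taskTypes" values correspond to the task-type enumeration documented above, and "accessMode"/"mountPath" mirror the workspace access-control and path documentation.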
[The removed lines continue with the cryptographic asset properties: asset types (algorithm, certificate, protocol, related cryptographic material); algorithm primitives (drbg, mac, block cipher, stream cipher, signature, hash, pke, xof, kdf, key agreement, kem, ae, combiner, other, unknown), each with explanatory text; parameter-set identifiers (e.g. '128' in AES128, 'SHA2-128s' in SLH-DSA) and elliptic-curve names per https://neuromancer.sk/std/; execution environments (plain or encrypted RAM, trusted execution environment, hardware, other, unknown); implementation platforms; certification levels (none, FIPS 140-1/140-2/140-3 levels 1-4, Common Criteria EAL 1-7 plain and augmented, other, unknown); block-cipher modes (cbc, ecb, ccm, gcm, cfb, ofb, ctr, other, unknown); padding schemes (PKCS#5, PKCS CMS, PKCS#1 v1.5, OAEP, raw, other, unknown); implemented crypto functions; the classical security level in bits and the NIST post-quantum security category; certificate properties (subject, issuer, validity window, signature-algorithm and subject-public-key references, format, file extension); related-crypto-material properties (type, identifier, key state per NIST SP 800-57, algorithm reference, creation/activation/update/expiration timestamps, value, size, format, securing mechanism); protocol properties (type: TLS, SSH, IPsec, IKE, SSTP, WPA, other, unknown; version; cipher suites with algorithm and identifier lists; IKEv2 transform types per RFC 7296; OID); and the start of the declarations section, beginning with the list of assessors (third-party flag and issuing organization).]
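As a hedged illustration of the algorithm documentation above, a cryptographic-asset component might be declared roughly as follows in the JSON binding. The primitive ("ae"), mode ("gcm"), execution environment, certification level, and security levels mirror the enumerations described in the deleted text; the OID shown is the common identifier for AES-128-GCM, but every value here is illustrative and should be checked against the published CycloneDX 1.6 schema.

    {
      "type": "cryptographic-asset",
      "name": "AES-128-GCM",
      "bom-ref": "crypto-aes-128-gcm",
      "cryptoProperties": {
        "assetType": "algorithm",
        "algorithmProperties": {
          "primitive": "ae",
          "parameterSetIdentifier": "128",
          "executionEnvironment": "software-plain-ram",
          "implementationPlatform": "x86_64",
          "certificationLevel": ["none"],
          "mode": "gcm",
          "cryptoFunctions": ["keygen", "encrypt", "decrypt", "tag"],
          "classicalSecurityLevel": 128,
          "nistQuantumSecurityLevel": 1
        },
        "oid": "2.16.840.1.101.3.4.1.6"
      }
    }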
[The last removed span of the BOM XSD covers the remainder of declarations and the document root: attestations asserted by an assessor, mapping requirements to claims and counter claims with conformance and confidence scores (0-1) and rationales; claims (target, predicate, mitigation strategies, reasoning, evidence, counter evidence, external references); evidence (property name per the CycloneDX Property Taxonomy, description, data with contents, classification, and sensitive-data notes, created/expires timestamps, author, reviewer); targets (organizations, components, services); an affirmation statement with its signatories (name, role, organization, external reference); reusable definitions, i.e. standards (name, version, description, owner, requirements with identifier, title, text, supplemental descriptions, CRE identifiers, and an optional parent requirement, plus levels composed of requirement bom-refs); tags for discovery and search (examples: "json-parser", "object-persistence", "text-to-image", "translation", "object-detection"); and the root bom element itself (metadata, components, services, external references, dependencies, compositions, properties, vulnerabilities, annotations, formulation, declarations, definitions) with its version attribute (default '1', incremented on modification) and RFC 4122 serial number.]
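A similarly hedged sketch of the declarations structure follows: one assessor, one claim, and an attestation that maps a requirement to the claim with conformance and confidence scores, as described in the deleted documentation. The bom-refs, organization name, predicate, and scores are placeholders, and the field names should be confirmed against the CycloneDX 1.6 specification.

    {
      "declarations": {
        "assessors": [
          {
            "bom-ref": "assessor-1",
            "thirdParty": true,
            "organization": { "name": "Example Auditors Inc." }
          }
        ],
        "claims": [
          {
            "bom-ref": "claim-1",
            "target": "acme-application",
            "predicate": "Data at rest is encrypted with AES-256."
          }
        ],
        "attestations": [
          {
            "summary": "Encryption-at-rest attestation",
            "assessor": "assessor-1",
            "map": [
              {
                "requirement": "requirement-1",
                "claims": ["claim-1"],
                "conformance": { "score": 1, "rationale": "Verified during the audit." },
                "confidence": { "score": 0.9, "rationale": "Based on sampled evidence." }
              }
            ]
          }
        ]
      }
    }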
diff --git a/schema/cyclonedx/spdx.xsd b/schema/cyclonedx/spdx.xsd
deleted file mode 100644
index c2126601965..00000000000
--- a/schema/cyclonedx/spdx.xsd
+++ /dev/null
@@ -1,3079 +0,0 @@
[The entire vendored SPDX license-list XSD (3,079 lines) is removed. Its XML markup likewise was not preserved in this extract; the surviving documentation text is the enumeration of SPDX license display names, from "BSD Zero Clause License" through "Zope Public License 2.1" (the AFL, Apache, Artistic, BSD, Creative Commons, CDDL, CeCILL, EPL, EUPL, GFDL, GPL/LGPL/AGPL, MIT, MPL, OFL, OLDAP, OSL, PHP, Python, W3C, X11, zlib, and Zope families, among many others), followed by the SPDX license-exception names from "389 Directory Server Exception" through "x11vnc OpenSSL Exception".]
Notice and License (1998-07-20) - - - - - W3C Software Notice and Document License (2015-05-13) - - - - - w3m License - - - - - Sybase Open Watcom Public License 1.0 - - - - - Widget Workshop License - - - - - Wsuipa License - - - - - Do What The F*ck You Want To Public License - - - - - wxWindows Library License - - - - - X11 License - - - - - X11 License Distribution Modification Variant - - - - - Xdebug License v 1.03 - - - - - Xerox License - - - - - Xfig License - - - - - XFree86 License 1.1 - - - - - xinetd License - - - - - xlock License - - - - - X.Net License - - - - - XPP License - - - - - XSkat License - - - - - Yahoo! Public License v1.0 - - - - - Yahoo! Public License v1.1 - - - - - Zed License - - - - - Zend License v2.0 - - - - - Zimbra Public License v1.3 - - - - - Zimbra Public License v1.4 - - - - - zlib License - - - - - zlib/libpng License with Acknowledgement - - - - - Zope Public License 1.1 - - - - - Zope Public License 2.0 - - - - - Zope Public License 2.1 - - - - - - 389 Directory Server Exception - - - - - Asterisk exception - - - - - Autoconf exception 2.0 - - - - - Autoconf exception 3.0 - - - - - Autoconf generic exception - - - - - Autoconf macro exception - - - - - Bison exception 2.2 - - - - - Bootloader Distribution Exception - - - - - Classpath exception 2.0 - - - - - CLISP exception 2.0 - - - - - cryptsetup OpenSSL exception - - - - - DigiRule FOSS License Exception - - - - - eCos exception 2.0 - - - - - Fawkes Runtime Exception - - - - - FLTK exception - - - - - Font exception 2.0 - - - - - FreeRTOS Exception 2.0 - - - - - GCC Runtime Library exception 2.0 - - - - - GCC Runtime Library exception 3.1 - - - - - GNAT exception - - - - - GNU JavaMail exception - - - - - GPL-3.0 Interface Exception - - - - - GPL-3.0 Linking Exception - - - - - GPL-3.0 Linking Exception (with Corresponding Source) - - - - - GPL Cooperation Commitment 1.0 - - - - - GStreamer Exception (2005) - - - - - GStreamer Exception (2008) - - - - - i2p GPL+Java Exception - - - - - KiCad Libraries Exception - - - - - LGPL-3.0 Linking Exception - - - - - libpri OpenH323 exception - - - - - Libtool Exception - - - - - Linux Syscall Note - - - - - LLGPL Preamble - - - - - LLVM Exception - - - - - LZMA exception - - - - - Macros and Inline Functions Exception - - - - - Nokia Qt LGPL exception 1.1 - - - - - OCaml LGPL Linking Exception - - - - - Open CASCADE Exception 1.0 - - - - - OpenJDK Assembly exception 1.0 - - - - - OpenVPN OpenSSL Exception - - - - - PS/PDF font exception (2017-08-17) - - - - - INRIA QPL 1.0 2004 variant exception - - - - - Qt GPL exception 1.0 - - - - - Qt LGPL exception 1.1 - - - - - Qwt exception 1.0 - - - - - Solderpad Hardware License v2.0 - - - - - Solderpad Hardware License v2.1 - - - - - SWI exception - - - - - Swift Exception - - - - - U-Boot exception 2.0 - - - - - Universal FOSS Exception, Version 1.0 - - - - - vsftpd OpenSSL exception - - - - - WxWindows Library Exception 3.1 - - - - - x11vnc OpenSSL Exception - - - - - - diff --git a/schema/grype/db-search-vuln/json/schema-1.0.3.json b/schema/grype/db-search-vuln/json/schema-1.0.3.json new file mode 100644 index 00000000000..9bd58164b7a --- /dev/null +++ b/schema/grype/db-search-vuln/json/schema-1.0.3.json @@ -0,0 +1,247 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "anchore.io/schema/grype/db-search-vuln/json/1.0.3/vulnerabilities", + "$ref": "#/$defs/Vulnerabilities", + "$defs": { + "EPSS": { + "properties": { + "cve": { + "type": "string" + }, + "epss": { + "type": "number" + 
}, + "percentile": { + "type": "number" + }, + "date": { + "type": "string" + } + }, + "type": "object", + "required": [ + "cve", + "epss", + "percentile", + "date" + ] + }, + "KnownExploited": { + "properties": { + "cve": { + "type": "string" + }, + "vendor_project": { + "type": "string" + }, + "product": { + "type": "string" + }, + "date_added": { + "type": "string" + }, + "required_action": { + "type": "string" + }, + "due_date": { + "type": "string" + }, + "known_ransomware_campaign_use": { + "type": "string" + }, + "notes": { + "type": "string" + }, + "urls": { + "items": { + "type": "string" + }, + "type": "array" + }, + "cwes": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "cve", + "known_ransomware_campaign_use" + ] + }, + "OperatingSystem": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version" + ] + }, + "Reference": { + "$defs": { + "tags": { + "description": "is a free-form organizational field to convey additional information about the reference" + }, + "url": { + "description": "is the external resource" + } + }, + "properties": { + "url": { + "type": "string" + }, + "tags": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "url" + ] + }, + "Severity": { + "$defs": { + "rank": { + "description": "is a free-form organizational field to convey priority over other severities" + }, + "scheme": { + "description": "describes the quantitative method used to determine the Score, such as 'CVSS_V3'. Alternatively this makes\nclaim that Value is qualitative, for example 'HML' (High, Medium, Low), CHMLN (critical-high-medium-low-negligible)" + }, + "source": { + "description": "is the name of the source of the severity score (e.g. 'nvd@nist.gov' or 'security-advisories@github.com')" + }, + "value": { + "description": "is the severity score (e.g. 
'7.5', 'CVSS:4.0/AV:N/AC:L/AT:N/PR:H/UI:N/VC:L/VI:L/VA:N/SC:N/SI:N/SA:N', or 'high' )" + } + }, + "properties": { + "scheme": { + "type": "string" + }, + "value": true, + "source": { + "type": "string" + }, + "rank": { + "type": "integer" + } + }, + "type": "object", + "required": [ + "scheme", + "value", + "rank" + ] + }, + "Vulnerabilities": { + "items": { + "$ref": "#/$defs/Vulnerability" + }, + "type": "array" + }, + "Vulnerability": { + "$defs": { + "affected_packages": { + "description": "is the number of packages affected by the vulnerability" + }, + "operating_systems": { + "description": "is a list of operating systems affected by the vulnerability" + } + }, + "properties": { + "id": { + "type": "string" + }, + "assigner": { + "items": { + "type": "string" + }, + "type": "array" + }, + "description": { + "type": "string" + }, + "refs": { + "items": { + "$ref": "#/$defs/Reference" + }, + "type": "array" + }, + "aliases": { + "items": { + "type": "string" + }, + "type": "array" + }, + "severities": { + "items": { + "$ref": "#/$defs/Severity" + }, + "type": "array" + }, + "severity": { + "type": "string" + }, + "provider": { + "type": "string" + }, + "status": { + "type": "string" + }, + "published_date": { + "type": "string", + "format": "date-time" + }, + "modified_date": { + "type": "string", + "format": "date-time" + }, + "withdrawn_date": { + "type": "string", + "format": "date-time" + }, + "known_exploited": { + "items": { + "$ref": "#/$defs/KnownExploited" + }, + "type": "array" + }, + "epss": { + "items": { + "$ref": "#/$defs/EPSS" + }, + "type": "array" + }, + "operating_systems": { + "items": { + "$ref": "#/$defs/OperatingSystem" + }, + "type": "array" + }, + "affected_packages": { + "type": "integer" + } + }, + "type": "object", + "required": [ + "id", + "provider", + "status", + "operating_systems", + "affected_packages" + ] + } + } +} diff --git a/schema/grype/db-search-vuln/json/schema-latest.json b/schema/grype/db-search-vuln/json/schema-latest.json index bea5a4069cf..9bd58164b7a 100644 --- a/schema/grype/db-search-vuln/json/schema-latest.json +++ b/schema/grype/db-search-vuln/json/schema-latest.json @@ -1,6 +1,6 @@ { "$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "anchore.io/schema/grype/db-search-vuln/json/1.0.1/vulnerabilities", + "$id": "anchore.io/schema/grype/db-search-vuln/json/1.0.3/vulnerabilities", "$ref": "#/$defs/Vulnerabilities", "$defs": { "EPSS": { @@ -191,6 +191,9 @@ }, "type": "array" }, + "severity": { + "type": "string" + }, "provider": { "type": "string" }, diff --git a/schema/grype/db-search/json/schema-1.0.2.json b/schema/grype/db-search/json/schema-1.0.2.json new file mode 100644 index 00000000000..365c6583310 --- /dev/null +++ b/schema/grype/db-search/json/schema-1.0.2.json @@ -0,0 +1,529 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "anchore.io/schema/grype/db-search/json/1.0.2/matches", + "$ref": "#/$defs/Matches", + "$defs": { + "AffectedPackageBlob": { + "$defs": { + "cves": { + "description": "is a list of Common Vulnerabilities and Exposures (CVE) identifiers related to this vulnerability." + }, + "qualifiers": { + "description": "are package attributes that confirm the package is affected by the vulnerability." + }, + "ranges": { + "description": "specifies the affected version ranges and fixes if available." 
+ } + }, + "properties": { + "cves": { + "items": { + "type": "string" + }, + "type": "array" + }, + "qualifiers": { + "$ref": "#/$defs/AffectedPackageQualifiers" + }, + "ranges": { + "items": { + "$ref": "#/$defs/AffectedRange" + }, + "type": "array" + } + }, + "type": "object" + }, + "AffectedPackageInfo": { + "$defs": { + "cpe": { + "description": "is a Common Platform Enumeration that is affected by the vulnerability" + }, + "detail": { + "description": "is the detailed information about the affected package" + }, + "namespace": { + "description": "is a holdover value from the v5 DB schema that combines provider and search methods into a single value\nDeprecated: this field will be removed in a later version of the search schema" + }, + "os": { + "description": "identifies the operating system release that the affected package is released for" + }, + "package": { + "description": "identifies the name of the package in a specific ecosystem affected by the vulnerability" + } + }, + "properties": { + "os": { + "$ref": "#/$defs/OperatingSystem" + }, + "package": { + "$ref": "#/$defs/Package" + }, + "cpe": { + "$ref": "#/$defs/CPE" + }, + "namespace": { + "type": "string" + }, + "detail": { + "$ref": "#/$defs/AffectedPackageBlob" + } + }, + "type": "object", + "required": [ + "namespace", + "detail" + ] + }, + "AffectedPackageQualifiers": { + "$defs": { + "platform_cpes": { + "description": "lists Common Platform Enumeration (CPE) identifiers for affected platforms." + }, + "rpm_modularity": { + "description": "indicates if the package follows RPM modularity for versioning." + } + }, + "properties": { + "rpm_modularity": { + "type": "string" + }, + "platform_cpes": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "AffectedRange": { + "$defs": { + "fix": { + "description": "provides details on the fix version and its state if available." + }, + "version": { + "description": "defines the version constraints for affected software." + } + }, + "properties": { + "version": { + "$ref": "#/$defs/AffectedVersion" + }, + "fix": { + "$ref": "#/$defs/Fix" + } + }, + "type": "object" + }, + "AffectedVersion": { + "$defs": { + "constraint": { + "description": "defines the version range constraint for affected versions." + }, + "type": { + "description": "specifies the versioning system used (e.g., 'semver', 'rpm')." 
+ } + }, + "properties": { + "type": { + "type": "string" + }, + "constraint": { + "type": "string" + } + }, + "type": "object" + }, + "CPE": { + "properties": { + "ID": { + "type": "integer" + }, + "Part": { + "type": "string" + }, + "Vendor": { + "type": "string" + }, + "Product": { + "type": "string" + }, + "Edition": { + "type": "string" + }, + "Language": { + "type": "string" + }, + "SoftwareEdition": { + "type": "string" + }, + "TargetHardware": { + "type": "string" + }, + "TargetSoftware": { + "type": "string" + }, + "Other": { + "type": "string" + }, + "Packages": { + "items": { + "$ref": "#/$defs/Package" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "ID", + "Part", + "Vendor", + "Product", + "Edition", + "Language", + "SoftwareEdition", + "TargetHardware", + "TargetSoftware", + "Other", + "Packages" + ] + }, + "EPSS": { + "properties": { + "cve": { + "type": "string" + }, + "epss": { + "type": "number" + }, + "percentile": { + "type": "number" + }, + "date": { + "type": "string" + } + }, + "type": "object", + "required": [ + "cve", + "epss", + "percentile", + "date" + ] + }, + "Fix": { + "$defs": { + "detail": { + "description": "provides additional fix information, such as commit details." + }, + "state": { + "description": "represents the status of the fix (e.g., 'fixed', 'unaffected')." + }, + "version": { + "description": "is the version number of the fix." + } + }, + "properties": { + "version": { + "type": "string" + }, + "state": { + "type": "string" + }, + "detail": { + "$ref": "#/$defs/FixDetail" + } + }, + "type": "object" + }, + "FixDetail": { + "$defs": { + "git_commit": { + "description": "is the identifier for the Git commit associated with the fix." + }, + "references": { + "description": "contains URLs or identifiers for additional resources on the fix." + }, + "timestamp": { + "description": "is the date and time when the fix was committed." + } + }, + "properties": { + "git_commit": { + "type": "string" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "references": { + "items": { + "$ref": "#/$defs/Reference" + }, + "type": "array" + } + }, + "type": "object" + }, + "KnownExploited": { + "properties": { + "cve": { + "type": "string" + }, + "vendor_project": { + "type": "string" + }, + "product": { + "type": "string" + }, + "date_added": { + "type": "string" + }, + "required_action": { + "type": "string" + }, + "due_date": { + "type": "string" + }, + "known_ransomware_campaign_use": { + "type": "string" + }, + "notes": { + "type": "string" + }, + "urls": { + "items": { + "type": "string" + }, + "type": "array" + }, + "cwes": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "cve", + "known_ransomware_campaign_use" + ] + }, + "Match": { + "$defs": { + "packages": { + "description": "is the list of packages affected by the vulnerability." + }, + "vulnerability": { + "description": "is the core advisory record for a single known vulnerability from a specific provider." 
+ } + }, + "properties": { + "vulnerability": { + "$ref": "#/$defs/VulnerabilityInfo" + }, + "packages": { + "items": { + "$ref": "#/$defs/AffectedPackageInfo" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "vulnerability", + "packages" + ] + }, + "Matches": { + "items": { + "$ref": "#/$defs/Match" + }, + "type": "array" + }, + "OperatingSystem": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version" + ] + }, + "Package": { + "properties": { + "name": { + "type": "string" + }, + "ecosystem": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "ecosystem" + ] + }, + "Reference": { + "$defs": { + "tags": { + "description": "is a free-form organizational field to convey additional information about the reference" + }, + "url": { + "description": "is the external resource" + } + }, + "properties": { + "url": { + "type": "string" + }, + "tags": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "url" + ] + }, + "Severity": { + "$defs": { + "rank": { + "description": "is a free-form organizational field to convey priority over other severities" + }, + "scheme": { + "description": "describes the quantitative method used to determine the Score, such as 'CVSS_V3'. Alternatively this makes\nclaim that Value is qualitative, for example 'HML' (High, Medium, Low), CHMLN (critical-high-medium-low-negligible)" + }, + "source": { + "description": "is the name of the source of the severity score (e.g. 'nvd@nist.gov' or 'security-advisories@github.com')" + }, + "value": { + "description": "is the severity score (e.g. '7.5', 'CVSS:4.0/AV:N/AC:L/AT:N/PR:H/UI:N/VC:L/VI:L/VA:N/SC:N/SI:N/SA:N', or 'high' )" + } + }, + "properties": { + "scheme": { + "type": "string" + }, + "value": true, + "source": { + "type": "string" + }, + "rank": { + "type": "integer" + } + }, + "type": "object", + "required": [ + "scheme", + "value", + "rank" + ] + }, + "VulnerabilityInfo": { + "$defs": { + "epss": { + "description": "is a list of Exploit Prediction Scoring System (EPSS) scores for the vulnerability" + }, + "known_exploited": { + "description": "is a list of known exploited vulnerabilities from the CISA KEV dataset" + }, + "modified_date": { + "description": "is the date the vulnerability record was last modified" + }, + "provider": { + "description": "is the upstream data processor (usually Vunnel) that is responsible for vulnerability records. Each provider\nshould be scoped to a specific vulnerability dataset, for instance, the 'ubuntu' provider for all records from\nCanonicals' Ubuntu Security Notices (for all Ubuntu distro versions)." 
+ }, + "published_date": { + "description": "is the date the vulnerability record was first published" + }, + "status": { + "description": "conveys the actionability of the current record (one of 'active', 'analyzing', 'rejected', 'disputed')" + }, + "withdrawn_date": { + "description": "is the date the vulnerability record was withdrawn" + } + }, + "properties": { + "id": { + "type": "string" + }, + "assigner": { + "items": { + "type": "string" + }, + "type": "array" + }, + "description": { + "type": "string" + }, + "refs": { + "items": { + "$ref": "#/$defs/Reference" + }, + "type": "array" + }, + "aliases": { + "items": { + "type": "string" + }, + "type": "array" + }, + "severities": { + "items": { + "$ref": "#/$defs/Severity" + }, + "type": "array" + }, + "provider": { + "type": "string" + }, + "status": { + "type": "string" + }, + "published_date": { + "type": "string", + "format": "date-time" + }, + "modified_date": { + "type": "string", + "format": "date-time" + }, + "withdrawn_date": { + "type": "string", + "format": "date-time" + }, + "known_exploited": { + "items": { + "$ref": "#/$defs/KnownExploited" + }, + "type": "array" + }, + "epss": { + "items": { + "$ref": "#/$defs/EPSS" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "id", + "provider", + "status" + ] + } + } +} diff --git a/schema/grype/db-search/json/schema-1.0.3.json b/schema/grype/db-search/json/schema-1.0.3.json new file mode 100644 index 00000000000..ac9ac84a1e4 --- /dev/null +++ b/schema/grype/db-search/json/schema-1.0.3.json @@ -0,0 +1,535 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "anchore.io/schema/grype/db-search/json/1.0.3/matches", + "$ref": "#/$defs/Matches", + "$defs": { + "AffectedPackageBlob": { + "$defs": { + "cves": { + "description": "is a list of Common Vulnerabilities and Exposures (CVE) identifiers related to this vulnerability." + }, + "qualifiers": { + "description": "are package attributes that confirm the package is affected by the vulnerability." + }, + "ranges": { + "description": "specifies the affected version ranges and fixes if available." 
+ } + }, + "properties": { + "cves": { + "items": { + "type": "string" + }, + "type": "array" + }, + "qualifiers": { + "$ref": "#/$defs/AffectedPackageQualifiers" + }, + "ranges": { + "items": { + "$ref": "#/$defs/AffectedRange" + }, + "type": "array" + } + }, + "type": "object" + }, + "AffectedPackageInfo": { + "$defs": { + "cpe": { + "description": "is a Common Platform Enumeration that is affected by the vulnerability" + }, + "detail": { + "description": "is the detailed information about the affected package" + }, + "namespace": { + "description": "is a holdover value from the v5 DB schema that combines provider and search methods into a single value\nDeprecated: this field will be removed in a later version of the search schema" + }, + "os": { + "description": "identifies the operating system release that the affected package is released for" + }, + "package": { + "description": "identifies the name of the package in a specific ecosystem affected by the vulnerability" + } + }, + "properties": { + "os": { + "$ref": "#/$defs/OperatingSystem" + }, + "package": { + "$ref": "#/$defs/Package" + }, + "cpe": { + "$ref": "#/$defs/CPE" + }, + "namespace": { + "type": "string" + }, + "detail": { + "$ref": "#/$defs/AffectedPackageBlob" + } + }, + "type": "object", + "required": [ + "namespace", + "detail" + ] + }, + "AffectedPackageQualifiers": { + "$defs": { + "platform_cpes": { + "description": "lists Common Platform Enumeration (CPE) identifiers for affected platforms." + }, + "rpm_modularity": { + "description": "indicates if the package follows RPM modularity for versioning." + } + }, + "properties": { + "rpm_modularity": { + "type": "string" + }, + "platform_cpes": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "AffectedRange": { + "$defs": { + "fix": { + "description": "provides details on the fix version and its state if available." + }, + "version": { + "description": "defines the version constraints for affected software." + } + }, + "properties": { + "version": { + "$ref": "#/$defs/AffectedVersion" + }, + "fix": { + "$ref": "#/$defs/Fix" + } + }, + "type": "object" + }, + "AffectedVersion": { + "$defs": { + "constraint": { + "description": "defines the version range constraint for affected versions." + }, + "type": { + "description": "specifies the versioning system used (e.g., 'semver', 'rpm')." 
+ } + }, + "properties": { + "type": { + "type": "string" + }, + "constraint": { + "type": "string" + } + }, + "type": "object" + }, + "CPE": { + "properties": { + "ID": { + "type": "integer" + }, + "Part": { + "type": "string" + }, + "Vendor": { + "type": "string" + }, + "Product": { + "type": "string" + }, + "Edition": { + "type": "string" + }, + "Language": { + "type": "string" + }, + "SoftwareEdition": { + "type": "string" + }, + "TargetHardware": { + "type": "string" + }, + "TargetSoftware": { + "type": "string" + }, + "Other": { + "type": "string" + }, + "Packages": { + "items": { + "$ref": "#/$defs/Package" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "ID", + "Part", + "Vendor", + "Product", + "Edition", + "Language", + "SoftwareEdition", + "TargetHardware", + "TargetSoftware", + "Other", + "Packages" + ] + }, + "EPSS": { + "properties": { + "cve": { + "type": "string" + }, + "epss": { + "type": "number" + }, + "percentile": { + "type": "number" + }, + "date": { + "type": "string" + } + }, + "type": "object", + "required": [ + "cve", + "epss", + "percentile", + "date" + ] + }, + "Fix": { + "$defs": { + "detail": { + "description": "provides additional fix information, such as commit details." + }, + "state": { + "description": "represents the status of the fix (e.g., 'fixed', 'unaffected')." + }, + "version": { + "description": "is the version number of the fix." + } + }, + "properties": { + "version": { + "type": "string" + }, + "state": { + "type": "string" + }, + "detail": { + "$ref": "#/$defs/FixDetail" + } + }, + "type": "object" + }, + "FixDetail": { + "$defs": { + "git_commit": { + "description": "is the identifier for the Git commit associated with the fix." + }, + "references": { + "description": "contains URLs or identifiers for additional resources on the fix." + }, + "timestamp": { + "description": "is the date and time when the fix was committed." + } + }, + "properties": { + "git_commit": { + "type": "string" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "references": { + "items": { + "$ref": "#/$defs/Reference" + }, + "type": "array" + } + }, + "type": "object" + }, + "KnownExploited": { + "properties": { + "cve": { + "type": "string" + }, + "vendor_project": { + "type": "string" + }, + "product": { + "type": "string" + }, + "date_added": { + "type": "string" + }, + "required_action": { + "type": "string" + }, + "due_date": { + "type": "string" + }, + "known_ransomware_campaign_use": { + "type": "string" + }, + "notes": { + "type": "string" + }, + "urls": { + "items": { + "type": "string" + }, + "type": "array" + }, + "cwes": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "cve", + "known_ransomware_campaign_use" + ] + }, + "Match": { + "$defs": { + "packages": { + "description": "is the list of packages affected by the vulnerability." + }, + "vulnerability": { + "description": "is the core advisory record for a single known vulnerability from a specific provider." 
+ } + }, + "properties": { + "vulnerability": { + "$ref": "#/$defs/VulnerabilityInfo" + }, + "packages": { + "items": { + "$ref": "#/$defs/AffectedPackageInfo" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "vulnerability", + "packages" + ] + }, + "Matches": { + "items": { + "$ref": "#/$defs/Match" + }, + "type": "array" + }, + "OperatingSystem": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version" + ] + }, + "Package": { + "properties": { + "name": { + "type": "string" + }, + "ecosystem": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "ecosystem" + ] + }, + "Reference": { + "$defs": { + "tags": { + "description": "is a free-form organizational field to convey additional information about the reference" + }, + "url": { + "description": "is the external resource" + } + }, + "properties": { + "url": { + "type": "string" + }, + "tags": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "url" + ] + }, + "Severity": { + "$defs": { + "rank": { + "description": "is a free-form organizational field to convey priority over other severities" + }, + "scheme": { + "description": "describes the quantitative method used to determine the Score, such as 'CVSS_V3'. Alternatively this makes\nclaim that Value is qualitative, for example 'HML' (High, Medium, Low), CHMLN (critical-high-medium-low-negligible)" + }, + "source": { + "description": "is the name of the source of the severity score (e.g. 'nvd@nist.gov' or 'security-advisories@github.com')" + }, + "value": { + "description": "is the severity score (e.g. '7.5', 'CVSS:4.0/AV:N/AC:L/AT:N/PR:H/UI:N/VC:L/VI:L/VA:N/SC:N/SI:N/SA:N', or 'high' )" + } + }, + "properties": { + "scheme": { + "type": "string" + }, + "value": true, + "source": { + "type": "string" + }, + "rank": { + "type": "integer" + } + }, + "type": "object", + "required": [ + "scheme", + "value", + "rank" + ] + }, + "VulnerabilityInfo": { + "$defs": { + "epss": { + "description": "is a list of Exploit Prediction Scoring System (EPSS) scores for the vulnerability" + }, + "known_exploited": { + "description": "is a list of known exploited vulnerabilities from the CISA KEV dataset" + }, + "modified_date": { + "description": "is the date the vulnerability record was last modified" + }, + "provider": { + "description": "is the upstream data processor (usually Vunnel) that is responsible for vulnerability records. Each provider\nshould be scoped to a specific vulnerability dataset, for instance, the 'ubuntu' provider for all records from\nCanonicals' Ubuntu Security Notices (for all Ubuntu distro versions)." 
+ }, + "published_date": { + "description": "is the date the vulnerability record was first published" + }, + "severity": { + "description": "is the single string representation of the vulnerability's severity based on the set of available severity values" + }, + "status": { + "description": "conveys the actionability of the current record (one of 'active', 'analyzing', 'rejected', 'disputed')" + }, + "withdrawn_date": { + "description": "is the date the vulnerability record was withdrawn" + } + }, + "properties": { + "id": { + "type": "string" + }, + "assigner": { + "items": { + "type": "string" + }, + "type": "array" + }, + "description": { + "type": "string" + }, + "refs": { + "items": { + "$ref": "#/$defs/Reference" + }, + "type": "array" + }, + "aliases": { + "items": { + "type": "string" + }, + "type": "array" + }, + "severities": { + "items": { + "$ref": "#/$defs/Severity" + }, + "type": "array" + }, + "severity": { + "type": "string" + }, + "provider": { + "type": "string" + }, + "status": { + "type": "string" + }, + "published_date": { + "type": "string", + "format": "date-time" + }, + "modified_date": { + "type": "string", + "format": "date-time" + }, + "withdrawn_date": { + "type": "string", + "format": "date-time" + }, + "known_exploited": { + "items": { + "$ref": "#/$defs/KnownExploited" + }, + "type": "array" + }, + "epss": { + "items": { + "$ref": "#/$defs/EPSS" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "id", + "provider", + "status" + ] + } + } +} diff --git a/schema/grype/db-search/json/schema-latest.json b/schema/grype/db-search/json/schema-latest.json index d1ddb779dd3..ac9ac84a1e4 100644 --- a/schema/grype/db-search/json/schema-latest.json +++ b/schema/grype/db-search/json/schema-latest.json @@ -1,6 +1,6 @@ { "$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "anchore.io/schema/grype/db-search/json/1.0.1/matches", + "$id": "anchore.io/schema/grype/db-search/json/1.0.3/matches", "$ref": "#/$defs/Matches", "$defs": { "AffectedPackageBlob": { @@ -42,6 +42,9 @@ "detail": { "description": "is the detailed information about the affected package" }, + "namespace": { + "description": "is a holdover value from the v5 DB schema that combines provider and search methods into a single value\nDeprecated: this field will be removed in a later version of the search schema" + }, "os": { "description": "identifies the operating system release that the affected package is released for" }, @@ -59,12 +62,16 @@ "cpe": { "$ref": "#/$defs/CPE" }, + "namespace": { + "type": "string" + }, "detail": { "$ref": "#/$defs/AffectedPackageBlob" } }, "type": "object", "required": [ + "namespace", "detail" ] }, @@ -442,6 +449,9 @@ "published_date": { "description": "is the date the vulnerability record was first published" }, + "severity": { + "description": "is the single string representation of the vulnerability's severity based on the set of available severity values" + }, "status": { "description": "conveys the actionability of the current record (one of 'active', 'analyzing', 'rejected', 'disputed')" }, @@ -480,6 +490,9 @@ }, "type": "array" }, + "severity": { + "type": "string" + }, "provider": { "type": "string" }, diff --git a/templates/html.tmpl b/templates/html.tmpl index 5fbab17f1f6..a550829375d 100644 --- a/templates/html.tmpl +++ b/templates/html.tmpl @@ -1,595 +1,1545 @@ - - - - - - Vulnerability Report - - - - - - - - - - - - - - - - - - - - - - - - - - - -{{/* Initialize counters */}} -{{- $CountCritical := 0 }} -{{- $CountHigh := 0 }} -{{- 
$CountMedium := 0 }} -{{- $CountLow := 0}} -{{- $CountUnknown := 0 }} - -{{/* Create a list */}} -{{- $FilteredMatches := list }} - -{{/* Loop through all vulns limit output and set count*/}} -{{- range $vuln := .Matches }} - {{/* Use this filter to exclude severity if needed */}} - {{- if or (eq $vuln.Vulnerability.Severity "Critical") (eq $vuln.Vulnerability.Severity "High") (eq $vuln.Vulnerability.Severity "Medium") (eq $vuln.Vulnerability.Severity "Low") (eq $vuln.Vulnerability.Severity "Unknown") }} - {{- $FilteredMatches = append $FilteredMatches $vuln }} - {{- if eq $vuln.Vulnerability.Severity "Critical" }} - {{- $CountCritical = add $CountCritical 1 }} - {{- else if eq $vuln.Vulnerability.Severity "High" }} - {{- $CountHigh = add $CountHigh 1 }} - {{- else if eq $vuln.Vulnerability.Severity "Medium" }} - {{- $CountMedium = add $CountMedium 1 }} - {{- else if eq $vuln.Vulnerability.Severity "Low" }} - {{- $CountLow = add $CountLow 1 }} - {{- else }} - {{- $CountUnknown = add $CountUnknown 1 }} - {{- end }} - {{- end }} -{{- end }} - - -
-
-
-

Container Vulnerability Report

-

Name: {{- if eq (.Source.Type) "image" -}} {{.Source.Target.UserInput}} - {{- else if eq (.Source.Type) "directory" -}} {{.Source.Target}} - {{- else if eq (.Source.Type) "file" -}} {{.Source.Target}} - {{- else -}} unknown - {{- end -}}

-

Type: {{ .Source.Type }}

-

Date: {{.Descriptor.Timestamp}}

-
-
- Grype Logo -
-
-
-
-
Critical
-
{{ $CountCritical }}
-
-
-
High
-
{{ $CountHigh }}
-
-
-
Medium
-
{{ $CountMedium }}
-
-
-
Low
-
{{ $CountLow }}
-
-
-
Unknown
-
{{ $CountUnknown }}
-
-
-
- - - - - - - - - - - - - - - {{- range $FilteredMatches }} - - - - - - - - - - - {{end}} - -
Name | Version | Type | Vulnerability | Severity | Description | State | Fixed In
{{.Artifact.Name}}{{.Artifact.Version}}{{.Artifact.Type}} - {{.Vulnerability.ID}} - {{.Vulnerability.Severity}}{{html .Vulnerability.Description}}{{.Vulnerability.Fix.State}} - {{- if .Vulnerability.Fix.Versions }} -
    - {{- range .Vulnerability.Fix.Versions }} -
  • {{ . }}
  • - {{- end }} -
- {{- else }} - N/A - {{- end }} -
-
-
- - - - - - - + + + + + + Vulnerability Report + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +{{/* Initialize counters */}} +{{- $CountCritical := 0 }} +{{- $CountHigh := 0 }} +{{- $CountMedium := 0 }} +{{- $CountLow := 0}} +{{- $CountUnknown := 0 }} + +{{/* Create a list */}} +{{- $FilteredMatches := list }} + +{{/* Loop through all vulns limit output and set count*/}} +{{- range $vuln := .Matches }} + {{/* Use this filter to exclude severity if needed */}} + {{- if or (eq $vuln.Vulnerability.Severity "Critical") (eq $vuln.Vulnerability.Severity "High") (eq $vuln.Vulnerability.Severity "Medium") (eq $vuln.Vulnerability.Severity "Low") (eq $vuln.Vulnerability.Severity "Unknown") }} + {{- $FilteredMatches = append $FilteredMatches $vuln }} + {{- if eq $vuln.Vulnerability.Severity "Critical" }} + {{- $CountCritical = add $CountCritical 1 }} + {{- else if eq $vuln.Vulnerability.Severity "High" }} + {{- $CountHigh = add $CountHigh 1 }} + {{- else if eq $vuln.Vulnerability.Severity "Medium" }} + {{- $CountMedium = add $CountMedium 1 }} + {{- else if eq $vuln.Vulnerability.Severity "Low" }} + {{- $CountLow = add $CountLow 1 }} + {{- else }} + {{- $CountUnknown = add $CountUnknown 1 }} + {{- end }} + {{- end }} +{{- end }} + + +
+
+
+

Vulnerability Report

+
+
Name:
+
{{- if eq (.Source.Type) "image" -}} {{.Source.Target.UserInput}} + {{- else if eq (.Source.Type) "directory" -}} {{.Source.Target}} + {{- else if eq (.Source.Type) "file" -}} {{.Source.Target}} + {{- else -}} unknown + {{- end -}}
+ +
Type:
+
{{ .Source.Type }}
+ + {{- /* Conditionally add ImageID (Checksum) for images */ -}} + {{- if eq .Source.Type "image" -}} + {{- with .Source.Target.ID -}} +
Checksum:
+
{{ . }}
+ {{- end -}} + {{- end -}} + +
Date:
+
+ {{.Descriptor.Timestamp}} + +
+
+
+
+ Grype Logo +
+
+
+
+
Critical
+
{{ $CountCritical }}
+
+
+
High
+
{{ $CountHigh }}
+
+
+
Medium
+
{{ $CountMedium }}
+
+
+
Low
+
{{ $CountLow }}
+
+
+
Unknown
+
{{ $CountUnknown }}
+
+
+
+ + + + + + + + + + + + + + + + + {{- range $FilteredMatches }} + + + + + + + + + + + + + {{- end }} + +
Name | Version | Type | Vulnerability | Severity | State | Fixed In | Description | Related URLs | PURL
{{.Artifact.Name}}{{.Artifact.Version}}{{.Artifact.Type}} + {{.Vulnerability.ID}} + {{.Vulnerability.Severity}}{{.Vulnerability.Fix.State}} + {{- if .Vulnerability.Fix.Versions }} +
    + {{- range .Vulnerability.Fix.Versions }} +
  • {{ . }}
  • + {{- end }} +
+ {{- else }} + N/A + {{- end }} +
{{html .Vulnerability.Description}}{{ toJson .Vulnerability.URLs }}{{ .Artifact.PURL }}
+
+
+ + + + + \ No newline at end of file diff --git a/test/integration/compare_sbom_input_vs_lib_test.go b/test/integration/compare_sbom_input_vs_lib_test.go index 80e0db48ed6..eafc94cdbc2 100644 --- a/test/integration/compare_sbom_input_vs_lib_test.go +++ b/test/integration/compare_sbom_input_vs_lib_test.go @@ -54,6 +54,7 @@ func TestCompareSBOMInputToLibResults(t *testing.T) { string(syftPkg.BinaryPkg), // these are removed due to overlap-by-file-ownership string(syftPkg.BitnamiPkg), string(syftPkg.PhpPeclPkg), + string(syftPkg.PhpPearPkg), string(syftPkg.RustPkg), string(syftPkg.KbPkg), string(syftPkg.DartPubPkg), @@ -62,6 +63,7 @@ func TestCompareSBOMInputToLibResults(t *testing.T) { string(syftPkg.ConanPkg), string(syftPkg.HexPkg), string(syftPkg.PortagePkg), + string(syftPkg.HomebrewPkg), string(syftPkg.CocoapodsPkg), string(syftPkg.HackagePkg), string(syftPkg.NixPkg), diff --git a/test/integration/match_by_image_test.go b/test/integration/match_by_image_test.go index 6b43eb9cf45..90f5cb9cdd8 100644 --- a/test/integration/match_by_image_test.go +++ b/test/integration/match_by_image_test.go @@ -11,6 +11,7 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/grype/grype" + "github.com/anchore/grype/grype/distro" "github.com/anchore/grype/grype/match" "github.com/anchore/grype/grype/matcher" "github.com/anchore/grype/grype/matcher/dotnet" @@ -30,7 +31,6 @@ import ( "github.com/anchore/syft/syft" "github.com/anchore/syft/syft/cataloging/pkgcataloging" "github.com/anchore/syft/syft/cpe" - "github.com/anchore/syft/syft/linux" syftPkg "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/source" ) @@ -56,42 +56,42 @@ func addAlpineMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Co { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "distro": map[string]string{ - "type": "alpine", - "version": "3.12.0", + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: "alpine", + Version: "3.12.0", }, - "namespace": "alpine:distro:alpine:3.12", - "package": map[string]string{ - "name": "libvncserver", - "version": "0.9.9", + Namespace: "alpine:distro:alpine:3.12", + Package: match.PackageParameter{ + Name: "libvncserver", + Version: "0.9.9", }, }, - Found: map[string]any{ - "versionConstraint": "< 0.9.10 (unknown)", - "vulnerabilityID": vulnObj.ID, + Found: match.DistroResult{ + VersionConstraint: "< 0.9.10 (unknown)", + VulnerabilityID: vulnObj.ID, }, Matcher: match.ApkMatcher, }, { // note: the input pURL has an upstream reference (redundant) - Type: "exact-indirect-match", - SearchedBy: map[string]any{ - "distro": map[string]string{ - "type": "alpine", - "version": "3.12.0", + Type: match.ExactIndirectMatch, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: "alpine", + Version: "3.12.0", }, - "namespace": "alpine:distro:alpine:3.12", - "package": map[string]string{ - "name": "libvncserver", - "version": "0.9.9", + Namespace: "alpine:distro:alpine:3.12", + Package: match.PackageParameter{ + Name: "libvncserver", + Version: "0.9.9", }, }, - Found: map[string]any{ - "versionConstraint": "< 0.9.10 (unknown)", - "vulnerabilityID": "CVE-alpine-libvncserver", + Found: match.DistroResult{ + VersionConstraint: "< 0.9.10 (unknown)", + VulnerabilityID: "CVE-alpine-libvncserver", }, - Matcher: "apk-matcher", + Matcher: match.ApkMatcher, Confidence: 1, }, }, @@ -117,17 +117,17 @@ func addJavascriptMatches(t *testing.T, theSource source.Source, catalog *syftPk { Type: 
match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "language": "javascript", - "namespace": "github:language:javascript", - "package": map[string]string{ - "name": thePkg.Name, - "version": thePkg.Version, + SearchedBy: match.EcosystemParameters{ + Language: "javascript", + Namespace: "github:language:javascript", + Package: match.PackageParameter{ + Name: thePkg.Name, + Version: thePkg.Version, }, }, - Found: map[string]any{ - "versionConstraint": "> 5, < 7.2.1 (unknown)", - "vulnerabilityID": vulnObj.ID, + Found: match.EcosystemResult{ + VersionConstraint: "> 5, < 7.2.1 (unknown)", + VulnerabilityID: vulnObj.ID, }, Matcher: match.JavascriptMatcher, }, @@ -157,17 +157,17 @@ func addPythonMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Co { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "language": "python", - "namespace": "github:language:python", - "package": map[string]string{ - "name": thePkg.Name, - "version": thePkg.Version, + SearchedBy: match.EcosystemParameters{ + Language: "python", + Namespace: "github:language:python", + Package: match.PackageParameter{ + Name: thePkg.Name, + Version: thePkg.Version, }, }, - Found: map[string]any{ - "versionConstraint": "< 2.6.2 (python)", - "vulnerabilityID": vulnObj.ID, + Found: match.EcosystemResult{ + VersionConstraint: "< 2.6.2 (python)", + VulnerabilityID: vulnObj.ID, }, Matcher: match.PythonMatcher, }, @@ -177,7 +177,21 @@ func addPythonMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Co func addDotnetMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Collection, provider vulnerability.Provider, theResult *match.Matches) { packages := catalog.PackagesByPath("/dotnet/TestLibrary.deps.json") - if len(packages) != 2 { // TestLibrary + AWSSDK.Core + // 55caef8df7ac822e Pkg(name="TestLibrary" version="1.0.0" type="dotnet" id="55caef8df7ac822e") + // 0012329cdebba0ea Pkg(name="AWSSDK.Core" version="3.7.10.6" type="dotnet" id="0012329cdebba0ea") + // 07ec6fb2adb2cf8f Pkg(name="Microsoft.Extensions.DependencyInjection.Abstractions" version="6.0.0" type="dotnet" id="07ec6fb2adb2cf8f") + // ff03e77b91acca32 Pkg(name="Microsoft.Extensions.DependencyInjection" version="6.0.0" type="dotnet" id="ff03e77b91acca32") + // a1ea42c8f064083e Pkg(name="Microsoft.Extensions.Logging.Abstractions" version="6.0.0" type="dotnet" id="a1ea42c8f064083e") + // aaef85a2649e5d15 Pkg(name="Microsoft.Extensions.Logging" version="6.0.0" type="dotnet" id="aaef85a2649e5d15") + // 4af0fb6a81ba0423 Pkg(name="Microsoft.Extensions.Options" version="6.0.0" type="dotnet" id="4af0fb6a81ba0423") + // cb41a8aefdf40c3a Pkg(name="Microsoft.Extensions.Primitives" version="6.0.0" type="dotnet" id="cb41a8aefdf40c3a") + // 5ee80fba9caa3ab3 Pkg(name="Newtonsoft.Json" version="13.0.1" type="dotnet" id="5ee80fba9caa3ab3") + // df4b5dc73acd1f36 Pkg(name="Serilog.Sinks.Console" version="4.0.1" type="dotnet" id="df4b5dc73acd1f36") + // 023b9ba74c5c5ef5 Pkg(name="Serilog" version="2.10.0" type="dotnet" id="023b9ba74c5c5ef5") + // 430e4d4304a3ff55 Pkg(name="System.Diagnostics.DiagnosticSource" version="6.0.0" type="dotnet" id="430e4d4304a3ff55") + // 42021023d8f87661 Pkg(name="System.Runtime.CompilerServices.Unsafe" version="6.0.0" type="dotnet" id="42021023d8f87661") + // 2bb01d8c22df1e95 Pkg(name="TestCommon" version="1.0.0" type="dotnet" id="2bb01d8c22df1e95") + if len(packages) != 14 { for _, p := range packages { t.Logf("Dotnet Package: %s %+v", p.ID(), p) } @@ -197,17 +211,17 @@ func 
addDotnetMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Co { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "language": "dotnet", - "namespace": "github:language:dotnet", - "package": map[string]string{ - "name": thePkg.Name, - "version": thePkg.Version, + SearchedBy: match.EcosystemParameters{ + Language: "dotnet", + Namespace: "github:language:dotnet", + Package: match.PackageParameter{ + Name: thePkg.Name, + Version: thePkg.Version, }, }, - Found: map[string]any{ - "versionConstraint": ">= 3.7.0.0, < 3.7.12.0 (unknown)", - "vulnerabilityID": vulnObj.ID, + Found: match.EcosystemResult{ + VersionConstraint: ">= 3.7.0.0, < 3.7.12.0 (unknown)", + VulnerabilityID: vulnObj.ID, }, Matcher: match.DotnetMatcher, }, @@ -234,17 +248,17 @@ func addRubyMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Coll { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "language": "ruby", - "namespace": "github:language:ruby", - "package": map[string]string{ - "name": thePkg.Name, - "version": thePkg.Version, + SearchedBy: match.EcosystemParameters{ + Language: "ruby", + Namespace: "github:language:ruby", + Package: match.PackageParameter{ + Name: thePkg.Name, + Version: thePkg.Version, }, }, - Found: map[string]any{ - "versionConstraint": "> 2.0.0, <= 2.1.4 (unknown)", - "vulnerabilityID": vulnObj.ID, + Found: match.EcosystemResult{ + VersionConstraint: "> 2.0.0, <= 2.1.4 (unknown)", + VulnerabilityID: vulnObj.ID, }, Matcher: match.RubyGemMatcher, }, @@ -293,17 +307,17 @@ func addGolangMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Co { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "language": "go", - "namespace": "github:language:go", - "package": map[string]string{ - "name": thePkg.Name, - "version": thePkg.Version, + SearchedBy: match.EcosystemParameters{ + Language: "go", + Namespace: "github:language:go", + Package: match.PackageParameter{ + Name: thePkg.Name, + Version: thePkg.Version, }, }, - Found: map[string]any{ - "versionConstraint": "< 1.4.0 (unknown)", - "vulnerabilityID": vulnObj.ID, + Found: match.EcosystemResult{ + VersionConstraint: "< 1.4.0 (unknown)", + VulnerabilityID: vulnObj.ID, }, Matcher: match.GoModuleMatcher, }, @@ -340,17 +354,17 @@ func addJavaMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Coll { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "language": "java", - "namespace": "github:language:java", - "package": map[string]string{ - "name": thePkg.Name, - "version": thePkg.Version, + SearchedBy: match.EcosystemParameters{ + Language: "java", + Namespace: "github:language:java", + Package: match.PackageParameter{ + Name: thePkg.Name, + Version: thePkg.Version, }, }, - Found: map[string]any{ - "versionConstraint": ">= 0.0.1, < 1.2.0 (unknown)", - "vulnerabilityID": vulnObj.ID, + Found: match.EcosystemResult{ + VersionConstraint: ">= 0.0.1, < 1.2.0 (unknown)", + VulnerabilityID: vulnObj.ID, }, Matcher: match.JavaMatcher, }, @@ -378,20 +392,20 @@ func addDpkgMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Coll { Type: match.ExactIndirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "distro": map[string]string{ - "type": "debian", - "version": "8", + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: "debian", + Version: "8", }, - "namespace": "debian:distro:debian:8", - "package": map[string]string{ - "name": "apt-dev", - "version": "1.8.2", 
+ Namespace: "debian:distro:debian:8", + Package: match.PackageParameter{ + Name: "apt-dev", + Version: "1.8.2", }, }, - Found: map[string]any{ - "versionConstraint": "<= 1.8.2 (deb)", - "vulnerabilityID": vulnObj.ID, + Found: match.DistroResult{ + VersionConstraint: "<= 1.8.2 (deb)", + VulnerabilityID: vulnObj.ID, }, Matcher: match.DpkgMatcher, }, @@ -418,20 +432,20 @@ func addPortageMatches(t *testing.T, theSource source.Source, catalog *syftPkg.C { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "distro": map[string]string{ - "type": "gentoo", - "version": "2.8", + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: "gentoo", + Version: "2.8", }, - "namespace": "gentoo:distro:gentoo:2.8", - "package": map[string]string{ - "name": "app-containers/skopeo", - "version": "1.5.1", + Namespace: "gentoo:distro:gentoo:2.8", + Package: match.PackageParameter{ + Name: "app-containers/skopeo", + Version: "1.5.1", }, }, - Found: map[string]any{ - "versionConstraint": "< 1.6.0 (unknown)", - "vulnerabilityID": vulnObj.ID, + Found: match.DistroResult{ + VersionConstraint: "< 1.6.0 (unknown)", + VulnerabilityID: vulnObj.ID, }, Matcher: match.PortageMatcher, }, @@ -458,20 +472,20 @@ func addRhelMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Coll { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "distro": map[string]string{ - "type": "centos", - "version": "8", + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: "centos", + Version: "8", }, - "namespace": "redhat:distro:redhat:8", - "package": map[string]string{ - "name": "dive", - "version": "0:0.9.2-1", + Namespace: "redhat:distro:redhat:8", + Package: match.PackageParameter{ + Name: "dive", + Version: "0:0.9.2-1", }, }, - Found: map[string]any{ - "versionConstraint": "<= 1.0.42 (rpm)", - "vulnerabilityID": vulnObj.ID, + Found: match.DistroResult{ + VersionConstraint: "<= 1.0.42 (rpm)", + VulnerabilityID: vulnObj.ID, }, Matcher: match.RpmMatcher, }, @@ -500,20 +514,20 @@ func addSlesMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Coll { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "distro": map[string]string{ - "type": "sles", - "version": "12.5", + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: "sles", + Version: "12.5", }, - "namespace": "sles:distro:sles:12.5", - "package": map[string]string{ - "name": "dive", - "version": "0:0.9.2-1", + Namespace: "sles:distro:sles:12.5", + Package: match.PackageParameter{ + Name: "dive", + Version: "0:0.9.2-1", }, }, - Found: map[string]any{ - "versionConstraint": "<= 1.0.42 (rpm)", - "vulnerabilityID": vulnObj.ID, + Found: match.DistroResult{ + VersionConstraint: "<= 1.0.42 (rpm)", + VulnerabilityID: vulnObj.ID, }, Matcher: match.RpmMatcher, }, @@ -540,17 +554,17 @@ func addHaskellMatches(t *testing.T, theSource source.Source, catalog *syftPkg.C { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "language": "haskell", - "namespace": "github:language:haskell", - "package": map[string]string{ - "name": thePkg.Name, - "version": thePkg.Version, + SearchedBy: match.EcosystemParameters{ + Language: "haskell", + Namespace: "github:language:haskell", + Package: match.PackageParameter{ + Name: thePkg.Name, + Version: thePkg.Version, }, }, - Found: map[string]any{ - "versionConstraint": "< 0.9.0 (unknown)", - "vulnerabilityID": "CVE-haskell-sample", + Found: 
match.EcosystemResult{ + VersionConstraint: "< 0.9.0 (unknown)", + VulnerabilityID: "CVE-haskell-sample", }, Matcher: match.StockMatcher, }, @@ -589,7 +603,7 @@ func addJvmMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Colle CPEs: []string{ "cpe:2.3:a:oracle:jdk:1.8.0:update400:*:*:*:*:*:*", }, - Package: match.CPEPackageParameter{Name: "jdk", Version: "1.8.0_400-b07"}, + Package: match.PackageParameter{Name: "jdk", Version: "1.8.0_400-b07"}, }, Found: match.CPEResult{ VulnerabilityID: "CVE-jdk", @@ -626,17 +640,17 @@ func addRustMatches(t *testing.T, theSource source.Source, catalog *syftPkg.Coll { Type: match.ExactDirectMatch, Confidence: 1.0, - SearchedBy: map[string]any{ - "language": "rust", - "namespace": "github:language:rust", - "package": map[string]string{ - "name": thePkg.Name, - "version": thePkg.Version, + SearchedBy: match.EcosystemParameters{ + Language: "rust", + Namespace: "github:language:rust", + Package: match.PackageParameter{ + Name: thePkg.Name, + Version: thePkg.Version, }, }, - Found: map[string]any{ - "versionConstraint": vulnObj.Constraint.String(), - "vulnerabilityID": vulnObj.ID, + Found: match.EcosystemResult{ + VersionConstraint: vulnObj.Constraint.String(), + VulnerabilityID: vulnObj.ID, }, Matcher: match.RustMatcher, }, @@ -773,7 +787,7 @@ func TestMatchByImage(t *testing.T) { }, }) - actualResults := grype.FindVulnerabilitiesForPackage(theProvider, s.Artifacts.LinuxDistribution, matchers, pkg.FromCollection(s.Artifacts.Packages, pkg.SynthesisConfig{})) + actualResults := grype.FindVulnerabilitiesForPackage(theProvider, distro.FromRelease(s.Artifacts.LinuxDistribution), matchers, pkg.FromCollection(s.Artifacts.Packages, pkg.SynthesisConfig{})) for _, m := range actualResults.Sorted() { for _, d := range m.Details { observedMatchers.Add(string(d.Matcher)) @@ -841,6 +855,7 @@ func TestMatchByImage(t *testing.T) { definedMatchers.Remove(string(match.StockMatcher)) definedMatchers.Remove(string(match.MsrcMatcher)) definedMatchers.Remove(string(match.PortageMatcher)) // TODO: add this back in when #744 is complete + definedMatchers.Remove(string(match.BitnamiMatcher)) // bitnami will be tested via quality gate if len(observedMatchers) != len(definedMatchers) { t.Errorf("matcher coverage incomplete (matchers=%d, coverage=%d)", len(definedMatchers), len(observedMatchers)) @@ -886,23 +901,23 @@ func testIgnoredMatches() []match.IgnoredMatch { }, Details: []match.Detail{ { - Type: "exact-indirect-match", - SearchedBy: map[string]any{ - "distro": map[string]string{ - "type": "alpine", - "version": "3.12.0", + Type: match.ExactIndirectMatch, + SearchedBy: match.DistroParameters{ + Distro: match.DistroIdentification{ + Type: "alpine", + Version: "3.12.0", }, - "namespace": "alpine:distro:alpine:3.12", - "package": map[string]string{ - "name": "libvncserver", - "version": "0.9.9", + Namespace: "alpine:distro:alpine:3.12", + Package: match.PackageParameter{ + Name: "libvncserver", + Version: "0.9.9", }, }, - Found: map[string]any{ - "versionConstraint": "< 0.9.10 (unknown)", - "vulnerabilityID": "CVE-alpine-libvncserver", + Found: match.DistroResult{ + VersionConstraint: "< 0.9.10 (unknown)", + VulnerabilityID: "CVE-alpine-libvncserver", }, - Matcher: "apk-matcher", + Matcher: match.ApkMatcher, Confidence: 1, }, }, @@ -931,7 +946,6 @@ func vexMatches(t *testing.T, ignoredMatches []match.IgnoredMatch, vexStatus vex }, }, }, - Distro: &linux.Release{}, } vexedMatches, ignoredMatches, err := vexMatcher.ApplyVEX(pctx, &matches, ignoredMatches) diff --git 
a/test/integration/match_by_sbom_document_test.go b/test/integration/match_by_sbom_document_test.go index c4e766d17f1..48977170be8 100644 --- a/test/integration/match_by_sbom_document_test.go +++ b/test/integration/match_by_sbom_document_test.go @@ -23,33 +23,6 @@ func TestMatchBySBOMDocument(t *testing.T) { expectedIDs []string expectedDetails []match.Detail }{ - { - name: "single KB package", - fixture: "test-fixtures/sbom/syft-sbom-with-kb-packages.json", - expectedIDs: []string{"CVE-2016-3333"}, - expectedDetails: []match.Detail{ - { - Type: match.ExactDirectMatch, - SearchedBy: map[string]interface{}{ - "distro": map[string]string{ - "type": "windows", - "version": "10816", - }, - "namespace": "msrc:distro:windows:10816", - "package": map[string]string{ - "name": "10816", - "version": "3200970", - }, - }, - Found: map[string]interface{}{ - "versionConstraint": "3200970 || 878787 || base (kb)", - "vulnerabilityID": "CVE-2016-3333", - }, - Matcher: match.MsrcMatcher, - Confidence: 1, - }, - }, - }, { name: "unknown package type", fixture: "test-fixtures/sbom/syft-sbom-with-unknown-packages.json", @@ -57,14 +30,14 @@ func TestMatchBySBOMDocument(t *testing.T) { expectedDetails: []match.Detail{ { Type: match.ExactDirectMatch, - SearchedBy: map[string]interface{}{ - "language": "idris", - "namespace": "github:language:idris", - "package": map[string]string{"name": "my-package", "version": "1.0.5"}, + SearchedBy: match.EcosystemParameters{ + Language: "idris", + Namespace: "github:language:idris", + Package: match.PackageParameter{Name: "my-package", Version: "1.0.5"}, }, - Found: map[string]interface{}{ - "versionConstraint": "< 2.0 (unknown)", - "vulnerabilityID": "CVE-bogus-my-package-2-idris", + Found: match.EcosystemResult{ + VersionConstraint: "< 2.0 (unknown)", + VulnerabilityID: "CVE-bogus-my-package-2-idris", }, Matcher: match.StockMatcher, Confidence: 1, diff --git a/test/quality/.yardstick.yaml b/test/quality/.yardstick.yaml index 3f7c8c916d3..b32fbdeb4cf 100644 --- a/test/quality/.yardstick.yaml +++ b/test/quality/.yardstick.yaml @@ -170,3 +170,45 @@ result-sets: version: latest+import-db=db.tar.zst takes: SBOM label: reference # this run is the current baseline + + pr_vs_latest_via_sbom_2024: + description: "same as 'pr_vs_latest_via_sbom', but includes vulnerabilities from 2024 and before, instead of 2021 and before" + max_year: 2024 + validations: + - max-f1-regression: 0.1 # allowed to regress 0.1 on f1 score + max-new-false-negatives: 10 + max-unlabeled-percent: 0 + max_year: 2024 + fail_on_empty_match_set: false + matrix: + images: + - docker.io/bitnami/redis:7.4.0@sha256:4bad45268adfdbb0b456d6bf74ded449ef79f3706cb4e473516a0a5b393968c0 + + tools: + - name: syft + # note: we want to use a fixed version of syft for capturing all results (NOT "latest") + version: v1.14.0 + produces: SBOM + refresh: false + + - name: grype + # note: we import a static (pinned) DB so as to prevent changes in the DB from affecting the results. The + # point of this test is to ensure the correctness of the logic in grype itself with real production data. + # By pinning the DB, the grype code itself becomes the independent variable under test (and not the + # ever-changing DB). That being said, we should update this DB periodically to ensure what we + # are testing with is not too stale.
+ # version: git:current-commit+import-db=db.tar.zst + # for a local build of grype, use for example: + version: path:../../+import-db=db.tar.zst + takes: SBOM + label: candidate # is candidate better than the current baseline? + + - name: grype + # note: we import a static (pinned) DB so as to prevent changes in the DB from affecting the results. The + # point of this test is to ensure the correctness of the logic in grype itself with real production data. + # By pinning the DB, the grype code itself becomes the independent variable under test (and not the + # ever-changing DB). That being said, we should update this DB periodically to ensure what we + # are testing with is not too stale. + version: latest+import-db=db.tar.zst + takes: SBOM + label: reference # this run is the current baseline diff --git a/test/quality/Makefile b/test/quality/Makefile index 3ba091a1175..f7b33a4f800 100644 --- a/test/quality/Makefile +++ b/test/quality/Makefile @@ -27,7 +27,7 @@ all: capture validate ## Fetch or capture all data and run all quality checks .PHONY: validate validate: venv $(VULNERABILITY_LABELS)/Makefile ## Run all quality checks against already collected data - $(YARDSTICK) validate -r $(RESULT_SET) -r $(RESULT_SET)_2022 + $(YARDSTICK) validate -r $(RESULT_SET) -r $(RESULT_SET)_2022 -r $(RESULT_SET)_2024 .PHONY: capture capture: sboms vulns ## Collect and store all syft and grype results @@ -36,18 +36,19 @@ capture: sboms vulns ## Collect and store all syft and grype results vulns: venv $(TEST_DB) ## Collect and store all grype results $(YARDSTICK) -v result capture -r $(RESULT_SET) $(YARDSTICK) -v result capture -r $(RESULT_SET)_2022 + $(YARDSTICK) -v result capture -r $(RESULT_SET)_2024 $(TEST_DB): @curl -o $(TEST_DB) -SsL $(TEST_DB_URL) .PHONY: sboms sboms: $(YARDSTICK_RESULT_DIR) venv clear-results ## Collect and store all syft results (deletes all existing results) - bash -c "make download-sboms || ($(YARDSTICK) -v result capture -r $(RESULT_SET) --only-producers && $(YARDSTICK) -v result capture -r $(RESULT_SET)_2022 --only-producers)" + bash -c "make download-sboms || ($(YARDSTICK) -v result capture -r $(RESULT_SET) --only-producers && $(YARDSTICK) -v result capture -r $(RESULT_SET)_2022 -r $(RESULT_SET)_2024 --only-producers)" .PHONY: download-sboms download-sboms: $(VULNERABILITY_LABELS)/Makefile cd vulnerability-match-labels && make venv - bash -c "export ORAS_CACHE=$(shell pwd)/.oras-cache && make venv && . vulnerability-match-labels/venv/bin/activate && ./vulnerability-match-labels/sboms.py download -r $(RESULT_SET) && ./vulnerability-match-labels/sboms.py download -r $(RESULT_SET)_2022" + bash -c "export ORAS_CACHE=$(shell pwd)/.oras-cache && make venv && . vulnerability-match-labels/venv/bin/activate && ./vulnerability-match-labels/sboms.py download -r $(RESULT_SET) && ./vulnerability-match-labels/sboms.py download -r $(RESULT_SET)_2022 && ./vulnerability-match-labels/sboms.py download -r $(RESULT_SET)_2024" venv: venv/touchfile diff --git a/test/quality/test-db b/test/quality/test-db index 3073ef8e0e6..2e752994b26 100644 --- a/test/quality/test-db +++ b/test/quality/test-db @@ -1 +1 @@ -vulnerability-db_v6.0.2_2025-02-28T01:30:50Z_1740715588.tar.zst +vulnerability-db_v6.0.2_2025-07-02T01:31:00Z_1751429758.tar.zst
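As a quick sanity check on the schema changes in this patch, below is a minimal sketch of a record that should validate against the new schema/grype/db-search-vuln/json/schema-1.0.3.json. All field values are hypothetical placeholders rather than real advisory data; per the schema, only id, provider, status, operating_systems, and affected_packages are required, and the newly added top-level severity string (the single summary of the severities entries) is optional.

    [
      {
        "id": "CVE-0000-00000",
        "description": "illustrative placeholder record; not real advisory data",
        "provider": "nvd",
        "status": "active",
        "severity": "high",
        "severities": [
          {
            "scheme": "CVSS_V3",
            "value": "7.5",
            "source": "nvd@nist.gov",
            "rank": 1
          }
        ],
        "operating_systems": [
          {
            "name": "alpine",
            "version": "3.12"
          }
        ],
        "affected_packages": 2
      }
    ]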