diff --git a/cmd/osv-scanner/main.go b/cmd/osv-scanner/main.go index bdf52718b1..a924c419b6 100644 --- a/cmd/osv-scanner/main.go +++ b/cmd/osv-scanner/main.go @@ -108,6 +108,19 @@ func run(args []string, stdout, stderr io.Writer) int { Usage: "also scan files that would be ignored by .gitignore", Value: false, }, + &cli.BoolFlag{ + Name: "experimental-local-db", + Usage: "checks for vulnerabilities using local databases", + }, + &cli.BoolFlag{ + Name: "experimental-offline", + Usage: "checks for vulnerabilities using local databases that are already cached", + }, + &cli.StringFlag{ + Name: "experimental-local-db-path", + Usage: "sets the path that local databases should be stored", + Hidden: true, + }, }, ArgsUsage: "[directory1 directory2...]", Action: func(context *cli.Context) error { @@ -149,7 +162,10 @@ func run(args []string, stdout, stderr io.Writer) int { ConfigOverridePath: context.String("config"), DirectoryPaths: context.Args().Slice(), ExperimentalScannerActions: osvscanner.ExperimentalScannerActions{ - CallAnalysis: context.Bool("experimental-call-analysis"), + LocalDBPath: context.String("experimental-local-db-path"), + CallAnalysis: context.Bool("experimental-call-analysis"), + CompareLocally: context.Bool("experimental-local-db"), + CompareOffline: context.Bool("experimental-offline"), }, }, r) diff --git a/cmd/osv-scanner/main_test.go b/cmd/osv-scanner/main_test.go index 16b64d15e6..9c82b48ea4 100644 --- a/cmd/osv-scanner/main_test.go +++ b/cmd/osv-scanner/main_test.go @@ -13,6 +13,19 @@ import ( "github.com/go-git/go-git/v5" ) +func createTestDir(t *testing.T) (string, func()) { + t.Helper() + + p, err := os.MkdirTemp("", "osv-scanner-test-*") + if err != nil { + t.Fatalf("could not create test directory: %v", err) + } + + return p, func() { + _ = os.RemoveAll(p) + } +} + func dedent(t *testing.T, str string) string { t.Helper() @@ -610,6 +623,226 @@ func TestRun_LockfileWithExplicitParseAs(t *testing.T) { } } +func TestRun_LocalDatabases(t 
*testing.T) { + t.Parallel() + + tests := []cliTestCase{ + // one specific supported lockfile + { + name: "", + args: []string{"", "--experimental-local-db", "./fixtures/locks-many/composer.lock"}, + wantExitCode: 0, + wantStdout: ` + Scanning dir ./fixtures/locks-many/composer.lock + Scanned %%/fixtures/locks-many/composer.lock file and found 1 package + Loaded Packagist local db from %%/osv-scanner/Packagist/all.zip + No vulnerabilities found + `, + wantStderr: "", + }, + // one specific supported sbom with vulns + { + name: "", + args: []string{"", "--experimental-local-db", "--config=./fixtures/osv-scanner-empty-config.toml", "./fixtures/sbom-insecure/postgres-stretch.cdx.xml"}, + wantExitCode: 1, + wantStdout: ` + Scanning dir ./fixtures/sbom-insecure/postgres-stretch.cdx.xml + Scanned %%/fixtures/sbom-insecure/postgres-stretch.cdx.xml as CycloneDX SBOM and found 136 packages + Loaded Debian local db from %%/osv-scanner/Debian/all.zip + Loaded Go local db from %%/osv-scanner/Go/all.zip + Loaded OSS-Fuzz local db from %%/osv-scanner/OSS-Fuzz/all.zip + +-------------------------------------+------+-----------+--------------------------------+------------------------------------+-------------------------------------------------+ + | OSV URL | CVSS | ECOSYSTEM | PACKAGE | VERSION | SOURCE | + +-------------------------------------+------+-----------+--------------------------------+------------------------------------+-------------------------------------------------+ + | https://osv.dev/GHSA-f3fp-gc8g-vw66 | 5.9 | Go | github.com/opencontainers/runc | v1.0.1 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | + | https://osv.dev/GHSA-g2j6-57v7-gm8c | 6.1 | Go | github.com/opencontainers/runc | v1.0.1 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | + | https://osv.dev/GHSA-m8cg-xc2p-r3fc | 2.5 | Go | github.com/opencontainers/runc | v1.0.1 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | + | https://osv.dev/GHSA-v95c-p5hm-xq8f | 6 | Go | 
github.com/opencontainers/runc | v1.0.1 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | + | https://osv.dev/GHSA-vpvm-3wq2-2wvm | 7 | Go | github.com/opencontainers/runc | v1.0.1 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | + | https://osv.dev/GHSA-p782-xgp4-8hr8 | 5.3 | Go | golang.org/x/sys | v0.0.0-20210817142637-7d9622a276b7 | fixtures/sbom-insecure/postgres-stretch.cdx.xml | + +-------------------------------------+------+-----------+--------------------------------+------------------------------------+-------------------------------------------------+ + `, + wantStderr: "", + }, + // one specific unsupported lockfile + { + name: "", + args: []string{"", "--experimental-local-db", "./fixtures/locks-many/not-a-lockfile.toml"}, + wantExitCode: 128, + wantStdout: ` + Scanning dir ./fixtures/locks-many/not-a-lockfile.toml + `, + wantStderr: ` + No package sources found, --help for usage information. + `, + }, + // all supported lockfiles in the directory should be checked + { + name: "", + args: []string{"", "--experimental-local-db", "./fixtures/locks-many"}, + wantExitCode: 0, + wantStdout: ` + Scanning dir ./fixtures/locks-many + Scanned %%/fixtures/locks-many/Gemfile.lock file and found 1 package + Scanned %%/fixtures/locks-many/alpine.cdx.xml as CycloneDX SBOM and found 15 packages + Scanned %%/fixtures/locks-many/composer.lock file and found 1 package + Scanned %%/fixtures/locks-many/package-lock.json file and found 1 package + Scanned %%/fixtures/locks-many/yarn.lock file and found 1 package + Loaded RubyGems local db from %%/osv-scanner/RubyGems/all.zip + Loaded Alpine local db from %%/osv-scanner/Alpine/all.zip + Loaded Packagist local db from %%/osv-scanner/Packagist/all.zip + Loaded npm local db from %%/osv-scanner/npm/all.zip + Loaded filter from: %%/fixtures/locks-many/osv-scanner.toml + GHSA-whgm-jr23-g3j9 has been filtered out because: Test manifest file + Filtered 1 vulnerability from output + No vulnerabilities found + `, + wantStderr: 
"", + }, + // all supported lockfiles in the directory should be checked + { + name: "", + args: []string{"", "--experimental-local-db", "./fixtures/locks-many-with-invalid"}, + wantExitCode: 127, + wantStdout: ` + Scanning dir ./fixtures/locks-many-with-invalid + Scanned %%/fixtures/locks-many-with-invalid/Gemfile.lock file and found 1 package + Scanned %%/fixtures/locks-many-with-invalid/yarn.lock file and found 1 package + Loaded RubyGems local db from %%/osv-scanner/RubyGems/all.zip + Loaded npm local db from %%/osv-scanner/npm/all.zip + `, + wantStderr: ` + Attempted to scan lockfile but failed: %%/fixtures/locks-many-with-invalid/composer.lock + `, + }, + // only the files in the given directories are checked by default (no recursion) + { + name: "", + args: []string{"", "--experimental-local-db", "./fixtures/locks-one-with-nested"}, + wantExitCode: 0, + wantStdout: ` + Scanning dir ./fixtures/locks-one-with-nested + Scanned %%/fixtures/locks-one-with-nested/yarn.lock file and found 1 package + Loaded npm local db from %%/osv-scanner/npm/all.zip + No vulnerabilities found + `, + wantStderr: "", + }, + // nested directories are checked when `--recursive` is passed + { + name: "", + args: []string{"", "--experimental-local-db", "--recursive", "./fixtures/locks-one-with-nested"}, + wantExitCode: 0, + wantStdout: ` + Scanning dir ./fixtures/locks-one-with-nested + Scanned %%/fixtures/locks-one-with-nested/nested/composer.lock file and found 1 package + Scanned %%/fixtures/locks-one-with-nested/yarn.lock file and found 1 package + Loaded Packagist local db from %%/osv-scanner/Packagist/all.zip + Loaded npm local db from %%/osv-scanner/npm/all.zip + No vulnerabilities found + `, + wantStderr: "", + }, + // .gitignored files + { + name: "", + args: []string{"", "--experimental-local-db", "--recursive", "./fixtures/locks-gitignore"}, + wantExitCode: 0, + wantStdout: ` + Scanning dir ./fixtures/locks-gitignore + Scanned %%/fixtures/locks-gitignore/Gemfile.lock file 
and found 1 package + Scanned %%/fixtures/locks-gitignore/subdir/yarn.lock file and found 1 package + Loaded RubyGems local db from %%/osv-scanner/RubyGems/all.zip + Loaded npm local db from %%/osv-scanner/npm/all.zip + No vulnerabilities found + `, + wantStderr: "", + }, + // ignoring .gitignore + { + name: "", + args: []string{"", "--experimental-local-db", "--recursive", "--no-ignore", "./fixtures/locks-gitignore"}, + wantExitCode: 0, + wantStdout: ` + Scanning dir ./fixtures/locks-gitignore + Scanned %%/fixtures/locks-gitignore/Gemfile.lock file and found 1 package + Scanned %%/fixtures/locks-gitignore/composer.lock file and found 1 package + Scanned %%/fixtures/locks-gitignore/ignored/Gemfile.lock file and found 1 package + Scanned %%/fixtures/locks-gitignore/ignored/yarn.lock file and found 1 package + Scanned %%/fixtures/locks-gitignore/subdir/Gemfile.lock file and found 1 package + Scanned %%/fixtures/locks-gitignore/subdir/composer.lock file and found 1 package + Scanned %%/fixtures/locks-gitignore/subdir/yarn.lock file and found 1 package + Scanned %%/fixtures/locks-gitignore/yarn.lock file and found 1 package + Loaded RubyGems local db from %%/osv-scanner/RubyGems/all.zip + Loaded Packagist local db from %%/osv-scanner/Packagist/all.zip + Loaded npm local db from %%/osv-scanner/npm/all.zip + No vulnerabilities found + `, + wantStderr: "", + }, + // output with json + { + name: "", + args: []string{"", "--experimental-local-db", "--json", "./fixtures/locks-many/composer.lock"}, + wantExitCode: 0, + wantStdout: ` + { + "results": [] + } + `, + wantStderr: ` + Scanning dir ./fixtures/locks-many/composer.lock + Scanned %%/fixtures/locks-many/composer.lock file and found 1 package + Loaded Packagist local db from %%/osv-scanner/Packagist/all.zip + `, + }, + { + name: "", + args: []string{"", "--experimental-local-db", "--format", "json", "./fixtures/locks-many/composer.lock"}, + wantExitCode: 0, + wantStdout: ` + { + "results": [] + } + `, + wantStderr: ` + 
Scanning dir ./fixtures/locks-many/composer.lock + Scanned %%/fixtures/locks-many/composer.lock file and found 1 package + Loaded Packagist local db from %%/osv-scanner/Packagist/all.zip + `, + }, + // output format: markdown table + { + name: "", + args: []string{"", "--experimental-local-db", "--format", "markdown", "./fixtures/locks-many/composer.lock"}, + wantExitCode: 0, + wantStdout: ` + Scanning dir ./fixtures/locks-many/composer.lock + Scanned %%/fixtures/locks-many/composer.lock file and found 1 package + Loaded Packagist local db from %%/osv-scanner/Packagist/all.zip + No vulnerabilities found + `, + wantStderr: "", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + old := tt.args + + tt.args = []string{"", "--experimental-local-db-path", testDir} + tt.args = append(tt.args, old[1:]...) + + testCli(t, tt) + }) + } +} + func TestMain(m *testing.M) { // ensure a git repository doesn't already exist in the fixtures directory, // in case we didn't get a chance to clean-up properly in the last run diff --git a/internal/local/check.go b/internal/local/check.go new file mode 100644 index 0000000000..5408e71031 --- /dev/null +++ b/internal/local/check.go @@ -0,0 +1,143 @@ +package local + +import ( + "fmt" + "os" + "path" + + "github.com/google/osv-scanner/pkg/lockfile" + "github.com/google/osv-scanner/pkg/models" + "github.com/google/osv-scanner/pkg/osv" + "github.com/google/osv-scanner/pkg/reporter" +) + +const zippedDBRemoteHost = "https://osv-vulnerabilities.storage.googleapis.com" +const envKeyLocalDBCacheDirectory = "OSV_SCANNER_LOCAL_DB_CACHE_DIRECTORY" + +func loadDB(dbBasePath string, ecosystem lockfile.Ecosystem, offline bool) (*ZipDB, error) { + return NewZippedDB(dbBasePath, string(ecosystem), fmt.Sprintf("%s/%s/all.zip", zippedDBRemoteHost, ecosystem), offline) +} + +func toPackageDetails(query *osv.Query) 
(lockfile.PackageDetails, error) { + if query.Package.PURL != "" { + pkg, err := models.PURLToPackage(query.Package.PURL) + + if err != nil { + return lockfile.PackageDetails{}, err + } + + return lockfile.PackageDetails{ + Name: pkg.Name, + Version: pkg.Version, + Ecosystem: lockfile.Ecosystem(pkg.Ecosystem), + CompareAs: lockfile.Ecosystem(pkg.Ecosystem), + }, nil + } + + return lockfile.PackageDetails{ + Name: query.Package.Name, + Version: query.Version, + Commit: query.Commit, + Ecosystem: query.Package.Ecosystem, + CompareAs: query.Package.Ecosystem, + }, nil +} + +// setupLocalDBDirectory attempts to set up the directory the scanner should +// use to store local databases. +// +// if a local path is explicitly provided either by the localDBPath parameter +// or via the envKeyLocalDBCacheDirectory environment variable, the scanner will +// attempt to use the user cache directory if possible or otherwise the temp directory +// +// if an error occurs at any point when a local path is not explicitly provided, +// the scanner will fall back to the temp directory first before finally erroring +func setupLocalDBDirectory(localDBPath string) (string, error) { + var err error + + // fallback to the env variable if a local database path has not been provided + if localDBPath == "" { + if p, envSet := os.LookupEnv(envKeyLocalDBCacheDirectory); envSet { + localDBPath = p + } + } + + implicitPath := localDBPath == "" + + // if we're implicitly picking a path, use the user cache directory if available + if implicitPath { + localDBPath, err = os.UserCacheDir() + + if err != nil { + localDBPath = os.TempDir() + } + } + + err = os.Mkdir(path.Join(localDBPath, "osv-scanner"), 0750) + + if err == nil { + return path.Join(localDBPath, "osv-scanner"), nil + } + + // if we're implicitly picking a path, try the temp directory before giving up + if implicitPath && localDBPath != os.TempDir() { + return setupLocalDBDirectory(os.TempDir()) + } + + return "", err +} + +func 
MakeRequest(r reporter.Reporter, query osv.BatchedQuery, offline bool, localDBPath string) (*osv.HydratedBatchedResponse, error) { + results := make([]osv.Response, 0, len(query.Queries)) + dbs := make(map[lockfile.Ecosystem]*ZipDB) + + dbBasePath, err := setupLocalDBDirectory(localDBPath) + + if err != nil { + return &osv.HydratedBatchedResponse{}, fmt.Errorf("could not create %s: %w", dbBasePath, err) + } + + loadDBFromCache := func(ecosystem lockfile.Ecosystem) (*ZipDB, error) { + if db, ok := dbs[ecosystem]; ok { + return db, nil + } + + db, err := loadDB(dbBasePath, ecosystem, offline) + + if err != nil { + return nil, err + } + + r.PrintText(fmt.Sprintf("Loaded %s local db from %s\n", db.Name, db.StoredAt)) + + dbs[ecosystem] = db + + return db, nil + } + + for _, query := range query.Queries { + pkg, err := toPackageDetails(query) + + if err != nil { + // currently, this will actually only error if the PURL cannot be parsed + r.PrintError(fmt.Sprintf("skipping %s as it is not a valid PURL: %v\n", query.Package.PURL, err)) + results = append(results, osv.Response{Vulns: []models.Vulnerability{}}) + + continue + } + + db, err := loadDBFromCache(pkg.Ecosystem) + + if err != nil { + // currently, this will only error if the local database could not be loaded + r.PrintError(fmt.Sprintf("could not load db for %s ecosystem: %v\n", pkg.Ecosystem, err)) + results = append(results, osv.Response{Vulns: []models.Vulnerability{}}) + + continue + } + + results = append(results, osv.Response{Vulns: db.VulnerabilitiesAffectingPackage(pkg)}) + } + + return &osv.HydratedBatchedResponse{Results: results}, nil +} diff --git a/internal/local/fixtures/db/file.json b/internal/local/fixtures/db/file.json new file mode 100644 index 0000000000..0ba8200f7d --- /dev/null +++ b/internal/local/fixtures/db/file.json @@ -0,0 +1,3 @@ +{ + "id": "GHSA-1234" +} diff --git a/internal/local/fixtures/db/file.yaml b/internal/local/fixtures/db/file.yaml new file mode 100644 index 
0000000000..98eb6e8d8d --- /dev/null +++ b/internal/local/fixtures/db/file.yaml @@ -0,0 +1 @@ +id: GHSA-5678 diff --git a/internal/local/fixtures/db/nested-1/osv-1.json b/internal/local/fixtures/db/nested-1/osv-1.json new file mode 100644 index 0000000000..7c531474a8 --- /dev/null +++ b/internal/local/fixtures/db/nested-1/osv-1.json @@ -0,0 +1,3 @@ +{ + "id": "OSV-1" +} diff --git a/internal/local/fixtures/db/nested-2/invalid.json b/internal/local/fixtures/db/nested-2/invalid.json new file mode 100644 index 0000000000..688de50639 --- /dev/null +++ b/internal/local/fixtures/db/nested-2/invalid.json @@ -0,0 +1,2 @@ +{ + "id": "OSV-2" diff --git a/internal/local/fixtures/db/nested-2/osv-2.json b/internal/local/fixtures/db/nested-2/osv-2.json new file mode 100644 index 0000000000..bd60877b18 --- /dev/null +++ b/internal/local/fixtures/db/nested-2/osv-2.json @@ -0,0 +1,3 @@ +{ + "id": "OSV-2" +} diff --git a/internal/local/zip.go b/internal/local/zip.go new file mode 100644 index 0000000000..6b19120284 --- /dev/null +++ b/internal/local/zip.go @@ -0,0 +1,254 @@ +package local + +import ( + "archive/zip" + "bytes" + "context" + "encoding/base64" + "encoding/binary" + "encoding/json" + "errors" + "fmt" + "hash/crc32" + "io" + "net/http" + "os" + "path" + "strings" + + "github.com/google/osv-scanner/pkg/lockfile" + "github.com/google/osv-scanner/pkg/models" + "github.com/google/osv-scanner/pkg/osv" +) + +type ZipDB struct { + // the name of the database + Name string + // the url that the zip archive was downloaded from + ArchiveURL string + // whether this database should make any network requests + Offline bool + // the path to the zip archive on disk + StoredAt string + // the vulnerabilities that are loaded into this database + vulnerabilities []models.Vulnerability +} + +var ErrOfflineDatabaseNotFound = errors.New("no offline version of the OSV database is available") + +func fetchRemoteArchiveCRC32CHash(url string) (uint32, error) { + req, err := 
http.NewRequestWithContext(context.Background(), http.MethodHead, url, nil) + + if err != nil { + return 0, err + } + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return 0, err + } + + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return 0, fmt.Errorf("db host returned %s", resp.Status) + } + + for _, value := range resp.Header.Values("x-goog-hash") { + if strings.HasPrefix(value, "crc32c=") { + value = strings.TrimPrefix(value, "crc32c=") + out, err := base64.StdEncoding.DecodeString(value) + + if err != nil { + return 0, fmt.Errorf("could not decode crc32c= checksum: %w", err) + } + + return binary.BigEndian.Uint32(out), nil + } + } + + return 0, fmt.Errorf("could not find crc32c= checksum") +} + +func fetchLocalArchiveCRC32CHash(data []byte) uint32 { + return crc32.Checksum(data, crc32.MakeTable(crc32.Castagnoli)) +} + +func (db *ZipDB) fetchZip() ([]byte, error) { + cache, err := os.ReadFile(db.StoredAt) + + if db.Offline { + if err != nil { + return nil, ErrOfflineDatabaseNotFound + } + + return cache, nil + } + + if err == nil { + remoteHash, err := fetchRemoteArchiveCRC32CHash(db.ArchiveURL) + + if err != nil { + return nil, err + } + + if fetchLocalArchiveCRC32CHash(cache) == remoteHash { + return cache, nil + } + } + + req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, db.ArchiveURL, nil) + + if err != nil { + return nil, fmt.Errorf("could not retrieve OSV database archive: %w", err) + } + + if osv.RequestUserAgent != "" { + req.Header.Set("User-Agent", osv.RequestUserAgent) + } + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, fmt.Errorf("could not retrieve OSV database archive: %w", err) + } + + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("db host returned %s", resp.Status) + } + + var body []byte + + body, err = io.ReadAll(resp.Body) + + if err != nil { + return nil, fmt.Errorf("could not read OSV database archive 
from response: %w", err) + } + + err = os.MkdirAll(path.Dir(db.StoredAt), 0750) + + if err == nil { + //nolint:gosec // being world readable is fine + err = os.WriteFile(db.StoredAt, body, 0644) + } + + if err != nil { + _, _ = fmt.Fprintf(os.Stderr, "Failed to save database to %s: %v\n", db.StoredAt, err) + } + + return body, nil +} + +// Loads the given zip file into the database as an OSV. +// It is assumed that the file is JSON and in the working directory of the db +func (db *ZipDB) loadZipFile(zipFile *zip.File) { + file, err := zipFile.Open() + if err != nil { + _, _ = fmt.Fprintf(os.Stderr, "Could not read %s: %v\n", zipFile.Name, err) + + return + } + defer file.Close() + + content, err := io.ReadAll(file) + if err != nil { + _, _ = fmt.Fprintf(os.Stderr, "Could not read %s: %v\n", zipFile.Name, err) + + return + } + + var vulnerability models.Vulnerability + + if err := json.Unmarshal(content, &vulnerability); err != nil { + _, _ = fmt.Fprintf(os.Stderr, "%s is not a valid JSON file: %v\n", zipFile.Name, err) + + return + } + + db.vulnerabilities = append(db.vulnerabilities, vulnerability) +} + +// load fetches a zip archive of the OSV database and loads known vulnerabilities +// from it (which are assumed to be in json files following the OSV spec). +// +// Internally, the archive is cached on disk, and a new copy is only +// downloaded when the CRC32C checksum of the cached archive no longer +// matches the checksum reported by the remote host. 
+func (db *ZipDB) load() error { + db.vulnerabilities = []models.Vulnerability{} + + body, err := db.fetchZip() + + if err != nil { + return err + } + + zipReader, err := zip.NewReader(bytes.NewReader(body), int64(len(body))) + if err != nil { + return fmt.Errorf("could not read OSV database archive: %w", err) + } + + // Read all the files from the zip archive + for _, zipFile := range zipReader.File { + if !strings.HasSuffix(zipFile.Name, ".json") { + continue + } + + db.loadZipFile(zipFile) + } + + return nil +} + +func NewZippedDB(dbBasePath, name, url string, offline bool) (*ZipDB, error) { + db := &ZipDB{ + Name: name, + ArchiveURL: url, + Offline: offline, + StoredAt: path.Join(dbBasePath, name, "all.zip"), + } + if err := db.load(); err != nil { + return nil, fmt.Errorf("unable to fetch OSV database: %w", err) + } + + return db, nil +} + +func (db *ZipDB) Vulnerabilities(includeWithdrawn bool) []models.Vulnerability { + if includeWithdrawn { + return db.vulnerabilities + } + + var vulnerabilities []models.Vulnerability + + for _, vulnerability := range db.vulnerabilities { + if vulnerability.Withdrawn.IsZero() { + vulnerabilities = append(vulnerabilities, vulnerability) + } + } + + return vulnerabilities +} + +func (db *ZipDB) VulnerabilitiesAffectingPackage(pkg lockfile.PackageDetails) models.Vulnerabilities { + var vulnerabilities models.Vulnerabilities + + for _, vulnerability := range db.Vulnerabilities(false) { + if vulnerability.IsAffected(pkg) && !vulnerabilities.Includes(vulnerability) { + vulnerabilities = append(vulnerabilities, vulnerability) + } + } + + return vulnerabilities +} + +func (db *ZipDB) Check(pkgs []lockfile.PackageDetails) (models.Vulnerabilities, error) { + vulnerabilities := make(models.Vulnerabilities, 0, len(pkgs)) + + for _, pkg := range pkgs { + vulnerabilities = append(vulnerabilities, db.VulnerabilitiesAffectingPackage(pkg)...) 
+ } + + return vulnerabilities, nil +} diff --git a/internal/local/zip_test.go b/internal/local/zip_test.go new file mode 100644 index 0000000000..e8bba960f0 --- /dev/null +++ b/internal/local/zip_test.go @@ -0,0 +1,470 @@ +package local_test + +import ( + "archive/zip" + "bytes" + "encoding/base64" + "encoding/binary" + "encoding/json" + "errors" + "hash/crc32" + "net/http" + "net/http/httptest" + "os" + "path" + "reflect" + "sort" + "testing" + + "github.com/google/osv-scanner/internal/local" + "github.com/google/osv-scanner/pkg/models" +) + +func createTestDir(t *testing.T) (string, func()) { + t.Helper() + + p, err := os.MkdirTemp("", "osv-scanner-test-*") + if err != nil { + t.Fatal("could not create test directory") + } + + return p, func() { + _ = os.RemoveAll(p) + } +} + +func expectDBToHaveOSVs( + t *testing.T, + db interface { + Vulnerabilities(includeWithdrawn bool) []models.Vulnerability + }, + actual []models.Vulnerability, +) { + t.Helper() + + vulns := db.Vulnerabilities(true) + + sort.Slice(vulns, func(i, j int) bool { + return vulns[i].ID < vulns[j].ID + }) + sort.Slice(actual, func(i, j int) bool { + return actual[i].ID < actual[j].ID + }) + + if !reflect.DeepEqual(vulns, actual) { + t.Errorf("db is missing some vulnerabilities: %v vs %v", vulns, actual) + } +} + +func cacheWrite(t *testing.T, storedAt string, cache []byte) { + t.Helper() + + err := os.MkdirAll(path.Dir(storedAt), 0750) + + if err == nil { + //nolint:gosec // being world readable is fine + err = os.WriteFile(storedAt, cache, 0644) + } + + if err != nil { + t.Errorf("unexpected error with cache: %v", err) + } +} + +func cacheWriteBad(t *testing.T, storedAt string, contents string) { + t.Helper() + + err := os.MkdirAll(path.Dir(storedAt), 0750) + + if err == nil { + //nolint:gosec // being world readable is fine + err = os.WriteFile(storedAt, []byte(contents), 0644) + } + + if err != nil { + t.Errorf("unexpected error with cache: %v", err) + } +} + +func createZipServer(t 
*testing.T, handler http.HandlerFunc) (*httptest.Server, func()) { + t.Helper() + + ts := httptest.NewServer(handler) + + return ts, ts.Close +} + +func computeCRC32CHash(t *testing.T, data []byte) string { + t.Helper() + + hash := crc32.Checksum(data, crc32.MakeTable(crc32.Castagnoli)) + + return base64.StdEncoding.EncodeToString(binary.BigEndian.AppendUint32([]byte{}, hash)) +} + +func writeOSVsZip(t *testing.T, w http.ResponseWriter, osvs map[string]models.Vulnerability) (int, error) { + t.Helper() + + z := zipOSVs(t, osvs) + + w.Header().Add("x-goog-hash", "crc32c="+computeCRC32CHash(t, z)) + + return w.Write(z) +} + +func zipOSVs(t *testing.T, osvs map[string]models.Vulnerability) []byte { + t.Helper() + + buf := new(bytes.Buffer) + writer := zip.NewWriter(buf) + + for fp, osv := range osvs { + data, err := json.Marshal(osv) + if err != nil { + t.Fatalf("could not marshal %v: %v", osv, err) + } + + f, err := writer.Create(fp) + if err != nil { + t.Fatal(err) + } + _, err = f.Write(data) + if err != nil { + t.Fatal(err) + } + } + + if err := writer.Close(); err != nil { + t.Fatal(err) + } + + return buf.Bytes() +} + +//nolint:unparam // name might get changed at some point +func determineStoredAtPath(dbBasePath, name string) string { + return path.Join(dbBasePath, name, "all.zip") +} + +func TestNewZippedDB_Offline_WithoutCache(t *testing.T) { + t.Parallel() + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + ts, cleanupTestServer := createZipServer(t, func(w http.ResponseWriter, r *http.Request) { + t.Errorf("a server request was made when running offline") + }) + defer cleanupTestServer() + + _, err := local.NewZippedDB(testDir, "my-db", ts.URL, true) + + if !errors.Is(err, local.ErrOfflineDatabaseNotFound) { + t.Errorf("expected \"%v\" error but got \"%v\"", local.ErrOfflineDatabaseNotFound, err) + } +} + +func TestNewZippedDB_Offline_WithCache(t *testing.T) { + t.Parallel() + + osvs := []models.Vulnerability{ + {ID: "GHSA-1"}, + 
{ID: "GHSA-2"}, + {ID: "GHSA-3"}, + {ID: "GHSA-4"}, + {ID: "GHSA-5"}, + } + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + ts, cleanupTestServer := createZipServer(t, func(w http.ResponseWriter, r *http.Request) { + t.Errorf("a server request was made when running offline") + }) + defer cleanupTestServer() + + cacheWrite(t, determineStoredAtPath(testDir, "my-db"), zipOSVs(t, map[string]models.Vulnerability{ + "GHSA-1.json": {ID: "GHSA-1"}, + "GHSA-2.json": {ID: "GHSA-2"}, + "GHSA-3.json": {ID: "GHSA-3"}, + "GHSA-4.json": {ID: "GHSA-4"}, + "GHSA-5.json": {ID: "GHSA-5"}, + })) + + db, err := local.NewZippedDB(testDir, "my-db", ts.URL, true) + + if err != nil { + t.Fatalf("unexpected error \"%v\"", err) + } + + expectDBToHaveOSVs(t, db, osvs) +} + +func TestNewZippedDB_BadZip(t *testing.T) { + t.Parallel() + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + ts, cleanupTestServer := createZipServer(t, func(w http.ResponseWriter, r *http.Request) { + _, _ = w.Write([]byte("this is not a zip")) + }) + defer cleanupTestServer() + + _, err := local.NewZippedDB(testDir, "my-db", ts.URL, false) + + if err == nil { + t.Errorf("expected an error but did not get one") + } +} + +func TestNewZippedDB_UnsupportedProtocol(t *testing.T) { + t.Parallel() + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + _, err := local.NewZippedDB(testDir, "my-db", "file://hello-world", false) + + if err == nil { + t.Errorf("expected an error but did not get one") + } +} + +func TestNewZippedDB_Online_WithoutCache(t *testing.T) { + t.Parallel() + + osvs := []models.Vulnerability{ + {ID: "GHSA-1"}, + {ID: "GHSA-2"}, + {ID: "GHSA-3"}, + {ID: "GHSA-4"}, + {ID: "GHSA-5"}, + } + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + ts, cleanupTestServer := createZipServer(t, func(w http.ResponseWriter, r *http.Request) { + _, _ = writeOSVsZip(t, w, map[string]models.Vulnerability{ + "GHSA-1.json": {ID: 
"GHSA-1"}, + "GHSA-2.json": {ID: "GHSA-2"}, + "GHSA-3.json": {ID: "GHSA-3"}, + "GHSA-4.json": {ID: "GHSA-4"}, + "GHSA-5.json": {ID: "GHSA-5"}, + }) + }) + defer cleanupTestServer() + + db, err := local.NewZippedDB(testDir, "my-db", ts.URL, false) + + if err != nil { + t.Fatalf("unexpected error \"%v\"", err) + } + + expectDBToHaveOSVs(t, db, osvs) +} + +func TestNewZippedDB_Online_WithoutCacheAndNoHashHeader(t *testing.T) { + t.Parallel() + + osvs := []models.Vulnerability{ + {ID: "GHSA-1"}, + {ID: "GHSA-2"}, + {ID: "GHSA-3"}, + {ID: "GHSA-4"}, + {ID: "GHSA-5"}, + } + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + ts, cleanupTestServer := createZipServer(t, func(w http.ResponseWriter, r *http.Request) { + _, _ = w.Write(zipOSVs(t, map[string]models.Vulnerability{ + "GHSA-1.json": {ID: "GHSA-1"}, + "GHSA-2.json": {ID: "GHSA-2"}, + "GHSA-3.json": {ID: "GHSA-3"}, + "GHSA-4.json": {ID: "GHSA-4"}, + "GHSA-5.json": {ID: "GHSA-5"}, + })) + }) + defer cleanupTestServer() + + db, err := local.NewZippedDB(testDir, "my-db", ts.URL, false) + + if err != nil { + t.Fatalf("unexpected error \"%v\"", err) + } + + expectDBToHaveOSVs(t, db, osvs) +} + +func TestNewZippedDB_Online_WithSameCache(t *testing.T) { + t.Parallel() + + osvs := []models.Vulnerability{ + {ID: "GHSA-1"}, + {ID: "GHSA-2"}, + {ID: "GHSA-3"}, + } + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + cache := zipOSVs(t, map[string]models.Vulnerability{ + "GHSA-1.json": {ID: "GHSA-1"}, + "GHSA-2.json": {ID: "GHSA-2"}, + "GHSA-3.json": {ID: "GHSA-3"}, + }) + + ts, cleanupTestServer := createZipServer(t, func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodHead { + t.Errorf("unexpected %s request", r.Method) + } + + w.Header().Add("x-goog-hash", "crc32c="+computeCRC32CHash(t, cache)) + + _, _ = w.Write(cache) + }) + defer cleanupTestServer() + + cacheWrite(t, determineStoredAtPath(testDir, "my-db"), cache) + + db, err := 
local.NewZippedDB(testDir, "my-db", ts.URL, false) + + if err != nil { + t.Fatalf("unexpected error \"%v\"", err) + } + + expectDBToHaveOSVs(t, db, osvs) +} + +func TestNewZippedDB_Online_WithDifferentCache(t *testing.T) { + t.Parallel() + + osvs := []models.Vulnerability{ + {ID: "GHSA-1"}, + {ID: "GHSA-2"}, + {ID: "GHSA-3"}, + {ID: "GHSA-4"}, + {ID: "GHSA-5"}, + } + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + ts, cleanupTestServer := createZipServer(t, func(w http.ResponseWriter, r *http.Request) { + _, _ = writeOSVsZip(t, w, map[string]models.Vulnerability{ + "GHSA-1.json": {ID: "GHSA-1"}, + "GHSA-2.json": {ID: "GHSA-2"}, + "GHSA-3.json": {ID: "GHSA-3"}, + "GHSA-4.json": {ID: "GHSA-4"}, + "GHSA-5.json": {ID: "GHSA-5"}, + }) + }) + defer cleanupTestServer() + + cacheWrite(t, determineStoredAtPath(testDir, "my-db"), zipOSVs(t, map[string]models.Vulnerability{ + "GHSA-1.json": {ID: "GHSA-1"}, + "GHSA-2.json": {ID: "GHSA-2"}, + "GHSA-3.json": {ID: "GHSA-3"}, + })) + + db, err := local.NewZippedDB(testDir, "my-db", ts.URL, false) + + if err != nil { + t.Fatalf("unexpected error \"%v\"", err) + } + + expectDBToHaveOSVs(t, db, osvs) +} + +func TestNewZippedDB_Online_WithCacheButNoHashHeader(t *testing.T) { + t.Parallel() + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + ts, cleanupTestServer := createZipServer(t, func(w http.ResponseWriter, r *http.Request) { + _, _ = w.Write(zipOSVs(t, map[string]models.Vulnerability{ + "GHSA-1.json": {ID: "GHSA-1"}, + "GHSA-2.json": {ID: "GHSA-2"}, + "GHSA-3.json": {ID: "GHSA-3"}, + "GHSA-4.json": {ID: "GHSA-4"}, + "GHSA-5.json": {ID: "GHSA-5"}, + })) + }) + defer cleanupTestServer() + + cacheWrite(t, determineStoredAtPath(testDir, "my-db"), zipOSVs(t, map[string]models.Vulnerability{ + "GHSA-1.json": {ID: "GHSA-1"}, + "GHSA-2.json": {ID: "GHSA-2"}, + "GHSA-3.json": {ID: "GHSA-3"}, + })) + + _, err := local.NewZippedDB(testDir, "my-db", ts.URL, false) + + if err == nil { + 
t.Errorf("expected an error but did not get one") + } +} + +func TestNewZippedDB_Online_WithBadCache(t *testing.T) { + t.Parallel() + + osvs := []models.Vulnerability{ + {ID: "GHSA-1"}, + {ID: "GHSA-2"}, + {ID: "GHSA-3"}, + } + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + ts, cleanupTestServer := createZipServer(t, func(w http.ResponseWriter, r *http.Request) { + _, _ = writeOSVsZip(t, w, map[string]models.Vulnerability{ + "GHSA-1.json": {ID: "GHSA-1"}, + "GHSA-2.json": {ID: "GHSA-2"}, + "GHSA-3.json": {ID: "GHSA-3"}, + }) + }) + defer cleanupTestServer() + + cacheWriteBad(t, determineStoredAtPath(testDir, "my-db"), "this is not json!") + + db, err := local.NewZippedDB(testDir, "my-db", ts.URL, false) + + if err != nil { + t.Fatalf("unexpected error \"%v\"", err) + } + + expectDBToHaveOSVs(t, db, osvs) +} + +func TestNewZippedDB_FileChecks(t *testing.T) { + t.Parallel() + + osvs := []models.Vulnerability{{ID: "GHSA-1234"}, {ID: "GHSA-4321"}} + + testDir, cleanupTestDir := createTestDir(t) + defer cleanupTestDir() + + ts, cleanupTestServer := createZipServer(t, func(w http.ResponseWriter, r *http.Request) { + _, _ = writeOSVsZip(t, w, map[string]models.Vulnerability{ + "file.json": {ID: "GHSA-1234"}, + // only files with .json suffix should be loaded + "file.yaml": {ID: "GHSA-5678"}, + // (no longer) special case for the GH security database + "advisory-database-main/advisories/unreviewed/file.json": {ID: "GHSA-4321"}, + }) + }) + defer cleanupTestServer() + + db, err := local.NewZippedDB(testDir, "my-db", ts.URL, false) + + if err != nil { + t.Fatalf("unexpected error \"%v\"", err) + } + + expectDBToHaveOSVs(t, db, osvs) +} diff --git a/pkg/models/vulnerabilities.go b/pkg/models/vulnerabilities.go new file mode 100644 index 0000000000..53c9470420 --- /dev/null +++ b/pkg/models/vulnerabilities.go @@ -0,0 +1,43 @@ +package models + +import ( + "encoding/json" + "fmt" +) + +type Vulnerabilities []Vulnerability + +func (vs Vulnerabilities) 
Includes(vulnerability Vulnerability) bool { + for _, vuln := range vs { + if vuln.ID == vulnerability.ID { + return true + } + + if vuln.isAliasOf(vulnerability) { + return true + } + if vulnerability.isAliasOf(vuln) { + return true + } + } + + return false +} + +// MarshalJSON ensures that if there are no vulnerabilities, +// an empty array is used as the value instead of "null" +func (vs Vulnerabilities) MarshalJSON() ([]byte, error) { + if len(vs) == 0 { + return []byte("[]"), nil + } + + type innerVulnerabilities Vulnerabilities + + out, err := json.Marshal(innerVulnerabilities(vs)) + + if err != nil { + return out, fmt.Errorf("%w", err) + } + + return out, nil +} diff --git a/pkg/models/vulnerabilities_test.go b/pkg/models/vulnerabilities_test.go new file mode 100644 index 0000000000..2430283eb7 --- /dev/null +++ b/pkg/models/vulnerabilities_test.go @@ -0,0 +1,184 @@ +package models_test + +import ( + "encoding/json" + "testing" + + "github.com/google/osv-scanner/pkg/models" +) + +func TestVulnerabilities_Includes(t *testing.T) { + t.Parallel() + + type args struct { + osv models.Vulnerability + } + tests := []struct { + name string + vs models.Vulnerabilities + args args + want bool + }{ + { + name: "", + vs: models.Vulnerabilities{ + models.Vulnerability{ + ID: "GHSA-1", + Aliases: []string{}, + }, + }, + args: args{ + osv: models.Vulnerability{ + ID: "GHSA-2", + Aliases: []string{}, + }, + }, + want: false, + }, + { + name: "", + vs: models.Vulnerabilities{ + models.Vulnerability{ + ID: "GHSA-1", + Aliases: []string{}, + }, + }, + args: args{ + osv: models.Vulnerability{ + ID: "GHSA-1", + Aliases: []string{}, + }, + }, + want: true, + }, + { + name: "", + vs: models.Vulnerabilities{ + models.Vulnerability{ + ID: "GHSA-1", + Aliases: []string{"GHSA-2"}, + }, + }, + args: args{ + osv: models.Vulnerability{ + ID: "GHSA-2", + Aliases: []string{}, + }, + }, + want: true, + }, + { + name: "", + vs: models.Vulnerabilities{ + models.Vulnerability{ + ID: "GHSA-1", 
+ Aliases: []string{}, + }, + }, + args: args{ + osv: models.Vulnerability{ + ID: "GHSA-2", + Aliases: []string{"GHSA-1"}, + }, + }, + want: true, + }, + { + name: "", + vs: models.Vulnerabilities{ + models.Vulnerability{ + ID: "GHSA-1", + Aliases: []string{"CVE-1"}, + }, + }, + args: args{ + osv: models.Vulnerability{ + ID: "GHSA-2", + Aliases: []string{"CVE-1"}, + }, + }, + want: true, + }, + { + name: "", + vs: models.Vulnerabilities{ + models.Vulnerability{ + ID: "GHSA-1", + Aliases: []string{"CVE-2"}, + }, + }, + args: args{ + osv: models.Vulnerability{ + ID: "GHSA-2", + Aliases: []string{"CVE-2"}, + }, + }, + want: true, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + if got := tt.vs.Includes(tt.args.osv); got != tt.want { + t.Errorf("Includes() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestVulnerabilities_MarshalJSON(t *testing.T) { + t.Parallel() + + osv := models.Vulnerability{ID: "GHSA-1"} + asJSON, err := json.Marshal(osv) + + if err != nil { + t.Fatalf("Unable to marshal osv to JSON: %v", err) + } + + tests := []struct { + name string + vs models.Vulnerabilities + want string + }{ + { + name: "", + vs: nil, + want: "[]", + }, + { + name: "", + vs: models.Vulnerabilities(nil), + want: "[]", + }, + { + name: "", + vs: models.Vulnerabilities{osv}, + want: "[" + string(asJSON) + "]", + }, + { + name: "", + vs: models.Vulnerabilities{osv, osv}, + want: "[" + string(asJSON) + "," + string(asJSON) + "]", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + got, err := tt.vs.MarshalJSON() + if err != nil { + t.Errorf("MarshalJSON() error = %v", err) + + return + } + + if gotStr := string(got); gotStr != tt.want { + t.Errorf("MarshalJSON() got = %v, want %v", gotStr, tt.want) + } + }) + } +} diff --git a/pkg/models/vulnerability.go b/pkg/models/vulnerability.go index db7a0e171a..e92d42306c 100644 --- a/pkg/models/vulnerability.go +++ 
b/pkg/models/vulnerability.go @@ -2,7 +2,14 @@ package models import ( "encoding/json" + "fmt" + "os" + "sort" "time" + + "github.com/google/osv-scanner/internal/semantic" + "github.com/google/osv-scanner/pkg/lockfile" + "golang.org/x/exp/slices" ) // Package identifies the affected code library or command provided by the @@ -167,3 +174,132 @@ func (v Vulnerability) MarshalYAML() (interface{}, error) { return raw, nil } + +func (e Event) version() string { + if e.Introduced != "" { + return e.Introduced + } + + if e.Fixed != "" { + return e.Fixed + } + + if e.Limit != "" { + return e.Limit + } + + if e.LastAffected != "" { + return e.LastAffected + } + + return "" +} + +func (ar Range) containsVersion(pkg lockfile.PackageDetails) bool { + if ar.Type != RangeEcosystem && ar.Type != RangeSemVer { + return false + } + // todo: we should probably warn here + if len(ar.Events) == 0 { + return false + } + + vp := semantic.MustParse(pkg.Version, pkg.CompareAs) + + sort.Slice(ar.Events, func(i, j int) bool { + return semantic.MustParse(ar.Events[i].version(), pkg.CompareAs).CompareStr(ar.Events[j].version()) < 0 + }) + + var affected bool + for _, e := range ar.Events { + if affected { + if e.Fixed != "" { + affected = vp.CompareStr(e.Fixed) < 0 + } else if e.LastAffected != "" { + affected = e.LastAffected == pkg.Version || vp.CompareStr(e.LastAffected) <= 0 + } + } else if e.Introduced != "" { + affected = e.Introduced == "0" || vp.CompareStr(e.Introduced) >= 0 + } + } + + return affected +} + +// affectsVersion checks if the given version is within the range +// specified by the events of any "Ecosystem" or "Semver" type ranges +func affectsVersion(a []Range, pkg lockfile.PackageDetails) bool { + for _, r := range a { + if r.Type != RangeEcosystem && r.Type != RangeSemVer { + continue + } + if r.containsVersion(pkg) { + return true + } + } + + return false + } + +func (v Vulnerability) isAliasOfID(id string) bool { + for _, alias := range v.Aliases { + if alias == id
{ + return true + } + } + + return false +} + +func (v Vulnerability) isAliasOf(vulnerability Vulnerability) bool { + for _, alias := range vulnerability.Aliases { + if v.ID == alias || v.isAliasOfID(alias) { + return true + } + } + + return false +} + +func (v Vulnerability) AffectsEcosystem(ecosystem lockfile.Ecosystem) bool { + for _, affected := range v.Affected { + if string(affected.Package.Ecosystem) == string(ecosystem) { + return true + } + } + + return false +} + +func (v Vulnerability) IsAffected(pkg lockfile.PackageDetails) bool { + for _, affected := range v.Affected { + if string(affected.Package.Ecosystem) == string(pkg.Ecosystem) && + affected.Package.Name == pkg.Name { + if len(affected.Ranges) == 0 && len(affected.Versions) == 0 { + _, _ = fmt.Fprintf( + os.Stderr, + "%s does not have any ranges or versions - this is probably a mistake!\n", + v.ID, + ) + + continue + } + + if slices.Contains(affected.Versions, pkg.Version) { + return true + } + + if affectsVersion(affected.Ranges, pkg) { + return true + } + + // if a package does not have a version, assume it is vulnerable + // as false positives are better than false negatives here + if pkg.Version == "" { + return true + } + } + } + + return false +} diff --git a/pkg/models/vulnerability_test.go b/pkg/models/vulnerability_test.go index 11171ca641..14a39924cd 100644 --- a/pkg/models/vulnerability_test.go +++ b/pkg/models/vulnerability_test.go @@ -5,6 +5,7 @@ import ( "testing" "time" + "github.com/google/osv-scanner/pkg/lockfile" "github.com/google/osv-scanner/pkg/models" "gopkg.in/yaml.v3" ) @@ -231,3 +232,626 @@ func TestVulnerability_MarshalYAMLTimes(t *testing.T) { }) } } + +func expectIsAffected(t *testing.T, vuln models.Vulnerability, version string, expectAffected bool) { + t.Helper() + + pkg := lockfile.PackageDetails{ + Name: "my-package", + Version: version, + Ecosystem: lockfile.NpmEcosystem, + CompareAs: lockfile.NpmEcosystem, + } + + if vuln.IsAffected(pkg) != expectAffected { + if 
expectAffected { + t.Errorf("Expected OSV to affect package version %s but it did not", version) + } else { + t.Errorf("Expected OSV not to affect package version %s but it did", version) + } + } +} + +func buildOSVWithAffected(affected ...models.Affected) models.Vulnerability { + return models.Vulnerability{ + ID: "1", + Published: time.Time{}, + Modified: time.Time{}, + Details: "This is an open source vulnerability!", + Affected: affected, + } +} + +func buildEcosystemAffectsRange(events ...models.Event) models.Range { + return models.Range{Type: models.RangeEcosystem, Events: events} +} + +func buildSemverAffectsRange(events ...models.Event) models.Range { + return models.Range{Type: models.RangeSemVer, Events: events} +} + +func TestOSV_AffectsEcosystem(t *testing.T) { + t.Parallel() + + type AffectsTest struct { + Affected []models.Affected + Ecosystem lockfile.Ecosystem + Expected bool + } + + tests := []AffectsTest{ + {Affected: nil, Ecosystem: "Go", Expected: false}, + {Affected: nil, Ecosystem: "npm", Expected: false}, + {Affected: nil, Ecosystem: "PyPI", Expected: false}, + {Affected: nil, Ecosystem: "", Expected: false}, + { + Affected: []models.Affected{ + {Package: models.Package{Ecosystem: "crates.io"}}, + {Package: models.Package{Ecosystem: "npm"}}, + {Package: models.Package{Ecosystem: "PyPI"}}, + }, + Ecosystem: "Packagist", + Expected: false, + }, + { + Affected: []models.Affected{ + {Package: models.Package{Ecosystem: "NuGet"}}, + }, + Ecosystem: "NuGet", + Expected: true, + }, + { + Affected: []models.Affected{ + {Package: models.Package{Ecosystem: "npm"}}, + {Package: models.Package{Ecosystem: "npm"}}, + }, + Ecosystem: "npm", + Expected: true, + }, + } + + for i, test := range tests { + vuln := models.Vulnerability{ + ID: "1", + Published: time.Time{}, + Modified: time.Time{}, + Details: "This is an open source vulnerability!", + Affected: test.Affected, + } + + if vuln.AffectsEcosystem(test.Ecosystem) != test.Expected { + t.Errorf( + "Test 
#%d: Expected OSV to return %t but it returned %t", + i, + test.Expected, + !test.Expected, + ) + } + } + + // test when the OSV doesn't have an "Affected" + vuln := models.Vulnerability{ + ID: "1", + Published: time.Time{}, + Modified: time.Time{}, + Details: "This is an open source vulnerability!", + Affected: nil, + } + + if vuln.AffectsEcosystem("npm") { + t.Errorf( + "Expected OSV to report 'false' when it doesn't have an Affected, but it reported true!", + ) + } +} + +func TestOSV_IsAffected_AffectsWithEcosystem_DifferentEcosystem(t *testing.T) { + t.Parallel() + + vuln := buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemPyPI, Name: "my-package"}, + Ranges: []models.Range{ + buildEcosystemAffectsRange(models.Event{Introduced: "0"}), + }, + }, + ) + + for _, v := range []string{"1.0.0", "1.1.1", "2.0.0"} { + expectIsAffected(t, vuln, v, false) + } +} + +func TestOSV_IsAffected_AffectsWithEcosystem_SingleAffected(t *testing.T) { + t.Parallel() + + var vuln models.Vulnerability + + // "Introduced: 0" means everything is affected + vuln = buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildEcosystemAffectsRange(models.Event{Introduced: "0"}), + }, + }, + ) + + for _, v := range []string{"1.0.0", "1.1.1", "2.0.0"} { + expectIsAffected(t, vuln, v, true) + } + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) + + // "Fixed: 1" means all versions after this are not vulnerable + vuln = buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildEcosystemAffectsRange( + models.Event{Introduced: "0"}, + models.Event{Fixed: "1"}, + ), + }, + }, + ) + + for _, v := range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range 
[]string{"1.0.0", "1.1.0", "2.0.0"} { + expectIsAffected(t, vuln, v, false) + } + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) + + // multiple fixes and introduced + vuln = buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildEcosystemAffectsRange( + models.Event{Introduced: "0"}, + models.Event{Fixed: "1"}, + models.Event{Introduced: "2.1.0"}, + models.Event{Fixed: "3.2.0"}, + ), + }, + }, + ) + + for _, v := range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"1.0.0", "1.1.0", "2.0.0rc2", "2.0.1"} { + expectIsAffected(t, vuln, v, false) + } + + for _, v := range []string{"2.1.1", "2.3.4", "3.0.0", "3.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"3.2.0", "3.2.1", "4.0.0"} { + expectIsAffected(t, vuln, v, false) + } + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) + + // "LastAffected: 1" means all versions after this are not vulnerable + vuln = buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildEcosystemAffectsRange( + models.Event{Introduced: "0"}, + models.Event{LastAffected: "1"}, + ), + }, + }, + ) + + for _, v := range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc", "1.0.0"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"1.0.1", "1.1.0", "2.0.0"} { + expectIsAffected(t, vuln, v, false) + } + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) + + // mix of fixes, last_known_affected, and introduced + vuln = buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + 
buildEcosystemAffectsRange( + models.Event{Introduced: "0"}, + models.Event{Fixed: "1"}, + models.Event{Introduced: "2.1.0"}, + models.Event{LastAffected: "3.1.9"}, + ), + }, + }, + ) + + for _, v := range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"1.0.0", "1.1.0", "2.0.0rc2", "2.0.1"} { + expectIsAffected(t, vuln, v, false) + } + + for _, v := range []string{"2.1.1", "2.3.4", "3.0.0", "3.0.0-rc", "3.1.9"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"3.2.0", "3.2.1", "4.0.0"} { + expectIsAffected(t, vuln, v, false) + } + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) +} + +func TestOSV_IsAffected_AffectsWithEcosystem_MultipleAffected(t *testing.T) { + t.Parallel() + + vuln := buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildEcosystemAffectsRange( + models.Event{Introduced: "0"}, + models.Event{Fixed: "1"}, + ), + }, + }, + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildEcosystemAffectsRange( + models.Event{Introduced: "2.1.0"}, + models.Event{Fixed: "3.2.0"}, + ), + }, + }, + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildEcosystemAffectsRange( + models.Event{Introduced: "3.3.0"}, + models.Event{LastAffected: "3.5.0"}, + ), + }, + }, + ) + + for _, v := range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"1.0.0", "1.1.0", "2.0.0rc2", "2.0.1"} { + expectIsAffected(t, vuln, v, false) + } + + for _, v := range []string{"2.1.1", "2.3.4", "3.0.0", "3.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"3.2.0", "3.2.1", "4.0.0"} { + 
expectIsAffected(t, vuln, v, false) + } + + for _, v := range []string{"3.3.1", "3.4.5"} { + expectIsAffected(t, vuln, v, true) + } + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) +} + +func TestOSV_IsAffected_AffectsWithEcosystem_Unsorted(t *testing.T) { + t.Parallel() + + vuln := buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildEcosystemAffectsRange( + models.Event{Introduced: "0"}, + models.Event{Introduced: "2.1.0"}, + models.Event{Fixed: "1"}, + models.Event{LastAffected: "3.1.9"}, + ), + }, + }, + ) + + for _, v := range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"1.0.0", "1.1.0", "2.0.0rc2", "2.0.1"} { + expectIsAffected(t, vuln, v, false) + } + + for _, v := range []string{"2.1.1", "2.3.4", "3.0.0", "3.0.0-rc", "3.1.9"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"3.2.0", "3.2.1", "4.0.0"} { + expectIsAffected(t, vuln, v, false) + } + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) +} + +func TestOSV_IsAffected_AffectsWithSemver_DifferentEcosystem(t *testing.T) { + t.Parallel() + + vuln := buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemPyPI, Name: "my-package"}, + Ranges: []models.Range{ + buildSemverAffectsRange(models.Event{Introduced: "0"}), + }, + }, + ) + + for _, v := range []string{"1.0.0", "1.1.1", "2.0.0"} { + expectIsAffected(t, vuln, v, false) + } +} + +func TestOSV_IsAffected_AffectsWithSemver_SingleAffected(t *testing.T) { + t.Parallel() + + var vuln models.Vulnerability + + // "Introduced: 0" means everything is affected + vuln = buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + 
buildSemverAffectsRange(models.Event{Introduced: "0"}), + }, + }, + ) + + for _, v := range []string{"v1.0.0", "v1.1.1", "v2.0.0"} { + expectIsAffected(t, vuln, v, true) + } + + // "Fixed: 1" means all versions after this are not vulnerable + vuln = buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildSemverAffectsRange( + models.Event{Introduced: "0"}, + models.Event{Fixed: "1.0.0"}, + ), + }, + }, + ) + + for _, v := range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"1.0.0", "1.1.0", "2.0.0"} { + expectIsAffected(t, vuln, v, false) + } + + // multiple fixes and introduced + vuln = buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildSemverAffectsRange( + models.Event{Introduced: "0"}, + models.Event{Fixed: "1"}, + models.Event{Introduced: "2.1.0"}, + models.Event{Fixed: "3.2.0"}, + ), + }, + }, + ) + + for _, v := range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"1.0.0", "1.1.0", "2.0.0rc2", "2.0.1"} { + expectIsAffected(t, vuln, v, false) + } + + for _, v := range []string{"2.1.1", "2.3.4", "3.0.0", "3.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"3.2.0", "3.2.1", "4.0.0"} { + expectIsAffected(t, vuln, v, false) + } + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) + + // "LastAffected: 1" means all versions after this are not vulnerable + vuln = buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildSemverAffectsRange( + models.Event{Introduced: "0"}, + models.Event{LastAffected: "1.0.0"}, + ), + }, + }, + ) + + for _, v := 
range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc", "1.0.0"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"1.0.1", "1.1.0", "2.0.0"} { + expectIsAffected(t, vuln, v, false) + } + + // mix of fixes, last_known_affected, and introduced + vuln = buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildSemverAffectsRange( + models.Event{Introduced: "0"}, + models.Event{Fixed: "1"}, + models.Event{Introduced: "2.1.0"}, + models.Event{LastAffected: "3.1.9"}, + ), + }, + }, + ) + + for _, v := range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"1.0.0", "1.1.0", "2.0.0rc2", "2.0.1"} { + expectIsAffected(t, vuln, v, false) + } + + for _, v := range []string{"2.1.1", "2.3.4", "3.0.0", "3.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"3.2.0", "3.2.1", "4.0.0"} { + expectIsAffected(t, vuln, v, false) + } + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) +} + +func TestOSV_IsAffected_AffectsWithSemver_MultipleAffected(t *testing.T) { + t.Parallel() + + vuln := buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildSemverAffectsRange( + models.Event{Introduced: "0"}, + models.Event{Fixed: "1"}, + ), + }, + }, + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildSemverAffectsRange( + models.Event{Introduced: "2.1.0"}, + models.Event{Fixed: "3.2.0"}, + ), + }, + }, + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildSemverAffectsRange( + models.Event{Introduced: "3.3.0"}, + models.Event{LastAffected: "3.5.0"}, + ), + }, + }, + ) + + for _, v 
:= range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"1.0.0", "1.1.0", "2.0.0rc2", "2.0.1"} { + expectIsAffected(t, vuln, v, false) + } + + for _, v := range []string{"2.1.1", "2.3.4", "3.0.0", "3.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"3.2.0", "3.2.1", "4.0.0"} { + expectIsAffected(t, vuln, v, false) + } + + for _, v := range []string{"3.3.1", "3.4.5", "3.5.0"} { + expectIsAffected(t, vuln, v, true) + } + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) +} + +func TestOSV_IsAffected_AffectsWithSemver_Unsorted(t *testing.T) { + t.Parallel() + + // mix of fixes, last_known_affected, and introduced + vuln := buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Ranges: []models.Range{ + buildSemverAffectsRange( + models.Event{Introduced: "0"}, + models.Event{Introduced: "2.1.0"}, + models.Event{Fixed: "1"}, + models.Event{LastAffected: "3.1.9"}, + ), + }, + }, + ) + + for _, v := range []string{"0.0.0", "0.1.0", "0.0.0.1", "1.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"1.0.0", "1.1.0", "2.0.0rc2", "2.0.1"} { + expectIsAffected(t, vuln, v, false) + } + + for _, v := range []string{"2.1.1", "2.3.4", "3.0.0", "3.0.0-rc"} { + expectIsAffected(t, vuln, v, true) + } + + for _, v := range []string{"3.2.0", "3.2.1", "4.0.0"} { + expectIsAffected(t, vuln, v, false) + } + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) +} + +func TestOSV_IsAffected_OnlyVersions(t *testing.T) { + t.Parallel() + + vuln := buildOSVWithAffected( + models.Affected{ + Package: models.Package{Ecosystem: models.EcosystemNPM, Name: "my-package"}, + Versions: []string{"1.0.0"}, + }, + ) + + expectIsAffected(t, vuln, "0.0.0", false) + expectIsAffected(t, vuln, "1.0.0", true) + 
expectIsAffected(t, vuln, "1.0.0-beta1", false) + expectIsAffected(t, vuln, "1.1.0", false) + + // an empty version should always be treated as affected + expectIsAffected(t, vuln, "", true) +} diff --git a/pkg/osv/osv.go b/pkg/osv/osv.go index 8e7e503f74..4c67defa4e 100644 --- a/pkg/osv/osv.go +++ b/pkg/osv/osv.go @@ -32,9 +32,9 @@ var RequestUserAgent = "" // Package represents a package identifier for OSV. type Package struct { - PURL string `json:"purl,omitempty"` - Name string `json:"name,omitempty"` - Ecosystem string `json:"ecosystem,omitempty"` + PURL string `json:"purl,omitempty"` + Name string `json:"name,omitempty"` + Ecosystem lockfile.Ecosystem `json:"ecosystem,omitempty"` } // Query represents a query to OSV. @@ -57,7 +57,7 @@ type MinimalVulnerability struct { // Response represents a full response from OSV. type Response struct { - Vulns []models.Vulnerability `json:"vulns"` + Vulns models.Vulnerabilities `json:"vulns"` } // MinimalResponse represents an unhydrated response from OSV. @@ -98,7 +98,7 @@ func MakePkgRequest(pkgDetails lockfile.PackageDetails) *Query { // Commit: pkgDetails.Commit, Package: Package{ Name: pkgDetails.Name, - Ecosystem: string(pkgDetails.Ecosystem), + Ecosystem: pkgDetails.Ecosystem, }, } } diff --git a/pkg/osvscanner/osvscanner.go b/pkg/osvscanner/osvscanner.go index e220d8ced8..dc6acbe062 100644 --- a/pkg/osvscanner/osvscanner.go +++ b/pkg/osvscanner/osvscanner.go @@ -9,6 +9,7 @@ import ( "path/filepath" "strings" + "github.com/google/osv-scanner/internal/local" "github.com/google/osv-scanner/internal/output" "github.com/google/osv-scanner/internal/sbom" "github.com/google/osv-scanner/pkg/config" @@ -38,7 +39,11 @@ type ScannerActions struct { } type ExperimentalScannerActions struct { - CallAnalysis bool + CallAnalysis bool + CompareLocally bool + CompareOffline bool + + LocalDBPath string } // NoPackagesFoundErr for when no packages are found during a scan. 
@@ -497,6 +502,14 @@ func DoScan(actions ScannerActions, r reporter.Reporter) (models.VulnerabilityRe r = &reporter.VoidReporter{} } + if actions.CompareOffline { + actions.CompareLocally = true + } + + if actions.CompareLocally { + actions.SkipGit = true + } + configManager := config.ConfigManager{ DefaultConfig: config.Config{}, ConfigMap: make(map[string]config.Config), @@ -561,18 +574,10 @@ func DoScan(actions ScannerActions, r reporter.Reporter) (models.VulnerabilityRe return models.VulnerabilityResults{}, NoPackagesFoundErr } - if osv.RequestUserAgent == "" { - osv.RequestUserAgent = "osv-scanner-api" - } - - resp, err := osv.MakeRequest(query) - if err != nil { - return models.VulnerabilityResults{}, fmt.Errorf("scan failed %w", err) - } + hydratedResp, err := makeRequest(r, actions.CompareLocally, actions.CompareOffline, query, actions.LocalDBPath) - hydratedResp, err := osv.Hydrate(resp) if err != nil { - return models.VulnerabilityResults{}, fmt.Errorf("failed to hydrate OSV response: %w", err) + return models.VulnerabilityResults{}, err } vulnerabilityResults := groupResponseBySource(r, query, hydratedResp, actions.CallAnalysis) @@ -600,3 +605,36 @@ func DoScan(actions ScannerActions, r reporter.Reporter) (models.VulnerabilityRe return vulnerabilityResults, nil } + +func makeRequest( + r reporter.Reporter, + compareLocally bool, + compareOffline bool, + query osv.BatchedQuery, + localDBPath string, +) (*osv.HydratedBatchedResponse, error) { + if compareLocally { + hydratedResp, err := local.MakeRequest(r, query, compareOffline, localDBPath) + if err != nil { + return &osv.HydratedBatchedResponse{}, fmt.Errorf("scan failed %w", err) + } + + return hydratedResp, nil + } + + if osv.RequestUserAgent == "" { + osv.RequestUserAgent = "osv-scanner-api" + } + + resp, err := osv.MakeRequest(query) + if err != nil { + return &osv.HydratedBatchedResponse{}, fmt.Errorf("scan failed %w", err) + } + + hydratedResp, err := osv.Hydrate(resp) + if err != nil { + return 
&osv.HydratedBatchedResponse{}, fmt.Errorf("failed to hydrate OSV response: %w", err) + } + + return hydratedResp, nil +} diff --git a/pkg/osvscanner/vulnerability_result.go b/pkg/osvscanner/vulnerability_result.go index 075b98b9a8..1572766d2c 100644 --- a/pkg/osvscanner/vulnerability_result.go +++ b/pkg/osvscanner/vulnerability_result.go @@ -42,7 +42,7 @@ func groupResponseBySource(r reporter.Reporter, query osv.BatchedQuery, resp *os Package: models.PackageInfo{ Name: query.Package.Name, Version: query.Version, - Ecosystem: query.Package.Ecosystem, + Ecosystem: string(query.Package.Ecosystem), }, } }