Compare commits

...

25 Commits

Author SHA1 Message Date
MaineK00n
6682232b5c feat(os): support Amazon Linux 2023 (#1621) 2023-03-16 17:31:57 +09:00
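This change extends GetEOL and getAmazonLinuxVersion (see the config hunks below) so that date-style Amazon Linux 1 releases and the year-named generations map to the right EOL entry. A simplified, standalone sketch of that mapping idea, not the project's actual function:

package main

import (
	"fmt"
	"time"
)

// amazonGeneration illustrates the idea behind getAmazonLinuxVersion in this change:
// Amazon Linux 1 reported date-style versions such as "2017.09", while newer
// generations report "2", "2022", "2023", ...; unknown values become "unknown".
func amazonGeneration(release string) string {
	switch release {
	case "1", "2", "2022", "2023":
		return release
	default:
		if _, err := time.Parse("2006.01", release); err == nil {
			return "1"
		}
		return "unknown"
	}
}

func main() {
	for _, r := range []string{"2017.09", "2", "2023", "2031"} {
		fmt.Println(r, "=>", amazonGeneration(r))
	}
}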
sadayuki-matsuno
984debe929 fix(detector/github) change timeout 10s to 10m (#1616) 2023-03-01 16:58:11 +09:00
Kota Kanbe
a528362663 fix(saas): upload JSON if err occurred during scan (#1615) 2023-03-01 14:52:03 +09:00
MaineK00n
ee97d98c39 feat: update EOL (#1598) 2023-02-22 16:00:05 +09:00
MaineK00n
4e486dae1d style: fix typo (#1592)
* style: fix typo

* style: add comment
2023-02-22 15:59:47 +09:00
MaineK00n
897fef24a3 feat(detector/exploitdb): mod update and add more urls (#1610) 2023-02-22 15:58:24 +09:00
MaineK00n
73f0adad95 fix: use GetCveContentTypes instead of NewCveContentType (#1603) 2023-02-21 11:56:26 +09:00
Sinclair
704492963c Revert: gost/Ubuntu.ConvertToModel() is public method now (#1597) 2023-02-08 11:36:36 +09:00
Sinclair
1927ed344c fix(report): tidy dependencies for multiple repo on integration with GSA (#1593)
* initialize dependencyGraphManifests out of loop

* remove GitHubSecurityAlert.PackageName

* tidy dependency map for multi repo

* set repo name into SBOM components & purl for multi repo
2023-02-07 19:47:32 +09:00
MaineK00n
ad2edbb844 fix(ubuntu): vulnerability detection for kernel package (#1591)
* fix(ubuntu): vulnerability detection for kernel package

* feat(gost/ubuntu): update mod to treat status: deferred as unfixed

* feat(ubuntu): support 22.10
2023-02-03 15:56:58 +09:00
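This commit adds a kernelSourceNamePattern regex to gost/ubuntu.go (shown in a later hunk) so kernel source packages and their flavours can be recognised. A simplified, illustrative stand-in for that matching step; the real pattern covers many more flavours:

package main

import (
	"fmt"
	"regexp"
)

// Simplified stand-in for kernelSourceNamePattern; illustration only.
var kernelSrc = regexp.MustCompile(`^linux(-(aws|azure|gcp|hwe|raspi2?))?(-[\d\.]+)?$`)

func main() {
	for _, name := range []string{"linux", "linux-aws", "linux-hwe-5.15", "linux-firmware"} {
		fmt.Printf("%-16s kernel source package: %v\n", name, kernelSrc.MatchString(name))
	}
}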
MaineK00n
bfe0db77b4 feat(cwe): add cwe-id for category and view (#1578) 2023-01-20 18:02:07 +09:00
MaineK00n
ff3b9cdc16 fix: add comment (#1585) 2023-01-20 18:01:10 +09:00
Sinclair
2deb1b9d32 chore: update version for golangci-lint (#1586) 2023-01-20 18:00:54 +09:00
kl-sinclair
ca64d7fc31 feat(report): Include dependencies into scan result and cyclonedx for supply chain security on Integration with GitHub Security Alerts (#1584)
* feat(report): Enhance scan result and cyclonedx for supply chain security on Integration with GitHub Security Alerts

* derive ecosystem/version from dependency graph

* fix vars name && fetch manifest info on GSA && arrange ghpkgToPURL structure

* fix miscs

* typo in error message

* fix ecosystem equally to trivy

* miscs

* refactoring

* recursive dependency graph pagination

* change var name && update comments

* omit map type of ghpkgToPURL in signatures

* fix vars name

* goimports

* make fmt

* fix comment

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2023-01-20 15:32:36 +09:00
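The commit derives ecosystem and version from the GitHub dependency graph and records them as purls for the SBOM output. As a rough illustration only (hypothetical values, not the code added here), a purl can be built with the package-url/packageurl-go module that this compare adds to go.mod:

package main

import (
	"fmt"

	"github.com/package-url/packageurl-go"
)

func main() {
	// Hypothetical values; in this change the ecosystem, package name and version
	// come from the GitHub dependency graph / security alert responses.
	purl := packageurl.NewPackageURL(packageurl.TypeNPM, "", "lodash", "4.17.20", nil, "")
	fmt.Println(purl.ToString()) // pkg:npm/lodash@4.17.20
}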
Brian Prodoehl
554ecc437e fix(report/email): add Critical to email summary (#1565)
* Add criticals to email summary

* chore(report/email): add Critical keys

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-12-20 11:56:07 +09:00
Kota Kanbe
f6cd4d9223 feat(libscan): support conan.lock C/C++ (#1572) 2022-12-20 11:22:36 +09:00
Kota Kanbe
03c59866d4 feat(libscan): support gradle.lockfile (#1568)
* feat(libscan): support gradle.lockfile

* add gradle.lockfile to integration test

* fix readme

* chore: update integration

* find *gradle.lockfile

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-12-20 08:52:45 +09:00
Kota Kanbe
1d97e91341 fix(libscan): delete map that keeps all file contents detected by FindLock to save memory (#1556)
* fix(libscan): delete Map that keeps all files detected by FindLock to save memory

* continue analyzing libs if err occurred

* FindLockDirs

* fix

* fix
2022-11-10 10:19:15 +09:00
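The fix stops keeping every file's contents found by FindLock in a map and works from paths instead. A minimal sketch of that idea, collecting only lockfile paths while walking a directory (illustrative, not the actual scanner code):

package main

import (
	"fmt"
	"io/fs"
	"path/filepath"
)

// findLockfilePaths walks root and records only the paths of files whose base name
// matches one of the patterns; file contents are never held in memory.
func findLockfilePaths(root string, patterns []string) ([]string, error) {
	var paths []string
	err := filepath.WalkDir(root, func(path string, d fs.DirEntry, walkErr error) error {
		if walkErr != nil || d.IsDir() {
			return walkErr
		}
		for _, p := range patterns {
			if ok, _ := filepath.Match(p, d.Name()); ok {
				paths = append(paths, path)
				break
			}
		}
		return nil
	})
	return paths, err
}

func main() {
	paths, err := findLockfilePaths(".", []string{"*gradle.lockfile", "package-lock.json"})
	fmt.Println(paths, err)
}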
MaineK00n
96333f38c9 chore(ubuntu): set Ubuntu 22.10 EOL (#1552) 2022-11-01 14:00:56 +09:00
MaineK00n
8b5d1c8e92 feat(cwe, cti): update dictionary (#1553)
* feat(cwe): update CWE dictionary

* feat(cti): update CTI dictionary

* fix(cwe): fix typo
2022-11-01 14:00:23 +09:00
MaineK00n
dea80f860c feat(report): add cyclonedx format (#1543) 2022-11-01 13:58:31 +09:00
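A minimal sketch of emitting a CycloneDX BOM with the CycloneDX/cyclonedx-go module this repository depends on (hypothetical component data, not the reporter added by this commit):

package main

import (
	"log"
	"os"

	cdx "github.com/CycloneDX/cyclonedx-go"
)

func main() {
	// Hypothetical component; the reporter added by this commit builds
	// components from scan results instead.
	components := []cdx.Component{{
		Type:       cdx.ComponentTypeLibrary,
		Name:       "lodash",
		Version:    "4.17.20",
		PackageURL: "pkg:npm/lodash@4.17.20",
	}}

	bom := cdx.NewBOM()
	bom.Components = &components

	if err := cdx.NewBOMEncoder(os.Stdout, cdx.BOMFileFormatJSON).Encode(bom); err != nil {
		log.Fatal(err)
	}
}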
dependabot[bot]
6eb4c5a5fe chore(deps): bump github.com/aquasecurity/trivy from 0.31.3 to 0.32.1 (#1538)
* chore(deps): bump github.com/aquasecurity/trivy from 0.31.3 to 0.32.1

Bumps [github.com/aquasecurity/trivy](https://github.com/aquasecurity/trivy) from 0.31.3 to 0.32.1.
- [Release notes](https://github.com/aquasecurity/trivy/releases)
- [Changelog](https://github.com/aquasecurity/trivy/blob/main/goreleaser.yml)
- [Commits](https://github.com/aquasecurity/trivy/compare/v0.31.3...v0.32.1)

---
updated-dependencies:
- dependency-name: github.com/aquasecurity/trivy
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* chore(deps): bump github.com/aquasecurity/trivy 0.32.1 to 0.33.0

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-10-27 01:24:06 +09:00
Kota Kanbe
b219a8495e fix(cpescan): match if affected version is NA (#1548)
https://github.com/vulsio/go-cve-dictionary/pull/283
2022-10-19 16:57:32 +09:00
Kota Kanbe
eb87d5d4e1 fix(saas): panic: runtime error: comparing uncomparable type config.PortScanConf (#1537) 2022-10-04 11:55:48 +09:00
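The panic class being fixed here: comparing two interface values whose dynamic type contains an uncomparable field (slice, map, func) compiles but panics at runtime. An illustrative reproduction, not the vuls code; the hypothetical conf type stands in for config.PortScanConf:

package main

import (
	"fmt"
	"reflect"
)

// conf contains an uncomparable field (a slice); comparing two such values
// through interface{} with == panics: "comparing uncomparable type main.conf".
type conf struct {
	Techniques []string
}

func main() {
	var a, b interface{} = conf{Techniques: []string{"sS"}}, conf{Techniques: []string{"sS"}}

	// Safe structural comparison:
	fmt.Println(reflect.DeepEqual(a, b)) // true

	// fmt.Println(a == b) // would panic at runtime
}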
tomofumi0003
6963442a5e fix(report): send report to each slack channel (#1530)
* fix send report to each slack channel

* fix(report): use w.Cnf.Channel instead of channel

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-09-29 16:08:36 +09:00
54 changed files with 7186 additions and 3433 deletions


@@ -19,7 +19,7 @@ jobs:
uses: golangci/golangci-lint-action@v3
with:
# Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
version: v1.46
version: v1.50.1
args: --timeout=10m
# Optional: working directory, useful for monorepos


@@ -88,7 +88,7 @@ NOW=$(shell date --iso-8601=seconds)
NOW_JSON_DIR := '${BASE_DIR}/$(NOW)'
ONE_SEC_AFTER=$(shell date -d '+1 second' --iso-8601=seconds)
ONE_SEC_AFTER_JSON_DIR := '${BASE_DIR}/$(ONE_SEC_AFTER)'
LIBS := 'bundler' 'pip' 'pipenv' 'poetry' 'composer' 'npm' 'yarn' 'pnpm' 'cargo' 'gomod' 'gosum' 'gobinary' 'jar' 'pom' 'nuget-lock' 'nuget-config' 'dotnet-deps' 'nvd_exact' 'nvd_rough' 'nvd_vendor_product' 'nvd_match_no_jvn' 'jvn_vendor_product' 'jvn_vendor_product_nover'
LIBS := 'bundler' 'pip' 'pipenv' 'poetry' 'composer' 'npm' 'yarn' 'pnpm' 'cargo' 'gomod' 'gosum' 'gobinary' 'jar' 'pom' 'gradle' 'nuget-lock' 'nuget-config' 'dotnet-deps' 'conan' 'nvd_exact' 'nvd_rough' 'nvd_vendor_product' 'nvd_match_no_jvn' 'jvn_vendor_product' 'jvn_vendor_product_nover'
diff:
# git clone git@github.com:vulsio/vulsctl.git


@@ -95,11 +95,7 @@ Vuls is a tool created to solve the problems listed above. It has the following
- [mitre/cti](https://github.com/mitre/cti)
- Libraries
- [Node.js Security Working Group](https://github.com/nodejs/security-wg)
- [Ruby Advisory Database](https://github.com/rubysec/ruby-advisory-db)
- [Safety DB(Python)](https://github.com/pyupio/safety-db)
- [PHP Security Advisories Database](https://github.com/FriendsOfPHP/security-advisories)
- [RustSec Advisory Database](https://github.com/RustSec/advisory-db)
- [aquasecurity/vuln-list](https://github.com/aquasecurity/vuln-list)
- WordPress
- [wpscan](https://wpscan.com/api)

2 cache/bolt.go vendored

@@ -48,7 +48,7 @@ func (b Bolt) Close() error {
return b.db.Close()
}
// CreateBucketIfNotExists creates a bucket that is specified by arg.
// CreateBucketIfNotExists creates a bucket that is specified by arg.
func (b *Bolt) createBucketIfNotExists(name string) error {
return b.db.Update(func(tx *bolt.Tx) error {
_, err := tx.CreateBucketIfNotExists([]byte(name))


@@ -21,7 +21,7 @@ var Revision string
// Conf has Configuration
var Conf Config
//Config is struct of Configuration
// Config is struct of Configuration
type Config struct {
logging.LogOpts
@@ -240,6 +240,7 @@ type ServerInfo struct {
Optional map[string]interface{} `toml:"optional,omitempty" json:"optional,omitempty"` // Optional key-value set that will be outputted to JSON
Lockfiles []string `toml:"lockfiles,omitempty" json:"lockfiles,omitempty"` // ie) path/to/package-lock.json
FindLock bool `toml:"findLock,omitempty" json:"findLock,omitempty"`
FindLockDirs []string `toml:"findLockDirs,omitempty" json:"findLockDirs,omitempty"`
Type string `toml:"type,omitempty" json:"type,omitempty"` // "pseudo" or ""
IgnoredJSONKeys []string `toml:"ignoredJSONKeys,omitempty" json:"ignoredJSONKeys,omitempty"`
WordPress *WordPressConf `toml:"wordpress,omitempty" json:"wordpress,omitempty"`


@@ -41,8 +41,12 @@ func GetEOL(family, release string) (eol EOL, found bool) {
case constant.Amazon:
eol, found = map[string]EOL{
"1": {StandardSupportUntil: time.Date(2023, 6, 30, 23, 59, 59, 0, time.UTC)},
"2": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
"2": {StandardSupportUntil: time.Date(2025, 6, 30, 23, 59, 59, 0, time.UTC)},
"2022": {StandardSupportUntil: time.Date(2026, 6, 30, 23, 59, 59, 0, time.UTC)},
"2023": {StandardSupportUntil: time.Date(2027, 6, 30, 23, 59, 59, 0, time.UTC)},
"2025": {StandardSupportUntil: time.Date(2029, 6, 30, 23, 59, 59, 0, time.UTC)},
"2027": {StandardSupportUntil: time.Date(2031, 6, 30, 23, 59, 59, 0, time.UTC)},
"2029": {StandardSupportUntil: time.Date(2033, 6, 30, 23, 59, 59, 0, time.UTC)},
}[getAmazonLinuxVersion(release)]
case constant.RedHat:
// https://access.redhat.com/support/policy/updates/errata
@@ -130,18 +134,35 @@ func GetEOL(family, release string) (eol EOL, found bool) {
case constant.Ubuntu:
// https://wiki.ubuntu.com/Releases
eol, found = map[string]EOL{
"14.10": {Ended: true},
"6.06": {Ended: true},
"6.10": {Ended: true},
"7.04": {Ended: true},
"7.10": {Ended: true},
"8.04": {Ended: true},
"8.10": {Ended: true},
"9.04": {Ended: true},
"9.10": {Ended: true},
"10.04": {Ended: true},
"10.10": {Ended: true},
"11.04": {Ended: true},
"11.10": {Ended: true},
"12.04": {Ended: true},
"12.10": {Ended: true},
"13.04": {Ended: true},
"13.10": {Ended: true},
"14.04": {
ExtendedSupportUntil: time.Date(2022, 4, 1, 23, 59, 59, 0, time.UTC),
},
"14.10": {Ended: true},
"15.04": {Ended: true},
"16.10": {Ended: true},
"17.04": {Ended: true},
"17.10": {Ended: true},
"15.10": {Ended: true},
"16.04": {
StandardSupportUntil: time.Date(2021, 4, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2024, 4, 1, 23, 59, 59, 0, time.UTC),
},
"16.10": {Ended: true},
"17.04": {Ended: true},
"17.10": {Ended: true},
"18.04": {
StandardSupportUntil: time.Date(2023, 4, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2028, 4, 1, 23, 59, 59, 0, time.UTC),
@@ -166,6 +187,12 @@ func GetEOL(family, release string) (eol EOL, found bool) {
StandardSupportUntil: time.Date(2027, 4, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2032, 4, 1, 23, 59, 59, 0, time.UTC),
},
"22.10": {
StandardSupportUntil: time.Date(2023, 7, 20, 23, 59, 59, 0, time.UTC),
},
// "23.04": {
// StandardSupportUntil: time.Date(2024, 1, 31, 23, 59, 59, 0, time.UTC),
// },
}[release]
case constant.OpenSUSE:
// https://en.opensuse.org/Lifetime
@@ -264,6 +291,7 @@ func GetEOL(family, release string) (eol EOL, found bool) {
"3.14": {StandardSupportUntil: time.Date(2023, 5, 1, 23, 59, 59, 0, time.UTC)},
"3.15": {StandardSupportUntil: time.Date(2023, 11, 1, 23, 59, 59, 0, time.UTC)},
"3.16": {StandardSupportUntil: time.Date(2024, 5, 23, 23, 59, 59, 0, time.UTC)},
"3.17": {StandardSupportUntil: time.Date(2024, 11, 22, 23, 59, 59, 0, time.UTC)},
}[majorDotMinor(release)]
case constant.FreeBSD:
// https://www.freebsd.org/security/
@@ -273,17 +301,19 @@ func GetEOL(family, release string) (eol EOL, found bool) {
"9": {Ended: true},
"10": {Ended: true},
"11": {StandardSupportUntil: time.Date(2021, 9, 30, 23, 59, 59, 0, time.UTC)},
"12": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
"12": {StandardSupportUntil: time.Date(2023, 12, 31, 23, 59, 59, 0, time.UTC)},
"13": {StandardSupportUntil: time.Date(2026, 1, 31, 23, 59, 59, 0, time.UTC)},
}[major(release)]
case constant.Fedora:
// https://docs.fedoraproject.org/en-US/releases/eol/
// https://endoflife.date/fedora
eol, found = map[string]EOL{
"32": {StandardSupportUntil: time.Date(2021, 5, 25, 23, 59, 59, 0, time.UTC)},
"33": {StandardSupportUntil: time.Date(2021, 11, 30, 23, 59, 59, 0, time.UTC)},
"34": {StandardSupportUntil: time.Date(2022, 5, 17, 23, 59, 59, 0, time.UTC)},
"35": {StandardSupportUntil: time.Date(2022, 12, 7, 23, 59, 59, 0, time.UTC)},
"32": {StandardSupportUntil: time.Date(2021, 5, 24, 23, 59, 59, 0, time.UTC)},
"33": {StandardSupportUntil: time.Date(2021, 11, 29, 23, 59, 59, 0, time.UTC)},
"34": {StandardSupportUntil: time.Date(2022, 6, 6, 23, 59, 59, 0, time.UTC)},
"35": {StandardSupportUntil: time.Date(2022, 12, 12, 23, 59, 59, 0, time.UTC)},
"36": {StandardSupportUntil: time.Date(2023, 5, 16, 23, 59, 59, 0, time.UTC)},
"37": {StandardSupportUntil: time.Date(2023, 12, 15, 23, 59, 59, 0, time.UTC)},
}[major(release)]
}
return
@@ -302,9 +332,25 @@ func majorDotMinor(osVer string) (majorDotMinor string) {
}
func getAmazonLinuxVersion(osRelease string) string {
ss := strings.Fields(osRelease)
if len(ss) == 1 {
switch s := strings.Fields(osRelease)[0]; s {
case "1":
return "1"
case "2":
return "2"
case "2022":
return "2022"
case "2023":
return "2023"
case "2025":
return "2025"
case "2027":
return "2027"
case "2029":
return "2029"
default:
if _, err := time.Parse("2006.01", s); err == nil {
return "1"
}
return "unknown"
}
return ss[0]
}


@@ -54,8 +54,16 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
found: true,
},
{
name: "amazon linux 2024 not found",
fields: fields{family: Amazon, release: "2024 (Amazon Linux)"},
name: "amazon linux 2023 supported",
fields: fields{family: Amazon, release: "2023"},
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "amazon linux 2031 not found",
fields: fields{family: Amazon, release: "2031"},
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
@@ -244,8 +252,8 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
},
//Ubuntu
{
name: "Ubuntu 12.10 not found",
fields: fields{family: Ubuntu, release: "12.10"},
name: "Ubuntu 5.10 not found",
fields: fields{family: Ubuntu, release: "5.10"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
found: false,
stdEnded: false,
@@ -339,6 +347,22 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
stdEnded: false,
extEnded: false,
},
{
name: "Ubuntu 22.10 supported",
fields: fields{family: Ubuntu, release: "22.10"},
now: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC),
found: true,
stdEnded: false,
extEnded: false,
},
// {
// name: "Ubuntu 23.04 supported",
// fields: fields{family: Ubuntu, release: "23.04"},
// now: time.Date(2023, 3, 16, 23, 59, 59, 0, time.UTC),
// found: true,
// stdEnded: false,
// extEnded: false,
// },
//Debian
{
name: "Debian 9 supported",
@@ -438,14 +462,30 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
found: true,
},
{
name: "Alpine 3.17 not found",
name: "Alpine 3.17 supported",
fields: fields{family: Alpine, release: "3.17"},
now: time.Date(2022, 1, 14, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Alpine 3.18 not found",
fields: fields{family: Alpine, release: "3.18"},
now: time.Date(2022, 1, 14, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: false,
},
// freebsd
{
name: "freebsd 10 eol",
fields: fields{family: FreeBSD, release: "10"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "freebsd 11 supported",
fields: fields{family: FreeBSD, release: "11"},
@@ -478,27 +518,19 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
extEnded: false,
found: true,
},
{
name: "freebsd 10 eol",
fields: fields{family: FreeBSD, release: "10"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
// Fedora
{
name: "Fedora 32 supported",
fields: fields{family: Fedora, release: "32"},
now: time.Date(2021, 5, 25, 23, 59, 59, 0, time.UTC),
now: time.Date(2021, 5, 24, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 32 eol on 2021-5-25",
name: "Fedora 32 eol since 2021-5-25",
fields: fields{family: Fedora, release: "32"},
now: time.Date(2021, 5, 26, 23, 59, 59, 0, time.UTC),
now: time.Date(2021, 5, 25, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
@@ -506,15 +538,15 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
{
name: "Fedora 33 supported",
fields: fields{family: Fedora, release: "33"},
now: time.Date(2021, 11, 30, 23, 59, 59, 0, time.UTC),
now: time.Date(2021, 11, 29, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 33 eol on 2021-5-26",
name: "Fedora 33 eol since 2021-11-30",
fields: fields{family: Fedora, release: "32"},
now: time.Date(2021, 5, 27, 23, 59, 59, 0, time.UTC),
now: time.Date(2021, 11, 30, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
@@ -522,15 +554,15 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
{
name: "Fedora 34 supported",
fields: fields{family: Fedora, release: "34"},
now: time.Date(2022, 5, 17, 23, 59, 59, 0, time.UTC),
now: time.Date(2022, 6, 6, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 32 eol on 2022-5-17",
name: "Fedora 34 eol since 2022-6-7",
fields: fields{family: Fedora, release: "34"},
now: time.Date(2022, 5, 18, 23, 59, 59, 0, time.UTC),
now: time.Date(2022, 6, 7, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
@@ -538,19 +570,59 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
{
name: "Fedora 35 supported",
fields: fields{family: Fedora, release: "35"},
now: time.Date(2022, 12, 7, 23, 59, 59, 0, time.UTC),
now: time.Date(2022, 12, 12, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 35 eol on 2022-12-7",
name: "Fedora 35 eol since 2022-12-13",
fields: fields{family: Fedora, release: "35"},
now: time.Date(2022, 12, 8, 23, 59, 59, 0, time.UTC),
now: time.Date(2022, 12, 13, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Fedora 36 supported",
fields: fields{family: Fedora, release: "36"},
now: time.Date(2023, 5, 16, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 36 eol since 2023-05-17",
fields: fields{family: Fedora, release: "36"},
now: time.Date(2023, 5, 17, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Fedora 37 supported",
fields: fields{family: Fedora, release: "37"},
now: time.Date(2023, 12, 15, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 37 eol since 2023-12-16",
fields: fields{family: Fedora, release: "37"},
now: time.Date(2023, 12, 16, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Fedora 38 not found",
fields: fields{family: Fedora, release: "38"},
now: time.Date(2023, 12, 15, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
@@ -616,3 +688,58 @@ func Test_majorDotMinor(t *testing.T) {
})
}
}
func Test_getAmazonLinuxVersion(t *testing.T) {
tests := []struct {
release string
want string
}{
{
release: "2017.09",
want: "1",
},
{
release: "2018.03",
want: "1",
},
{
release: "1",
want: "1",
},
{
release: "2",
want: "2",
},
{
release: "2022",
want: "2022",
},
{
release: "2023",
want: "2023",
},
{
release: "2025",
want: "2025",
},
{
release: "2027",
want: "2027",
},
{
release: "2029",
want: "2029",
},
{
release: "2031",
want: "unknown",
},
}
for _, tt := range tests {
t.Run(tt.release, func(t *testing.T) {
if got := getAmazonLinuxVersion(tt.release); got != tt.want {
t.Errorf("getAmazonLinuxVersion() = %v, want %v", got, tt.want)
}
})
}
}


@@ -660,7 +660,7 @@ var TechniqueDict = map[string]Technique{
Name: "CAPEC-35: Leverage Executable Code in Non-Executable Files",
},
"CAPEC-36": {
Name: "CAPEC-36: Using Unpublished Interfaces",
Name: "CAPEC-36: Using Unpublished Interfaces or Functionality",
},
"CAPEC-37": {
Name: "CAPEC-37: Retrieve Embedded Sensitive Data",
@@ -831,7 +831,7 @@ var TechniqueDict = map[string]Technique{
Name: "CAPEC-442: Infected Software",
},
"CAPEC-443": {
Name: "CAPEC-443: Malicious Logic Inserted Into Product Software by Authorized Developer",
Name: "CAPEC-443: Malicious Logic Inserted Into Product by Authorized Developer",
},
"CAPEC-444": {
Name: "CAPEC-444: Development Alteration",
@@ -840,7 +840,7 @@ var TechniqueDict = map[string]Technique{
Name: "CAPEC-445: Malicious Logic Insertion into Product Software via Configuration Management Manipulation",
},
"CAPEC-446": {
Name: "CAPEC-446: Malicious Logic Insertion into Product Software via Inclusion of 3rd Party Component Dependency",
Name: "CAPEC-446: Malicious Logic Insertion into Product via Inclusion of Third-Party Component",
},
"CAPEC-447": {
Name: "CAPEC-447: Design Alteration",
@@ -1382,9 +1382,6 @@ var TechniqueDict = map[string]Technique{
"CAPEC-628": {
Name: "CAPEC-628: Carry-Off GPS Attack",
},
"CAPEC-629": {
Name: "CAPEC-629: Unauthorized Use of Device Resources",
},
"CAPEC-63": {
Name: "CAPEC-63: Cross-Site Scripting (XSS)",
},
@@ -1464,7 +1461,7 @@ var TechniqueDict = map[string]Technique{
Name: "CAPEC-652: Use of Known Kerberos Credentials",
},
"CAPEC-653": {
Name: "CAPEC-653: Use of Known Windows Credentials",
Name: "CAPEC-653: Use of Known Operating System Credentials",
},
"CAPEC-654": {
Name: "CAPEC-654: Credential Prompt Impersonation",
@@ -1553,9 +1550,39 @@ var TechniqueDict = map[string]Technique{
"CAPEC-681": {
Name: "CAPEC-681: Exploitation of Improperly Controlled Hardware Security Identifiers",
},
"CAPEC-682": {
Name: "CAPEC-682: Exploitation of Firmware or ROM Code with Unpatchable Vulnerabilities",
},
"CAPEC-69": {
Name: "CAPEC-69: Target Programs with Elevated Privileges",
},
"CAPEC-690": {
Name: "CAPEC-690: Metadata Spoofing",
},
"CAPEC-691": {
Name: "CAPEC-691: Spoof Open-Source Software Metadata",
},
"CAPEC-692": {
Name: "CAPEC-692: Spoof Version Control System Commit Metadata",
},
"CAPEC-693": {
Name: "CAPEC-693: StarJacking",
},
"CAPEC-694": {
Name: "CAPEC-694: System Location Discovery",
},
"CAPEC-695": {
Name: "CAPEC-695: Repo Jacking",
},
"CAPEC-696": {
Name: "CAPEC-696: Load Value Injection",
},
"CAPEC-697": {
Name: "CAPEC-697: DHCP Spoofing",
},
"CAPEC-698": {
Name: "CAPEC-698: Install Malicious Extension",
},
"CAPEC-7": {
Name: "CAPEC-7: Blind SQL Injection",
},
@@ -1596,7 +1623,7 @@ var TechniqueDict = map[string]Technique{
Name: "CAPEC-80: Using UTF-8 Encoding to Bypass Validation Logic",
},
"CAPEC-81": {
Name: "CAPEC-81: Web Logs Tampering",
Name: "CAPEC-81: Web Server Logs Tampering",
},
"CAPEC-83": {
Name: "CAPEC-83: XPath Injection",
@@ -1814,6 +1841,18 @@ var TechniqueDict = map[string]Technique{
Name: "TA0005: Defense Evasion => T1027.006: HTML Smuggling",
Platforms: []string{"Linux", "Windows", "macOS"},
},
"T1027.007": {
Name: "TA0005: Defense Evasion => T1027.007: Dynamic API Resolution",
Platforms: []string{"Windows"},
},
"T1027.008": {
Name: "TA0005: Defense Evasion => T1027.008: Stripped Payloads",
Platforms: []string{"Linux", "Windows", "macOS"},
},
"T1027.009": {
Name: "TA0005: Defense Evasion => T1027.009: Embedded Payloads",
Platforms: []string{"Linux", "Windows", "macOS"},
},
"T1029": {
Name: "TA0010: Exfiltration => T1029: Scheduled Transfer",
Platforms: []string{"Linux", "Windows", "macOS"},
@@ -2087,8 +2126,8 @@ var TechniqueDict = map[string]Technique{
Platforms: []string{"Azure AD", "Google Workspace", "IaaS", "Office 365", "SaaS"},
},
"T1070": {
Name: "TA0005: Defense Evasion => T1070: Indicator Removal on Host",
Platforms: []string{"Containers", "Linux", "Network", "Windows", "macOS"},
Name: "TA0005: Defense Evasion => T1070: Indicator Removal",
Platforms: []string{"Containers", "Google Workspace", "Linux", "Network", "Office 365", "Windows", "macOS"},
},
"T1070.001": {
Name: "TA0005: Defense Evasion => T1070.001: Clear Windows Event Logs",
@@ -2114,6 +2153,18 @@ var TechniqueDict = map[string]Technique{
Name: "TA0005: Defense Evasion => T1070.006: Timestomp",
Platforms: []string{"Linux", "Windows", "macOS"},
},
"T1070.007": {
Name: "TA0005: Defense Evasion => T1070.007: Clear Network Connection History and Configurations",
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
},
"T1070.008": {
Name: "TA0005: Defense Evasion => T1070.008: Clear Mailbox Data",
Platforms: []string{"Google Workspace", "Linux", "Office 365", "Windows", "macOS"},
},
"T1070.009": {
Name: "TA0005: Defense Evasion => T1070.009: Clear Persistence",
Platforms: []string{"Linux", "Windows", "macOS"},
},
"T1071": {
Name: "TA0011: Command and Control => T1071: Application Layer Protocol",
Platforms: []string{"Linux", "Windows", "macOS"},
@@ -2152,7 +2203,7 @@ var TechniqueDict = map[string]Technique{
},
"T1078": {
Name: "TA0001: Initial Access, TA0003: Persistence, TA0004: Privilege Escalation, TA0005: Defense Evasion => T1078: Valid Accounts",
Platforms: []string{"Azure AD", "Containers", "Google Workspace", "IaaS", "Linux", "Office 365", "SaaS", "Windows", "macOS"},
Platforms: []string{"Azure AD", "Containers", "Google Workspace", "IaaS", "Linux", "Network", "Office 365", "SaaS", "Windows", "macOS"},
},
"T1078.001": {
Name: "TA0001: Initial Access, TA0003: Persistence, TA0004: Privilege Escalation, TA0005: Defense Evasion => T1078.001: Default Accounts",
@@ -2504,7 +2555,7 @@ var TechniqueDict = map[string]Technique{
},
"T1199": {
Name: "TA0001: Initial Access => T1199: Trusted Relationship",
Platforms: []string{"IaaS", "Linux", "SaaS", "Windows", "macOS"},
Platforms: []string{"IaaS", "Linux", "Office 365", "SaaS", "Windows", "macOS"},
},
"T1200": {
Name: "TA0001: Initial Access => T1200: Hardware Additions",
@@ -2546,6 +2597,10 @@ var TechniqueDict = map[string]Technique{
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0011: Command and Control => T1205.001: Port Knocking",
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
},
"T1205.002": {
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0011: Command and Control => T1205.002: Socket Filters",
Platforms: []string{"Linux", "Windows", "macOS"},
},
"T1207": {
Name: "TA0005: Defense Evasion => T1207: Rogue Domain Controller",
Platforms: []string{"Windows"},
@@ -2780,7 +2835,7 @@ var TechniqueDict = map[string]Technique{
},
"T1505": {
Name: "TA0003: Persistence => T1505: Server Software Component",
Platforms: []string{"Linux", "Windows", "macOS"},
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
},
"T1505.001": {
Name: "TA0003: Persistence => T1505.001: SQL Stored Procedures",
@@ -2792,7 +2847,7 @@ var TechniqueDict = map[string]Technique{
},
"T1505.003": {
Name: "TA0003: Persistence => T1505.003: Web Shell",
Platforms: []string{"Linux", "Windows", "macOS"},
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
},
"T1505.004": {
Name: "TA0003: Persistence => T1505.004: IIS Components",
@@ -2827,8 +2882,8 @@ var TechniqueDict = map[string]Technique{
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
},
"T1530": {
Name: "TA0009: Collection => T1530: Data from Cloud Storage Object",
Platforms: []string{"IaaS"},
Name: "TA0009: Collection => T1530: Data from Cloud Storage",
Platforms: []string{"IaaS", "SaaS"},
},
"T1531": {
Name: "TA0040: Impact => T1531: Account Access Removal",
@@ -2900,7 +2955,7 @@ var TechniqueDict = map[string]Technique{
},
"T1546": {
Name: "TA0003: Persistence, TA0004: Privilege Escalation => T1546: Event Triggered Execution",
Platforms: []string{"Linux", "Windows", "macOS"},
Platforms: []string{"IaaS", "Linux", "Office 365", "SaaS", "Windows", "macOS"},
},
"T1546.001": {
Name: "TA0003: Persistence, TA0004: Privilege Escalation => T1546.001: Change Default File Association",
@@ -2962,6 +3017,10 @@ var TechniqueDict = map[string]Technique{
Name: "TA0003: Persistence, TA0004: Privilege Escalation => T1546.015: Component Object Model Hijacking",
Platforms: []string{"Windows"},
},
"T1546.016": {
Name: "TA0003: Persistence, TA0004: Privilege Escalation => T1546.016: Installer Packages",
Platforms: []string{"Linux", "Windows", "macOS"},
},
"T1547": {
Name: "TA0003: Persistence, TA0004: Privilege Escalation => T1547: Boot or Logon Autostart Execution",
Platforms: []string{"Linux", "Windows", "macOS"},
@@ -3048,7 +3107,7 @@ var TechniqueDict = map[string]Technique{
},
"T1550.001": {
Name: "TA0005: Defense Evasion, TA0008: Lateral Movement => T1550.001: Application Access Token",
Platforms: []string{"Containers", "Google Workspace", "Office 365", "SaaS"},
Platforms: []string{"Azure AD", "Containers", "Google Workspace", "IaaS", "Office 365", "SaaS"},
},
"T1550.002": {
Name: "TA0005: Defense Evasion, TA0008: Lateral Movement => T1550.002: Pass the Hash",
@@ -3152,7 +3211,7 @@ var TechniqueDict = map[string]Technique{
},
"T1556": {
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0006: Credential Access => T1556: Modify Authentication Process",
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
Platforms: []string{"Azure AD", "Google Workspace", "IaaS", "Linux", "Network", "Office 365", "SaaS", "Windows", "macOS"},
},
"T1556.001": {
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0006: Credential Access => T1556.001: Domain Controller Authentication",
@@ -3174,9 +3233,17 @@ var TechniqueDict = map[string]Technique{
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0006: Credential Access => T1556.005: Reversible Encryption",
Platforms: []string{"Windows"},
},
"T1556.006": {
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0006: Credential Access => T1556.006: Multi-Factor Authentication",
Platforms: []string{"Azure AD", "Google Workspace", "IaaS", "Linux", "Office 365", "SaaS", "Windows", "macOS"},
},
"T1556.007": {
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0006: Credential Access => T1556.007: Hybrid Identity",
Platforms: []string{"Azure AD", "Google Workspace", "IaaS", "Office 365", "SaaS", "Windows"},
},
"T1557": {
Name: "TA0006: Credential Access, TA0009: Collection => T1557: Adversary-in-the-Middle",
Platforms: []string{"Linux", "Windows", "macOS"},
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
},
"T1557.001": {
Name: "TA0006: Credential Access, TA0009: Collection => T1557.001: LLMNR/NBT-NS Poisoning and SMB Relay",
@@ -3550,6 +3617,10 @@ var TechniqueDict = map[string]Technique{
Name: "TA0042: Resource Development => T1583.006: Web Services",
Platforms: []string{"PRE"},
},
"T1583.007": {
Name: "TA0042: Resource Development => T1583.007: Serverless",
Platforms: []string{"PRE"},
},
"T1584": {
Name: "TA0042: Resource Development => T1584: Compromise Infrastructure",
Platforms: []string{"PRE"},
@@ -3578,6 +3649,10 @@ var TechniqueDict = map[string]Technique{
Name: "TA0042: Resource Development => T1584.006: Web Services",
Platforms: []string{"PRE"},
},
"T1584.007": {
Name: "TA0042: Resource Development => T1584.007: Serverless",
Platforms: []string{"PRE"},
},
"T1585": {
Name: "TA0042: Resource Development => T1585: Establish Accounts",
Platforms: []string{"PRE"},
@@ -3590,6 +3665,10 @@ var TechniqueDict = map[string]Technique{
Name: "TA0042: Resource Development => T1585.002: Email Accounts",
Platforms: []string{"PRE"},
},
"T1585.003": {
Name: "TA0042: Resource Development => T1585.003: Cloud Accounts",
Platforms: []string{"PRE"},
},
"T1586": {
Name: "TA0042: Resource Development => T1586: Compromise Accounts",
Platforms: []string{"PRE"},
@@ -3602,6 +3681,10 @@ var TechniqueDict = map[string]Technique{
Name: "TA0042: Resource Development => T1586.002: Email Accounts",
Platforms: []string{"PRE"},
},
"T1586.003": {
Name: "TA0042: Resource Development => T1586.003: Cloud Accounts",
Platforms: []string{"PRE"},
},
"T1587": {
Name: "TA0042: Resource Development => T1587: Develop Capabilities",
Platforms: []string{"PRE"},
@@ -3746,6 +3829,10 @@ var TechniqueDict = map[string]Technique{
Name: "TA0043: Reconnaissance => T1593.002: Search Engines",
Platforms: []string{"PRE"},
},
"T1593.003": {
Name: "TA0043: Reconnaissance => T1593.003: Code Repositories",
Platforms: []string{"PRE"},
},
"T1594": {
Name: "TA0043: Reconnaissance => T1594: Search Victim-Owned Websites",
Platforms: []string{"PRE"},
@@ -3898,6 +3985,10 @@ var TechniqueDict = map[string]Technique{
Name: "TA0042: Resource Development => T1608.005: Link Target",
Platforms: []string{"PRE"},
},
"T1608.006": {
Name: "TA0042: Resource Development => T1608.006: SEO Poisoning",
Platforms: []string{"PRE"},
},
"T1609": {
Name: "TA0002: Execution => T1609: Container Administration Command",
Platforms: []string{"Containers"},
@@ -3950,4 +4041,12 @@ var TechniqueDict = map[string]Technique{
Name: "TA0005: Defense Evasion => T1647: Plist File Modification",
Platforms: []string{"macOS"},
},
"T1648": {
Name: "TA0002: Execution => T1648: Serverless Execution",
Platforms: []string{"IaaS", "Office 365", "SaaS"},
},
"T1649": {
Name: "TA0006: Credential Access => T1649: Steal or Forge Authentication Certificates",
Platforms: []string{"Azure AD", "Linux", "Windows", "macOS"},
},
}

3112 cwe/en.go
File diff suppressed because it is too large

2890 cwe/ja.go
File diff suppressed because it is too large


@@ -291,6 +291,8 @@ func DetectGitHubCves(r *models.ScanResult, githubConfs map[string]config.GitHub
if len(githubConfs) == 0 {
return nil
}
r.GitHubManifests = models.DependencyGraphManifests{}
for ownerRepo, setting := range githubConfs {
ss := strings.Split(ownerRepo, "/")
if len(ss) != 2 {
@@ -303,6 +305,10 @@ func DetectGitHubCves(r *models.ScanResult, githubConfs map[string]config.GitHub
}
logging.Log.Infof("%s: %d CVEs detected with GHSA %s/%s",
r.FormatServerName(), n, owner, repo)
if err = DetectGitHubDependencyGraph(r, owner, repo, setting.Token); err != nil {
return xerrors.Errorf("Failed to access GitHub Dependency graph: %w", err)
}
}
return nil
}
@@ -426,7 +432,7 @@ func detectPkgsCvesWithOval(cnf config.GovalDictConf, r *models.ScanResult, logO
}
if !ok {
switch r.Family {
case constant.Debian:
case constant.Debian, constant.Ubuntu:
logging.Log.Infof("Skip OVAL and Scan with gost alone.")
logging.Log.Infof("%s: %d CVEs are detected with OVAL", r.FormatServerName(), 0)
return nil
@@ -466,19 +472,21 @@ func detectPkgsCvesWithGost(cnf config.GostConf, r *models.ScanResult, logOpts l
nCVEs, err := client.DetectCVEs(r, true)
if err != nil {
if r.Family == constant.Debian {
switch r.Family {
case constant.Debian, constant.Ubuntu:
return xerrors.Errorf("Failed to detect CVEs with gost: %w", err)
default:
return xerrors.Errorf("Failed to detect unfixed CVEs with gost: %w", err)
}
return xerrors.Errorf("Failed to detect unfixed CVEs with gost: %w", err)
}
if r.Family == constant.Debian {
logging.Log.Infof("%s: %d CVEs are detected with gost",
r.FormatServerName(), nCVEs)
} else {
logging.Log.Infof("%s: %d unfixed CVEs are detected with gost",
r.FormatServerName(), nCVEs)
switch r.Family {
case constant.Debian, constant.Ubuntu:
logging.Log.Infof("%s: %d CVEs are detected with gost", r.FormatServerName(), nCVEs)
default:
logging.Log.Infof("%s: %d unfixed CVEs are detected with gost", r.FormatServerName(), nCVEs)
}
return nil
}


@@ -109,14 +109,20 @@ func FillWithExploit(r *models.ScanResult, cnf config.ExploitConf, logOpts loggi
// ConvertToModelsExploit converts exploit model to vuls model
func ConvertToModelsExploit(es []exploitmodels.Exploit) (exploits []models.Exploit) {
for _, e := range es {
var documentURL, shellURL *string
var documentURL, shellURL, paperURL, ghdbURL *string
if e.OffensiveSecurity != nil {
os := e.OffensiveSecurity
if os.Document != nil {
documentURL = &os.Document.DocumentURL
documentURL = &os.Document.FileURL
}
if os.ShellCode != nil {
shellURL = &os.ShellCode.ShellCodeURL
shellURL = &os.ShellCode.FileURL
}
if os.Paper != nil {
paperURL = &os.Paper.FileURL
}
if os.GHDB != nil {
ghdbURL = &os.GHDB.Link
}
}
exploit := models.Exploit{
@@ -126,6 +132,8 @@ func ConvertToModelsExploit(es []exploitmodels.Exploit) (exploits []models.Explo
Description: e.Description,
DocumentURL: documentURL,
ShellCodeURL: shellURL,
PaperURL: paperURL,
GHDBURL: ghdbURL,
}
exploits = append(exploits, exploit)
}


@@ -29,7 +29,7 @@ func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string,
// TODO Use `https://github.com/shurcooL/githubv4` if the tool supports vulnerabilityAlerts Endpoint
// Memo : https://developer.github.com/v4/explorer/
const jsonfmt = `{"query":
"query { repository(owner:\"%s\", name:\"%s\") { url vulnerabilityAlerts(first: %d, states:[OPEN], %s) { pageInfo { endCursor hasNextPage startCursor } edges { node { id dismissReason dismissedAt securityVulnerability{ package { name ecosystem } severity vulnerableVersionRange firstPatchedVersion { identifier } } securityAdvisory { description ghsaId permalink publishedAt summary updatedAt withdrawnAt origin severity references { url } identifiers { type value } } } } } } } "}`
"query { repository(owner:\"%s\", name:\"%s\") { url vulnerabilityAlerts(first: %d, states:[OPEN], %s) { pageInfo { endCursor hasNextPage startCursor } edges { node { id dismissReason dismissedAt securityVulnerability{ package { name ecosystem } severity vulnerableVersionRange firstPatchedVersion { identifier } } vulnerableManifestFilename vulnerableManifestPath vulnerableRequirements securityAdvisory { description ghsaId permalink publishedAt summary updatedAt withdrawnAt origin severity references { url } identifiers { type value } } } } } } } "}`
after := ""
for {
@@ -79,11 +79,15 @@ func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string,
continue
}
pkgName := fmt.Sprintf("%s %s",
alerts.Data.Repository.URL, v.Node.SecurityVulnerability.Package.Name)
m := models.GitHubSecurityAlert{
PackageName: pkgName,
Repository: alerts.Data.Repository.URL,
Package: models.GSAVulnerablePackage{
Name: v.Node.SecurityVulnerability.Package.Name,
Ecosystem: v.Node.SecurityVulnerability.Package.Ecosystem,
ManifestFilename: v.Node.VulnerableManifestFilename,
ManifestPath: v.Node.VulnerableManifestPath,
Requirements: v.Node.VulnerableRequirements,
},
FixedIn: v.Node.SecurityVulnerability.FirstPatchedVersion.Identifier,
AffectedRange: v.Node.SecurityVulnerability.VulnerableVersionRange,
Dismissed: len(v.Node.DismissReason) != 0,
@@ -148,7 +152,7 @@ func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string,
return nCVEs, err
}
//SecurityAlerts has detected CVE-IDs, PackageNames, Refs
// SecurityAlerts has detected CVE-IDs, PackageNames, Refs
type SecurityAlerts struct {
Data struct {
Repository struct {
@@ -175,7 +179,10 @@ type SecurityAlerts struct {
Identifier string `json:"identifier"`
} `json:"firstPatchedVersion"`
} `json:"securityVulnerability"`
SecurityAdvisory struct {
VulnerableManifestFilename string `json:"vulnerableManifestFilename"`
VulnerableManifestPath string `json:"vulnerableManifestPath"`
VulnerableRequirements string `json:"vulnerableRequirements"`
SecurityAdvisory struct {
Description string `json:"description"`
GhsaID string `json:"ghsaId"`
Permalink string `json:"permalink"`
@@ -199,3 +206,138 @@ type SecurityAlerts struct {
} `json:"repository"`
} `json:"data"`
}
// DetectGitHubDependencyGraph access to owner/repo on GitHub and fetch dependency graph of the repository via GitHub API v4 GraphQL and then set to the given ScanResult.
// https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-the-dependency-graph
func DetectGitHubDependencyGraph(r *models.ScanResult, owner, repo, token string) (err error) {
src := oauth2.StaticTokenSource(
&oauth2.Token{AccessToken: token},
)
//TODO Proxy
httpClient := oauth2.NewClient(context.Background(), src)
return fetchDependencyGraph(r, httpClient, owner, repo, "", "")
}
// recursive function
func fetchDependencyGraph(r *models.ScanResult, httpClient *http.Client, owner, repo, after, dependenciesAfter string) (err error) {
const queryFmt = `{"query":
"query { repository(owner:\"%s\", name:\"%s\") { url dependencyGraphManifests(first: %d, withDependencies: true%s) { pageInfo { endCursor hasNextPage } edges { node { blobPath filename repository { url } parseable exceedsMaxSize dependenciesCount dependencies%s { pageInfo { endCursor hasNextPage } edges { node { packageName packageManager repository { url } requirements hasDependencies } } } } } } } }"}`
queryStr := fmt.Sprintf(queryFmt, owner, repo, 100, after, dependenciesAfter)
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
req, err := http.NewRequestWithContext(ctx, http.MethodPost,
"https://api.github.com/graphql",
bytes.NewBuffer([]byte(queryStr)),
)
defer cancel()
if err != nil {
return err
}
// https://docs.github.com/en/graphql/overview/schema-previews#access-to-a-repository-s-dependency-graph-preview
// TODO remove this header if it is no longer preview status in the future.
req.Header.Set("Accept", "application/vnd.github.hawkgirl-preview+json")
req.Header.Set("Content-Type", "application/json")
resp, err := httpClient.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return err
}
graph := DependencyGraph{}
if err := json.Unmarshal(body, &graph); err != nil {
return err
}
if graph.Data.Repository.URL == "" {
return errof.New(errof.ErrFailedToAccessGithubAPI,
fmt.Sprintf("Failed to access to GitHub API. Response: %s", string(body)))
}
dependenciesAfter = ""
for _, m := range graph.Data.Repository.DependencyGraphManifests.Edges {
manifest, ok := r.GitHubManifests[m.Node.BlobPath]
if !ok {
manifest = models.DependencyGraphManifest{
BlobPath: m.Node.BlobPath,
Filename: m.Node.Filename,
Repository: m.Node.Repository.URL,
Dependencies: []models.Dependency{},
}
}
for _, d := range m.Node.Dependencies.Edges {
manifest.Dependencies = append(manifest.Dependencies, models.Dependency{
PackageName: d.Node.PackageName,
PackageManager: d.Node.PackageManager,
Repository: d.Node.Repository.URL,
Requirements: d.Node.Requirements,
})
}
r.GitHubManifests[m.Node.BlobPath] = manifest
if m.Node.Dependencies.PageInfo.HasNextPage {
dependenciesAfter = fmt.Sprintf(`(after: \"%s\")`, m.Node.Dependencies.PageInfo.EndCursor)
}
}
if dependenciesAfter != "" {
return fetchDependencyGraph(r, httpClient, owner, repo, after, dependenciesAfter)
}
if graph.Data.Repository.DependencyGraphManifests.PageInfo.HasNextPage {
after = fmt.Sprintf(`, after: \"%s\"`, graph.Data.Repository.DependencyGraphManifests.PageInfo.EndCursor)
return fetchDependencyGraph(r, httpClient, owner, repo, after, dependenciesAfter)
}
return nil
}
// DependencyGraph is a GitHub API response
type DependencyGraph struct {
Data struct {
Repository struct {
URL string `json:"url"`
DependencyGraphManifests struct {
PageInfo struct {
EndCursor string `json:"endCursor"`
HasNextPage bool `json:"hasNextPage"`
} `json:"pageInfo"`
Edges []struct {
Node struct {
BlobPath string `json:"blobPath"`
Filename string `json:"filename"`
Repository struct {
URL string `json:"url"`
}
Parseable bool `json:"parseable"`
ExceedsMaxSize bool `json:"exceedsMaxSize"`
DependenciesCount int `json:"dependenciesCount"`
Dependencies struct {
PageInfo struct {
EndCursor string `json:"endCursor"`
HasNextPage bool `json:"hasNextPage"`
} `json:"pageInfo"`
Edges []struct {
Node struct {
PackageName string `json:"packageName"`
PackageManager string `json:"packageManager"`
Repository struct {
URL string `json:"url"`
}
Requirements string `json:"requirements"`
HasDependencies bool `json:"hasDependencies"`
} `json:"node"`
} `json:"edges"`
} `json:"dependencies"`
} `json:"node"`
} `json:"edges"`
} `json:"dependencyGraphManifests"`
} `json:"repository"`
} `json:"data"`
}


@@ -183,11 +183,7 @@ func getMinusDiffCves(previous, current models.ScanResult) models.VulnInfos {
}
func isCveInfoUpdated(cveID string, previous, current models.ScanResult) bool {
cTypes := []models.CveContentType{
models.Nvd,
models.Jvn,
models.NewCveContentType(current.Family),
}
cTypes := append([]models.CveContentType{models.Nvd, models.Jvn}, models.GetCveContentTypes(current.Family)...)
prevLastModified := map[models.CveContentType][]time.Time{}
preVinfo, ok := previous.ScannedCves[cveID]


@@ -21,7 +21,7 @@ import (
"golang.org/x/xerrors"
)
//WpCveInfos is for wpscan json
// WpCveInfos is for wpscan json
type WpCveInfos struct {
ReleaseDate string `json:"release_date"`
ChangelogURL string `json:"changelog_url"`
@@ -33,7 +33,7 @@ type WpCveInfos struct {
Error string `json:"error"`
}
//WpCveInfo is for wpscan json
// WpCveInfo is for wpscan json
type WpCveInfo struct {
ID string `json:"id"`
Title string `json:"title"`
@@ -44,7 +44,7 @@ type WpCveInfo struct {
FixedIn string `json:"fixed_in"`
}
//References is for wpscan json
// References is for wpscan json
type References struct {
URL []string `json:"url"`
Cve []string `json:"cve"`

178 go.mod

@@ -4,19 +4,21 @@ go 1.18
require (
github.com/Azure/azure-sdk-for-go v66.0.0+incompatible
github.com/BurntSushi/toml v1.2.0
github.com/BurntSushi/toml v1.2.1
github.com/CycloneDX/cyclonedx-go v0.7.0
github.com/Ullaakut/nmap/v2 v2.1.2-0.20210406060955-59a52fe80a4f
github.com/aquasecurity/go-dep-parser v0.0.0-20220819065825-29e1e04fb7ae
github.com/aquasecurity/trivy v0.31.3
github.com/aquasecurity/go-dep-parser v0.0.0-20221114145626-35ef808901e8
github.com/aquasecurity/trivy v0.35.0
github.com/aquasecurity/trivy-db v0.0.0-20220627104749-930461748b63
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d
github.com/aws/aws-sdk-go v1.44.77
github.com/aws/aws-sdk-go v1.44.136
github.com/c-robinson/iplib v1.0.3
github.com/cenkalti/backoff v2.2.1+incompatible
github.com/d4l3k/messagediff v1.2.2-0.20190829033028-7e0a312ae40b
github.com/emersion/go-sasl v0.0.0-20200509203442-7bfe0ed36a21
github.com/emersion/go-smtp v0.14.0
github.com/google/subcommands v1.2.0
github.com/google/uuid v1.3.0
github.com/gosuri/uitable v0.0.4
github.com/hashicorp/go-uuid v1.0.3
github.com/hashicorp/go-version v1.6.0
@@ -31,29 +33,31 @@ require (
github.com/mitchellh/go-homedir v1.1.0
github.com/nlopes/slack v0.6.0
github.com/olekukonko/tablewriter v0.0.5
github.com/package-url/packageurl-go v0.1.1-0.20220203205134-d70459300c8a
github.com/parnurzeal/gorequest v0.2.16
github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5
github.com/sirupsen/logrus v1.9.0
github.com/spf13/cobra v1.5.0
github.com/spf13/cobra v1.6.1
github.com/vulsio/go-cti v0.0.2-0.20220613013115-8c7e57a6aa86
github.com/vulsio/go-cve-dictionary v0.8.2-0.20211028094424-0a854f8e8f85
github.com/vulsio/go-exploitdb v0.4.2
github.com/vulsio/go-cve-dictionary v0.8.2
github.com/vulsio/go-exploitdb v0.4.4
github.com/vulsio/go-kev v0.1.1-0.20220118062020-5f69b364106f
github.com/vulsio/go-msfdb v0.2.1-0.20211028071756-4a9759bd9f14
github.com/vulsio/gost v0.4.2-0.20220630181607-2ed593791ec3
github.com/vulsio/gost v0.4.2-0.20230203045609-dcfab39a9ff4
github.com/vulsio/goval-dictionary v0.8.0
go.etcd.io/bbolt v1.3.6
golang.org/x/exp v0.0.0-20220613132600-b0d781184e0d
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5
golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f
golang.org/x/exp v0.0.0-20230213192124-5e25df0256eb
golang.org/x/oauth2 v0.1.0
golang.org/x/sync v0.1.0
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2
)
require (
cloud.google.com/go v0.100.2 // indirect
cloud.google.com/go/compute v1.6.1 // indirect
cloud.google.com/go/iam v0.3.0 // indirect
cloud.google.com/go/storage v1.14.0 // indirect
cloud.google.com/go v0.105.0 // indirect
cloud.google.com/go/compute v1.14.0 // indirect
cloud.google.com/go/compute/metadata v0.2.3 // indirect
cloud.google.com/go/iam v0.8.0 // indirect
cloud.google.com/go/storage v1.27.0 // indirect
github.com/Azure/go-autorest v14.2.0+incompatible // indirect
github.com/Azure/go-autorest/autorest v0.11.28 // indirect
github.com/Azure/go-autorest/autorest/adal v0.9.21 // indirect
@@ -61,46 +65,41 @@ require (
github.com/Azure/go-autorest/autorest/to v0.3.0 // indirect
github.com/Azure/go-autorest/logger v0.2.1 // indirect
github.com/Azure/go-autorest/tracing v0.6.0 // indirect
github.com/Microsoft/go-winio v0.5.2 // indirect
github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7 // indirect
github.com/PuerkitoBio/goquery v1.6.1 // indirect
github.com/VividCortex/ewma v1.2.0 // indirect
github.com/acomagu/bufpipe v1.0.3 // indirect
github.com/andybalholm/cascadia v1.2.0 // indirect
github.com/aquasecurity/go-gem-version v0.0.0-20201115065557-8eed6fe000ce // indirect
github.com/aquasecurity/go-npm-version v0.0.0-20201110091526-0b796d180798 // indirect
github.com/aquasecurity/go-pep440-version v0.0.0-20210121094942-22b2f8951d46 // indirect
github.com/aquasecurity/go-version v0.0.0-20210121072130-637058cfe492 // indirect
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect
github.com/briandowns/spinner v1.18.1 // indirect
github.com/caarlos0/env/v6 v6.9.3 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/briandowns/spinner v1.21.0 // indirect
github.com/caarlos0/env/v6 v6.10.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/cheggaaa/pb/v3 v3.1.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dgryski/go-minhash v0.0.0-20170608043002-7fe510aff544 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/docker/cli v20.10.17+incompatible // indirect
github.com/dnaeon/go-vcr v1.2.0 // indirect
github.com/docker/cli v20.10.20+incompatible // indirect
github.com/docker/distribution v2.8.1+incompatible // indirect
github.com/docker/docker v20.10.17+incompatible // indirect
github.com/docker/docker-credential-helpers v0.6.4 // indirect
github.com/ekzhu/minhash-lsh v0.0.0-20171225071031-5c06ee8586a1 // indirect
github.com/emirpasic/gods v1.12.0 // indirect
github.com/fatih/color v1.13.0 // indirect
github.com/fsnotify/fsnotify v1.5.4 // indirect
github.com/go-enry/go-license-detector/v4 v4.3.0 // indirect
github.com/go-git/gcfg v1.5.0 // indirect
github.com/go-git/go-billy/v5 v5.3.1 // indirect
github.com/go-git/go-git/v5 v5.4.2 // indirect
github.com/docker/docker v20.10.20+incompatible // indirect
github.com/docker/docker-credential-helpers v0.7.0 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/fatih/color v1.14.1 // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/go-redis/redis/v8 v8.11.5 // indirect
github.com/go-sql-driver/mysql v1.6.0 // indirect
github.com/go-sql-driver/mysql v1.7.0 // indirect
github.com/go-stack/stack v1.8.1 // indirect
github.com/gofrs/uuid v4.0.0+incompatible // indirect
github.com/golang-jwt/jwt/v4 v4.2.0 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/google/go-containerregistry v0.8.0 // indirect
github.com/google/go-cmp v0.5.9 // indirect
github.com/google/go-containerregistry v0.12.0 // indirect
github.com/google/licenseclassifier/v2 v2.0.0-pre6 // indirect
github.com/googleapis/gax-go/v2 v2.4.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.2.1 // indirect
github.com/googleapis/gax-go/v2 v2.7.0 // indirect
github.com/gopherjs/gopherjs v1.17.2 // indirect
github.com/gorilla/websocket v1.4.2 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
@@ -109,85 +108,74 @@ require (
github.com/hashicorp/go-retryablehttp v0.7.1 // indirect
github.com/hashicorp/go-safetemp v1.0.0 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/hhatto/gorst v0.0.0-20181029133204-ca9f730cac5b // indirect
github.com/imdario/mergo v0.3.13 // indirect
github.com/inconshreveable/log15 v0.0.0-20201112154412-8562bdadbbac // indirect
github.com/inconshreveable/mousetrap v1.0.0 // indirect
github.com/jackc/chunkreader/v2 v2.0.1 // indirect
github.com/jackc/pgconn v1.12.1 // indirect
github.com/jackc/pgio v1.0.0 // indirect
github.com/inconshreveable/log15 v2.16.0+incompatible // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgproto3/v2 v2.3.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
github.com/jackc/pgtype v1.11.0 // indirect
github.com/jackc/pgx/v4 v4.16.1 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/jdkato/prose v1.1.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
github.com/jackc/pgx/v5 v5.3.0 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/kevinburke/ssh_config v1.1.0 // indirect
github.com/klauspost/compress v1.15.6 // indirect
github.com/magiconair/properties v1.8.6 // indirect
github.com/jtolds/gls v4.20.0+incompatible // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/klauspost/compress v1.15.11 // indirect
github.com/liamg/jfather v0.0.7 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/masahiro331/go-mvn-version v0.0.0-20210429150710-d3157d602a08 // indirect
github.com/mattn/go-colorable v0.1.12 // indirect
github.com/mattn/go-isatty v0.0.14 // indirect
github.com/mattn/go-runewidth v0.0.13 // indirect
github.com/mattn/go-sqlite3 v1.14.14 // indirect
github.com/masahiro331/go-xfs-filesystem v0.0.0-20221127135739-051c25f1becd // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.17 // indirect
github.com/mattn/go-runewidth v0.0.14 // indirect
github.com/mattn/go-sqlite3 v1.14.16 // indirect
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/mitchellh/go-testing-interface v1.0.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/montanaflynn/stats v0.0.0-20151014174947-eeaced052adb // indirect
github.com/nsf/termbox-go v1.1.1 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.0.3-0.20220303224323-02efb9a75ee1 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect
github.com/pelletier/go-toml/v2 v2.0.2 // indirect
github.com/opencontainers/image-spec v1.1.0-rc2 // indirect
github.com/pelletier/go-toml/v2 v2.0.6 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/rivo/uniseg v0.3.1 // indirect
github.com/rivo/uniseg v0.4.4 // indirect
github.com/rogpeppe/go-internal v1.8.1 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/sergi/go-diff v1.2.0 // indirect
github.com/shogo82148/go-shuffle v0.0.0-20170808115208-59829097ff3b // indirect
github.com/spf13/afero v1.9.2 // indirect
github.com/samber/lo v1.33.0 // indirect
github.com/sergi/go-diff v1.3.1 // indirect
github.com/smartystreets/assertions v1.13.0 // indirect
github.com/spdx/tools-golang v0.3.0 // indirect
github.com/spf13/afero v1.9.3 // indirect
github.com/spf13/cast v1.5.0 // indirect
github.com/spf13/jwalterweatherman v1.1.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/spf13/viper v1.12.0 // indirect
github.com/stretchr/objx v0.4.0 // indirect
github.com/stretchr/testify v1.8.0 // indirect
github.com/subosito/gotenv v1.4.0 // indirect
github.com/spf13/viper v1.15.0 // indirect
github.com/stretchr/objx v0.5.0 // indirect
github.com/stretchr/testify v1.8.1 // indirect
github.com/subosito/gotenv v1.4.2 // indirect
github.com/ulikunitz/xz v0.5.10 // indirect
github.com/xanzy/ssh-agent v0.3.0 // indirect
go.opencensus.io v0.23.0 // indirect
go.uber.org/atomic v1.7.0 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
go.opencensus.io v0.24.0 // indirect
go.uber.org/atomic v1.10.0 // indirect
go.uber.org/goleak v1.1.12 // indirect
go.uber.org/multierr v1.7.0 // indirect
go.uber.org/zap v1.22.0 // indirect
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa // indirect
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect
golang.org/x/net v0.0.0-20220802222814-0bcc04d9c69b // indirect
golang.org/x/sys v0.0.0-20220731174439-a90be440212d // indirect
golang.org/x/term v0.0.0-20220526004731-065cf7ba2467 // indirect
golang.org/x/text v0.3.7 // indirect
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 // indirect
gonum.org/v1/gonum v0.7.0 // indirect
google.golang.org/api v0.81.0 // indirect
go.uber.org/multierr v1.8.0 // indirect
go.uber.org/zap v1.23.0 // indirect
golang.org/x/crypto v0.6.0 // indirect
golang.org/x/mod v0.8.0 // indirect
golang.org/x/net v0.7.0 // indirect
golang.org/x/sys v0.5.0 // indirect
golang.org/x/term v0.5.0 // indirect
golang.org/x/text v0.7.0 // indirect
golang.org/x/tools v0.6.0 // indirect
google.golang.org/api v0.107.0 // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/genproto v0.0.0-20220624142145-8cd45d7dbd1f // indirect
google.golang.org/grpc v1.48.0 // indirect
google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef // indirect
google.golang.org/grpc v1.52.0 // indirect
google.golang.org/protobuf v1.28.1 // indirect
gopkg.in/ini.v1 v1.66.6 // indirect
gopkg.in/neurosnap/sentences.v1 v1.0.6 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
gorm.io/driver/mysql v1.3.5 // indirect
gorm.io/driver/postgres v1.3.8 // indirect
gorm.io/driver/sqlite v1.3.6 // indirect
gorm.io/gorm v1.23.8 // indirect
k8s.io/utils v0.0.0-20220210201930-3a6ce19ff2f9 // indirect
gorm.io/driver/mysql v1.4.7 // indirect
gorm.io/driver/postgres v1.4.8 // indirect
gorm.io/driver/sqlite v1.4.4 // indirect
gorm.io/gorm v1.24.5 // indirect
k8s.io/utils v0.0.0-20220728103510-ee6ede2d64ed // indirect
moul.io/http2curl v1.0.0 // indirect
)

1633
go.sum

File diff suppressed because it is too large

View File

@@ -32,7 +32,7 @@ func (red RedHat) DetectCVEs(r *models.ScanResult, ignoreWillNotFix bool) (nCVEs
if err != nil {
return 0, xerrors.Errorf("Failed to join URLPath. err: %w", err)
}
responses, err := getAllUnfixedCvesViaHTTP(r, prefix)
responses, err := getCvesWithFixStateViaHTTP(r, prefix, "unfixed-cves")
if err != nil {
return 0, xerrors.Errorf("Failed to get Unfixed CVEs via HTTP. err: %w", err)
}

View File

@@ -5,6 +5,8 @@ package gost
import (
"encoding/json"
"fmt"
"regexp"
"strings"
"golang.org/x/xerrors"
@@ -22,19 +24,52 @@ type Ubuntu struct {
func (ubu Ubuntu) supported(version string) bool {
_, ok := map[string]string{
"606": "dapper",
"610": "edgy",
"704": "feisty",
"710": "gutsy",
"804": "hardy",
"810": "intrepid",
"904": "jaunty",
"910": "karmic",
"1004": "lucid",
"1010": "maverick",
"1104": "natty",
"1110": "oneiric",
"1204": "precise",
"1210": "quantal",
"1304": "raring",
"1310": "saucy",
"1404": "trusty",
"1410": "utopic",
"1504": "vivid",
"1510": "wily",
"1604": "xenial",
"1610": "yakkety",
"1704": "zesty",
"1710": "artful",
"1804": "bionic",
"1810": "cosmic",
"1904": "disco",
"1910": "eoan",
"2004": "focal",
"2010": "groovy",
"2104": "hirsute",
"2110": "impish",
"2204": "jammy",
"2210": "kinetic",
// "2304": "lunar",
}[version]
return ok
}
type cveContent struct {
cveContent models.CveContent
fixStatuses models.PackageFixStatuses
}
var kernelSourceNamePattern = regexp.MustCompile(`^linux((-(ti-omap4|armadaxp|mako|manta|flo|goldfish|joule|raspi2?|snapdragon|aws|azure|bluefield|dell300x|gcp|gke(op)?|ibm|intel|lowlatency|kvm|oem|oracle|euclid|lts-xenial|hwe|riscv))?(-(edge|fde|iotg|hwe|osp1))?(-[\d\.]+)?)?$`)
// DetectCVEs fills in CVE information held in Gost
func (ubu Ubuntu) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error) {
ubuReleaseVer := strings.Replace(r.Release, ".", "", 1)
@@ -43,129 +78,222 @@ func (ubu Ubuntu) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error
return 0, nil
}
linuxImage := "linux-image-" + r.RunningKernel.Release
// Add linux and set the version of running kernel to search Gost.
if r.Container.ContainerID == "" {
newVer := ""
if p, ok := r.Packages[linuxImage]; ok {
newVer = p.NewVersion
}
r.Packages["linux"] = models.Package{
Name: "linux",
Version: r.RunningKernel.Version,
NewVersion: newVer,
}
}
packCvesList := []packCves{}
detects := map[string]cveContent{}
if ubu.driver == nil {
url, err := util.URLPathJoin(ubu.baseURL, "ubuntu", ubuReleaseVer, "pkgs")
urlPrefix, err := util.URLPathJoin(ubu.baseURL, "ubuntu", ubuReleaseVer, "pkgs")
if err != nil {
return 0, xerrors.Errorf("Failed to join URLPath. err: %w", err)
}
responses, err := getAllUnfixedCvesViaHTTP(r, url)
responses, err := getCvesWithFixStateViaHTTP(r, urlPrefix, "fixed-cves")
if err != nil {
return 0, xerrors.Errorf("Failed to get Unfixed CVEs via HTTP. err: %w", err)
return 0, xerrors.Errorf("Failed to get fixed CVEs via HTTP. err: %w", err)
}
for _, res := range responses {
ubuCves := map[string]gostmodels.UbuntuCVE{}
if err := json.Unmarshal([]byte(res.json), &ubuCves); err != nil {
if !res.request.isSrcPack {
continue
}
n := strings.NewReplacer("linux-signed", "linux", "linux-meta", "linux").Replace(res.request.packName)
if kernelSourceNamePattern.MatchString(n) {
isDetect := false
for _, bn := range r.SrcPackages[res.request.packName].BinaryNames {
if bn == fmt.Sprintf("linux-image-%s", r.RunningKernel.Release) {
isDetect = true
break
}
}
if !isDetect {
continue
}
}
fixeds := map[string]gostmodels.UbuntuCVE{}
if err := json.Unmarshal([]byte(res.json), &fixeds); err != nil {
return 0, xerrors.Errorf("Failed to unmarshal json. err: %w", err)
}
cves := []models.CveContent{}
for _, ubucve := range ubuCves {
cves = append(cves, *ubu.ConvertToModel(&ubucve))
for _, content := range detect(fixeds, true, models.SrcPackage{Name: res.request.packName, Version: r.SrcPackages[res.request.packName].Version, BinaryNames: r.SrcPackages[res.request.packName].BinaryNames}, fmt.Sprintf("linux-image-%s", r.RunningKernel.Release)) {
c, ok := detects[content.cveContent.CveID]
if ok {
content.fixStatuses = append(content.fixStatuses, c.fixStatuses...)
}
detects[content.cveContent.CveID] = content
}
packCvesList = append(packCvesList, packCves{
packName: res.request.packName,
isSrcPack: res.request.isSrcPack,
cves: cves,
})
}
} else {
for _, pack := range r.Packages {
ubuCves, err := ubu.driver.GetUnfixedCvesUbuntu(ubuReleaseVer, pack.Name)
if err != nil {
return 0, xerrors.Errorf("Failed to get Unfixed CVEs For Package. err: %w", err)
}
cves := []models.CveContent{}
for _, ubucve := range ubuCves {
cves = append(cves, *ubu.ConvertToModel(&ubucve))
}
packCvesList = append(packCvesList, packCves{
packName: pack.Name,
isSrcPack: false,
cves: cves,
})
}
// SrcPack
responses, err = getCvesWithFixStateViaHTTP(r, urlPrefix, "unfixed-cves")
if err != nil {
return 0, xerrors.Errorf("Failed to get unfixed CVEs via HTTP. err: %w", err)
}
for _, res := range responses {
if !res.request.isSrcPack {
continue
}
n := strings.NewReplacer("linux-signed", "linux", "linux-meta", "linux").Replace(res.request.packName)
if kernelSourceNamePattern.MatchString(n) {
isDetect := false
for _, bn := range r.SrcPackages[res.request.packName].BinaryNames {
if bn == fmt.Sprintf("linux-image-%s", r.RunningKernel.Release) {
isDetect = true
break
}
}
if !isDetect {
continue
}
}
unfixeds := map[string]gostmodels.UbuntuCVE{}
if err := json.Unmarshal([]byte(res.json), &unfixeds); err != nil {
return 0, xerrors.Errorf("Failed to unmarshal json. err: %w", err)
}
for _, content := range detect(unfixeds, false, models.SrcPackage{Name: res.request.packName, Version: r.SrcPackages[res.request.packName].Version, BinaryNames: r.SrcPackages[res.request.packName].BinaryNames}, fmt.Sprintf("linux-image-%s", r.RunningKernel.Release)) {
c, ok := detects[content.cveContent.CveID]
if ok {
content.fixStatuses = append(content.fixStatuses, c.fixStatuses...)
}
detects[content.cveContent.CveID] = content
}
}
} else {
for _, pack := range r.SrcPackages {
ubuCves, err := ubu.driver.GetUnfixedCvesUbuntu(ubuReleaseVer, pack.Name)
n := strings.NewReplacer("linux-signed", "linux", "linux-meta", "linux").Replace(pack.Name)
if kernelSourceNamePattern.MatchString(n) {
isDetect := false
for _, bn := range pack.BinaryNames {
if bn == fmt.Sprintf("linux-image-%s", r.RunningKernel.Release) {
isDetect = true
break
}
}
if !isDetect {
continue
}
}
fixeds, err := ubu.driver.GetFixedCvesUbuntu(ubuReleaseVer, n)
if err != nil {
return 0, xerrors.Errorf("Failed to get Unfixed CVEs For SrcPackage. err: %w", err)
return 0, xerrors.Errorf("Failed to get fixed CVEs for SrcPackage. err: %w", err)
}
cves := []models.CveContent{}
for _, ubucve := range ubuCves {
cves = append(cves, *ubu.ConvertToModel(&ubucve))
for _, content := range detect(fixeds, true, pack, fmt.Sprintf("linux-image-%s", r.RunningKernel.Release)) {
c, ok := detects[content.cveContent.CveID]
if ok {
content.fixStatuses = append(content.fixStatuses, c.fixStatuses...)
}
detects[content.cveContent.CveID] = content
}
unfixeds, err := ubu.driver.GetUnfixedCvesUbuntu(ubuReleaseVer, n)
if err != nil {
return 0, xerrors.Errorf("Failed to get unfixed CVEs for SrcPackage. err: %w", err)
}
for _, content := range detect(unfixeds, false, pack, fmt.Sprintf("linux-image-%s", r.RunningKernel.Release)) {
c, ok := detects[content.cveContent.CveID]
if ok {
content.fixStatuses = append(content.fixStatuses, c.fixStatuses...)
}
detects[content.cveContent.CveID] = content
}
packCvesList = append(packCvesList, packCves{
packName: pack.Name,
isSrcPack: true,
cves: cves,
})
}
}
delete(r.Packages, "linux")
for _, p := range packCvesList {
for _, cve := range p.cves {
v, ok := r.ScannedCves[cve.CveID]
if ok {
if v.CveContents == nil {
v.CveContents = models.NewCveContents(cve)
} else {
v.CveContents[models.UbuntuAPI] = []models.CveContent{cve}
}
for _, content := range detects {
v, ok := r.ScannedCves[content.cveContent.CveID]
if ok {
if v.CveContents == nil {
v.CveContents = models.NewCveContents(content.cveContent)
} else {
v = models.VulnInfo{
CveID: cve.CveID,
CveContents: models.NewCveContents(cve),
Confidences: models.Confidences{models.UbuntuAPIMatch},
}
nCVEs++
v.CveContents[models.UbuntuAPI] = []models.CveContent{content.cveContent}
v.Confidences = models.Confidences{models.UbuntuAPIMatch}
}
} else {
v = models.VulnInfo{
CveID: content.cveContent.CveID,
CveContents: models.NewCveContents(content.cveContent),
Confidences: models.Confidences{models.UbuntuAPIMatch},
}
}
names := []string{}
if p.isSrcPack {
if srcPack, ok := r.SrcPackages[p.packName]; ok {
for _, binName := range srcPack.BinaryNames {
if _, ok := r.Packages[binName]; ok {
names = append(names, binName)
for _, s := range content.fixStatuses {
v.AffectedPackages = v.AffectedPackages.Store(s)
}
r.ScannedCves[content.cveContent.CveID] = v
}
return len(detects), nil
}
func detect(cves map[string]gostmodels.UbuntuCVE, fixed bool, srcPkg models.SrcPackage, runningKernelBinaryPkgName string) []cveContent {
n := strings.NewReplacer("linux-signed", "linux", "linux-meta", "linux").Replace(srcPkg.Name)
var contents []cveContent
for _, cve := range cves {
c := cveContent{
cveContent: *(Ubuntu{}).ConvertToModel(&cve),
}
if fixed {
for _, p := range cve.Patches {
for _, rp := range p.ReleasePatches {
installedVersion := srcPkg.Version
patchedVersion := rp.Note
// https://git.launchpad.net/ubuntu-cve-tracker/tree/scripts/generate-oval#n384
if kernelSourceNamePattern.MatchString(n) && strings.HasPrefix(srcPkg.Name, "linux-meta") {
// 5.15.0.1026.30~20.04.16 -> 5.15.0.1026
ss := strings.Split(installedVersion, ".")
if len(ss) >= 4 {
installedVersion = strings.Join(ss[:4], ".")
}
// 5.15.0-1026.30~20.04.16 -> 5.15.0.1026
lhs, rhs, ok := strings.Cut(patchedVersion, "-")
if ok {
patchedVersion = fmt.Sprintf("%s.%s", lhs, strings.Split(rhs, ".")[0])
}
}
affected, err := isGostDefAffected(installedVersion, patchedVersion)
if err != nil {
logging.Log.Debugf("Failed to parse versions: %s, Ver: %s, Gost: %s", err, installedVersion, patchedVersion)
continue
}
if affected {
for _, bn := range srcPkg.BinaryNames {
if kernelSourceNamePattern.MatchString(n) && bn != runningKernelBinaryPkgName {
continue
}
c.fixStatuses = append(c.fixStatuses, models.PackageFixStatus{
Name: bn,
FixedIn: patchedVersion,
})
}
}
}
} else {
if p.packName == "linux" {
names = append(names, linuxImage)
} else {
names = append(names, p.packName)
}
}
for _, name := range names {
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
Name: name,
} else {
for _, bn := range srcPkg.BinaryNames {
if kernelSourceNamePattern.MatchString(n) && bn != runningKernelBinaryPkgName {
continue
}
c.fixStatuses = append(c.fixStatuses, models.PackageFixStatus{
Name: bn,
FixState: "open",
NotFixedYet: true,
})
}
r.ScannedCves[cve.CveID] = v
}
if len(c.fixStatuses) > 0 {
contents = append(contents, c)
}
}
return nCVEs, nil
return contents
}
// ConvertToModel converts gost model to vuls model
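A minimal standalone sketch of the linux-meta version normalization performed in detect() above; the version strings are illustrative only:
package main
import (
	"fmt"
	"strings"
)
// normalizeMetaVersions trims an installed linux-meta version and a patched
// version from the Ubuntu CVE Tracker down to the comparable "X.Y.Z.ABI" form,
// mirroring the comments "5.15.0.1026.30~20.04.16 -> 5.15.0.1026" and
// "5.15.0-1026.30~20.04.16 -> 5.15.0.1026" in the diff above.
func normalizeMetaVersions(installed, patched string) (string, string) {
	if ss := strings.Split(installed, "."); len(ss) >= 4 {
		installed = strings.Join(ss[:4], ".")
	}
	if lhs, rhs, ok := strings.Cut(patched, "-"); ok {
		patched = fmt.Sprintf("%s.%s", lhs, strings.Split(rhs, ".")[0])
	}
	return installed, patched
}
func main() {
	i, p := normalizeMetaVersions("5.15.0.1026.30~20.04.16", "5.15.0-1026.30~20.04.16")
	fmt.Println(i, p) // 5.15.0.1026 5.15.0.1026
}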

View File

@@ -127,11 +127,171 @@ func TestUbuntuConvertToModel(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ubu := Ubuntu{}
got := ubu.ConvertToModel(&tt.input)
if !reflect.DeepEqual(got, &tt.expected) {
if got := (Ubuntu{}).ConvertToModel(&tt.input); !reflect.DeepEqual(got, &tt.expected) {
t.Errorf("Ubuntu.ConvertToModel() = %#v, want %#v", got, &tt.expected)
}
})
}
}
func Test_detect(t *testing.T) {
type args struct {
cves map[string]gostmodels.UbuntuCVE
fixed bool
srcPkg models.SrcPackage
runningKernelBinaryPkgName string
}
tests := []struct {
name string
args args
want []cveContent
}{
{
name: "fixed",
args: args{
cves: map[string]gostmodels.UbuntuCVE{
"CVE-0000-0000": {
Candidate: "CVE-0000-0000",
Patches: []gostmodels.UbuntuPatch{
{
PackageName: "pkg",
ReleasePatches: []gostmodels.UbuntuReleasePatch{{ReleaseName: "jammy", Status: "released", Note: "0.0.0-0"}},
},
},
},
"CVE-0000-0001": {
Candidate: "CVE-0000-0001",
Patches: []gostmodels.UbuntuPatch{
{
PackageName: "pkg",
ReleasePatches: []gostmodels.UbuntuReleasePatch{{ReleaseName: "jammy", Status: "released", Note: "0.0.0-2"}},
},
},
},
},
fixed: true,
srcPkg: models.SrcPackage{Name: "pkg", Version: "0.0.0-1", BinaryNames: []string{"pkg"}},
runningKernelBinaryPkgName: "",
},
want: []cveContent{
{
cveContent: models.CveContent{Type: models.UbuntuAPI, CveID: "CVE-0000-0001", SourceLink: "https://ubuntu.com/security/CVE-0000-0001", References: []models.Reference{}},
fixStatuses: models.PackageFixStatuses{{
Name: "pkg",
FixedIn: "0.0.0-2",
}},
},
},
},
{
name: "unfixed",
args: args{
cves: map[string]gostmodels.UbuntuCVE{
"CVE-0000-0000": {
Candidate: "CVE-0000-0000",
Patches: []gostmodels.UbuntuPatch{
{
PackageName: "pkg",
ReleasePatches: []gostmodels.UbuntuReleasePatch{{ReleaseName: "jammy", Status: "open"}},
},
},
},
},
fixed: false,
srcPkg: models.SrcPackage{Name: "pkg", Version: "0.0.0-1", BinaryNames: []string{"pkg"}},
runningKernelBinaryPkgName: "",
},
want: []cveContent{
{
cveContent: models.CveContent{Type: models.UbuntuAPI, CveID: "CVE-0000-0000", SourceLink: "https://ubuntu.com/security/CVE-0000-0000", References: []models.Reference{}},
fixStatuses: models.PackageFixStatuses{{
Name: "pkg",
FixState: "open",
NotFixedYet: true,
}},
},
},
},
{
name: "linux-signed",
args: args{
cves: map[string]gostmodels.UbuntuCVE{
"CVE-0000-0000": {
Candidate: "CVE-0000-0000",
Patches: []gostmodels.UbuntuPatch{
{
PackageName: "linux",
ReleasePatches: []gostmodels.UbuntuReleasePatch{{ReleaseName: "jammy", Status: "released", Note: "0.0.0-0"}},
},
},
},
"CVE-0000-0001": {
Candidate: "CVE-0000-0001",
Patches: []gostmodels.UbuntuPatch{
{
PackageName: "linux",
ReleasePatches: []gostmodels.UbuntuReleasePatch{{ReleaseName: "jammy", Status: "released", Note: "0.0.0-2"}},
},
},
},
},
fixed: true,
srcPkg: models.SrcPackage{Name: "linux-signed", Version: "0.0.0-1", BinaryNames: []string{"linux-image-generic", "linux-headers-generic"}},
runningKernelBinaryPkgName: "linux-image-generic",
},
want: []cveContent{
{
cveContent: models.CveContent{Type: models.UbuntuAPI, CveID: "CVE-0000-0001", SourceLink: "https://ubuntu.com/security/CVE-0000-0001", References: []models.Reference{}},
fixStatuses: models.PackageFixStatuses{{
Name: "linux-image-generic",
FixedIn: "0.0.0-2",
}},
},
},
},
{
name: "linux-meta",
args: args{
cves: map[string]gostmodels.UbuntuCVE{
"CVE-0000-0000": {
Candidate: "CVE-0000-0000",
Patches: []gostmodels.UbuntuPatch{
{
PackageName: "linux",
ReleasePatches: []gostmodels.UbuntuReleasePatch{{ReleaseName: "jammy", Status: "released", Note: "0.0.0-0"}},
},
},
},
"CVE-0000-0001": {
Candidate: "CVE-0000-0001",
Patches: []gostmodels.UbuntuPatch{
{
PackageName: "linux",
ReleasePatches: []gostmodels.UbuntuReleasePatch{{ReleaseName: "jammy", Status: "released", Note: "0.0.0-2"}},
},
},
},
},
fixed: true,
srcPkg: models.SrcPackage{Name: "linux-meta", Version: "0.0.0.1", BinaryNames: []string{"linux-image-generic", "linux-headers-generic"}},
runningKernelBinaryPkgName: "linux-image-generic",
},
want: []cveContent{
{
cveContent: models.CveContent{Type: models.UbuntuAPI, CveID: "CVE-0000-0001", SourceLink: "https://ubuntu.com/security/CVE-0000-0001", References: []models.Reference{}},
fixStatuses: models.PackageFixStatuses{{
Name: "linux-image-generic",
FixedIn: "0.0.0.2",
}},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := detect(tt.args.cves, tt.args.fixed, tt.args.srcPkg, tt.args.runningKernelBinaryPkgName); !reflect.DeepEqual(got, tt.want) {
t.Errorf("detect() = %#v, want %#v", got, tt.want)
}
})
}
}

View File

@@ -9,11 +9,12 @@ import (
"time"
"github.com/cenkalti/backoff"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/util"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
)
type response struct {
@@ -84,11 +85,6 @@ type request struct {
cveID string
}
func getAllUnfixedCvesViaHTTP(r *models.ScanResult, urlPrefix string) (
responses []response, err error) {
return getCvesWithFixStateViaHTTP(r, urlPrefix, "unfixed-cves")
}
func getCvesWithFixStateViaHTTP(r *models.ScanResult, urlPrefix, fixState string) (responses []response, err error) {
nReq := len(r.Packages) + len(r.SrcPackages)
reqChan := make(chan request, nReq)

View File

@@ -15,7 +15,7 @@ import (
formatter "github.com/kotakanbe/logrus-prefixed-formatter"
)
//LogOpts has options for logging
// LogOpts has options for logging
type LogOpts struct {
Debug bool `json:"debug,omitempty"`
DebugSQL bool `json:"debugSQL,omitempty"`
@@ -45,6 +45,13 @@ func NewNormalLogger() Logger {
return Logger{Entry: logrus.Entry{Logger: logrus.New()}}
}
// NewIODiscardLogger creates a logger that discards all output
func NewIODiscardLogger() Logger {
l := logrus.New()
l.Out = io.Discard
return Logger{Entry: logrus.Entry{Logger: l}}
}
// NewCustomLogger creates logrus
func NewCustomLogger(debug, quiet, logToFile bool, logDir, logMsgAnsiColor, serverName string) Logger {
log := logrus.New()

View File

@@ -75,7 +75,7 @@ func (v CveContents) PrimarySrcURLs(lang, myFamily, cveID string, confidences Co
}
}
order := CveContentTypes{Nvd, NewCveContentType(myFamily), GitHub}
order := append(append(CveContentTypes{Nvd}, GetCveContentTypes(myFamily)...), GitHub)
for _, ctype := range order {
if conts, found := v[ctype]; found {
for _, cont := range conts {
@@ -133,24 +133,6 @@ func (v CveContents) PatchURLs() (urls []string) {
return
}
/*
// Severities returns Severities
func (v CveContents) Severities(myFamily string) (values []CveContentStr) {
order := CveContentTypes{NVD, NewCveContentType(myFamily)}
order = append(order, AllCveContetTypes.Except(append(order)...)...)
for _, ctype := range order {
if cont, found := v[ctype]; found && 0 < len(cont.Severity) {
values = append(values, CveContentStr{
Type: ctype,
Value: cont.Severity,
})
}
}
return
}
*/
// CveContentCpes has CveContentType and Value
type CveContentCpes struct {
Type CveContentType
@@ -159,7 +141,7 @@ type CveContentCpes struct {
// Cpes returns affected CPEs of this Vulnerability
func (v CveContents) Cpes(myFamily string) (values []CveContentCpes) {
order := CveContentTypes{NewCveContentType(myFamily)}
order := GetCveContentTypes(myFamily)
order = append(order, AllCveContetTypes.Except(order...)...)
for _, ctype := range order {
@@ -185,7 +167,7 @@ type CveContentRefs struct {
// References returns References
func (v CveContents) References(myFamily string) (values []CveContentRefs) {
order := CveContentTypes{NewCveContentType(myFamily)}
order := GetCveContentTypes(myFamily)
order = append(order, AllCveContetTypes.Except(order...)...)
for _, ctype := range order {
@@ -206,7 +188,7 @@ func (v CveContents) References(myFamily string) (values []CveContentRefs) {
// CweIDs returns related CweIDs of the vulnerability
func (v CveContents) CweIDs(myFamily string) (values []CveContentStr) {
order := CveContentTypes{NewCveContentType(myFamily)}
order := GetCveContentTypes(myFamily)
order = append(order, AllCveContetTypes.Except(order...)...)
for _, ctype := range order {
if conts, found := v[ctype]; found {
@@ -352,6 +334,30 @@ func NewCveContentType(name string) CveContentType {
}
}
// GetCveContentTypes returns the CveContentTypes for the given OS family
func GetCveContentTypes(family string) []CveContentType {
switch family {
case constant.RedHat, constant.CentOS, constant.Alma, constant.Rocky:
return []CveContentType{RedHat, RedHatAPI}
case constant.Fedora:
return []CveContentType{Fedora}
case constant.Oracle:
return []CveContentType{Oracle}
case constant.Amazon:
return []CveContentType{Amazon}
case constant.Debian, constant.Raspbian:
return []CveContentType{Debian, DebianSecurityTracker}
case constant.Ubuntu:
return []CveContentType{Ubuntu, UbuntuAPI}
case constant.OpenSUSE, constant.OpenSUSELeap, constant.SUSEEnterpriseServer, constant.SUSEEnterpriseDesktop:
return []CveContentType{SUSE}
case constant.Windows:
return []CveContentType{Microsoft}
default:
return nil
}
}
const (
// Nvd is Nvd JSON
Nvd CveContentType = "nvd"

View File

@@ -3,6 +3,8 @@ package models
import (
"reflect"
"testing"
"github.com/future-architect/vuls/constant"
)
func TestExcept(t *testing.T) {
@@ -249,3 +251,61 @@ func TestCveContents_Sort(t *testing.T) {
})
}
}
func TestNewCveContentType(t *testing.T) {
tests := []struct {
name string
want CveContentType
}{
{
name: "redhat",
want: RedHat,
},
{
name: "centos",
want: RedHat,
},
{
name: "unknown",
want: Unknown,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := NewCveContentType(tt.name); got != tt.want {
t.Errorf("NewCveContentType() = %v, want %v", got, tt.want)
}
})
}
}
func TestGetCveContentTypes(t *testing.T) {
tests := []struct {
family string
want []CveContentType
}{
{
family: constant.RedHat,
want: []CveContentType{RedHat, RedHatAPI},
},
{
family: constant.Debian,
want: []CveContentType{Debian, DebianSecurityTracker},
},
{
family: constant.Ubuntu,
want: []CveContentType{Ubuntu, UbuntuAPI},
},
{
family: constant.FreeBSD,
want: nil,
},
}
for _, tt := range tests {
t.Run(tt.family, func(t *testing.T) {
if got := GetCveContentTypes(tt.family); !reflect.DeepEqual(got, tt.want) {
t.Errorf("GetCveContentTypes() = %v, want %v", got, tt.want)
}
})
}
}

96
models/github.go Normal file
View File

@@ -0,0 +1,96 @@
package models
import (
"fmt"
"strings"
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
)
// DependencyGraphManifests has a map of DependencyGraphManifest
// key: BlobPath
type DependencyGraphManifests map[string]DependencyGraphManifest
// DependencyGraphManifest has filename, repository, dependencies
type DependencyGraphManifest struct {
BlobPath string `json:"blobPath"`
Filename string `json:"filename"`
Repository string `json:"repository"`
Dependencies []Dependency `json:"dependencies"`
}
// RepoURLFilename should be in the same format as GitHubSecurityAlert.RepoURLManifestPath()
func (m DependencyGraphManifest) RepoURLFilename() string {
return fmt.Sprintf("%s/%s", m.Repository, m.Filename)
}
// Ecosystem returns the name of the ecosystem (or package manager) of the manifest/lock file, using Trivy's naming
// https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-the-dependency-graph#supported-package-ecosystems
func (m DependencyGraphManifest) Ecosystem() string {
switch {
case strings.HasSuffix(m.Filename, "Cargo.lock"),
strings.HasSuffix(m.Filename, "Cargo.toml"):
return ftypes.Cargo // Rust
case strings.HasSuffix(m.Filename, "composer.lock"),
strings.HasSuffix(m.Filename, "composer.json"):
return ftypes.Composer // PHP
case strings.HasSuffix(m.Filename, ".csproj"),
strings.HasSuffix(m.Filename, ".vbproj"),
strings.HasSuffix(m.Filename, ".nuspec"),
strings.HasSuffix(m.Filename, ".vcxproj"),
strings.HasSuffix(m.Filename, ".fsproj"),
strings.HasSuffix(m.Filename, "packages.config"):
return ftypes.NuGet // .NET languages (C#, F#, VB), C++
case strings.HasSuffix(m.Filename, "go.sum"),
strings.HasSuffix(m.Filename, "go.mod"):
return ftypes.GoModule // Go
case strings.HasSuffix(m.Filename, "pom.xml"):
return ftypes.Pom // Java, Scala
case strings.HasSuffix(m.Filename, "package-lock.json"),
strings.HasSuffix(m.Filename, "package.json"):
return ftypes.Npm // JavaScript
case strings.HasSuffix(m.Filename, "yarn.lock"):
return ftypes.Yarn // JavaScript
case strings.HasSuffix(m.Filename, "requirements.txt"),
strings.HasSuffix(m.Filename, "requirements-dev.txt"),
strings.HasSuffix(m.Filename, "setup.py"):
return ftypes.Pip // Python
case strings.HasSuffix(m.Filename, "Pipfile.lock"),
strings.HasSuffix(m.Filename, "Pipfile"):
return ftypes.Pipenv // Python
case strings.HasSuffix(m.Filename, "poetry.lock"),
strings.HasSuffix(m.Filename, "pyproject.toml"):
return ftypes.Poetry // Python
case strings.HasSuffix(m.Filename, "Gemfile.lock"),
strings.HasSuffix(m.Filename, "Gemfile"):
return ftypes.Bundler // Ruby
case strings.HasSuffix(m.Filename, ".gemspec"):
return ftypes.GemSpec // Ruby
case strings.HasSuffix(m.Filename, "pubspec.lock"),
strings.HasSuffix(m.Filename, "pubspec.yaml"):
return "pub" // Dart
case strings.HasSuffix(m.Filename, ".yml"),
strings.HasSuffix(m.Filename, ".yaml"):
return "actions" // GitHub Actions workflows
default:
return "unknown"
}
}
// Dependency has dependency package information
type Dependency struct {
PackageName string `json:"packageName"`
PackageManager string `json:"packageManager"`
Repository string `json:"repository"`
Requirements string `json:"requirements"`
}
// Version returns the version pinned in Requirements
func (d Dependency) Version() string {
s := strings.Split(d.Requirements, " ")
if len(s) == 2 && s[0] == "=" {
return s[1]
}
// in case of a ranged version, a single version cannot be determined
return ""
}
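A short usage sketch of the new manifest helpers; the repository, filename, and dependency values below are invented for illustration:
package main
import (
	"fmt"
	"github.com/future-architect/vuls/models"
)
func main() {
	m := models.DependencyGraphManifest{
		Repository: "github.com/example/app", // illustrative values, not from a real scan
		Filename:   "go.mod",
		Dependencies: []models.Dependency{
			{PackageName: "github.com/pkg/errors", Requirements: "= 0.9.1"},
		},
	}
	fmt.Println(m.Ecosystem())               // "gomod" (ftypes.GoModule)
	fmt.Println(m.RepoURLFilename())         // "github.com/example/app/go.mod"
	fmt.Println(m.Dependencies[0].Version()) // "0.9.1"; empty for ranged requirements
}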

View File

@@ -146,7 +146,9 @@ var FindLockFiles = []string{
// gomod
ftypes.GoMod, ftypes.GoSum,
// java
ftypes.MavenPom, "*.jar", "*.war", "*.ear", "*.par",
ftypes.MavenPom, "*.jar", "*.war", "*.ear", "*.par", "*gradle.lockfile",
// C / C++
ftypes.ConanLock,
}
// GetLibraryKey returns target library key
@@ -160,7 +162,7 @@ func (s LibraryScanner) GetLibraryKey() string {
return "php"
case ftypes.GoBinary, ftypes.GoModule:
return "gomod"
case ftypes.Jar, ftypes.Pom:
case ftypes.Jar, ftypes.Pom, ftypes.Gradle:
return "java"
case ftypes.Npm, ftypes.Yarn, ftypes.Pnpm, ftypes.NodePkg, ftypes.JavaScript:
return "node"
@@ -168,6 +170,8 @@ func (s LibraryScanner) GetLibraryKey() string {
return ".net"
case ftypes.Pipenv, ftypes.Poetry, ftypes.Pip, ftypes.PythonPkg:
return "python"
case ftypes.ConanLock:
return "c"
default:
return ""
}

View File

@@ -45,15 +45,16 @@ type ScanResult struct {
Errors []string `json:"errors"`
Warnings []string `json:"warnings"`
ScannedCves VulnInfos `json:"scannedCves"`
RunningKernel Kernel `json:"runningKernel"`
Packages Packages `json:"packages"`
SrcPackages SrcPackages `json:",omitempty"`
EnabledDnfModules []string `json:"enabledDnfModules,omitempty"` // for dnf modules
WordPressPackages WordPressPackages `json:",omitempty"`
LibraryScanners LibraryScanners `json:"libraries,omitempty"`
CweDict CweDict `json:"cweDict,omitempty"`
Optional map[string]interface{} `json:",omitempty"`
ScannedCves VulnInfos `json:"scannedCves"`
RunningKernel Kernel `json:"runningKernel"`
Packages Packages `json:"packages"`
SrcPackages SrcPackages `json:",omitempty"`
EnabledDnfModules []string `json:"enabledDnfModules,omitempty"` // for dnf modules
WordPressPackages WordPressPackages `json:",omitempty"`
GitHubManifests DependencyGraphManifests `json:"gitHubManifests,omitempty"`
LibraryScanners LibraryScanners `json:"libraries,omitempty"`
CweDict CweDict `json:"cweDict,omitempty"`
Optional map[string]interface{} `json:",omitempty"`
Config struct {
Scan config.Config `json:"scan"`
Report config.Config `json:"report"`

View File

@@ -284,7 +284,7 @@ type GitHubSecurityAlerts []GitHubSecurityAlert
// Add adds given arg to the slice and return the slice (immutable)
func (g GitHubSecurityAlerts) Add(alert GitHubSecurityAlert) GitHubSecurityAlerts {
for _, a := range g {
if a.PackageName == alert.PackageName {
if a.RepoURLPackageName() == alert.RepoURLPackageName() {
return g
}
}
@@ -294,19 +294,39 @@ func (g GitHubSecurityAlerts) Add(alert GitHubSecurityAlert) GitHubSecurityAlert
// Names return a slice of lib names
func (g GitHubSecurityAlerts) Names() (names []string) {
for _, a := range g {
names = append(names, a.PackageName)
names = append(names, a.RepoURLPackageName())
}
return names
}
// GitHubSecurityAlert has detected CVE-ID, PackageName, Status fetched via GitHub API
// GitHubSecurityAlert has detected CVE-ID, GSAVulnerablePackage, Status fetched via GitHub API
type GitHubSecurityAlert struct {
PackageName string `json:"packageName"`
FixedIn string `json:"fixedIn"`
AffectedRange string `json:"affectedRange"`
Dismissed bool `json:"dismissed"`
DismissedAt time.Time `json:"dismissedAt"`
DismissReason string `json:"dismissReason"`
Repository string `json:"repository"`
Package GSAVulnerablePackage `json:"package,omitempty"`
FixedIn string `json:"fixedIn"`
AffectedRange string `json:"affectedRange"`
Dismissed bool `json:"dismissed"`
DismissedAt time.Time `json:"dismissedAt"`
DismissReason string `json:"dismissReason"`
}
// RepoURLPackageName returns a string connecting the repository and package name
func (a GitHubSecurityAlert) RepoURLPackageName() string {
return fmt.Sprintf("%s %s", a.Repository, a.Package.Name)
}
// RepoURLManifestPath should be in the same format as DependencyGraphManifest.RepoURLFilename()
func (a GitHubSecurityAlert) RepoURLManifestPath() string {
return fmt.Sprintf("%s/%s", a.Repository, a.Package.ManifestPath)
}
// GSAVulnerablePackage has vulnerable package information
type GSAVulnerablePackage struct {
Name string `json:"name"`
Ecosystem string `json:"ecosystem"`
ManifestFilename string `json:"manifestFilename"`
ManifestPath string `json:"manifestPath"`
Requirements string `json:"requirements"`
}
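RepoURLManifestPath and DependencyGraphManifest.RepoURLFilename deliberately share the "repository/path" format so an alert can be joined to the manifest it was reported for; a hypothetical sketch of that lookup (all field values invented):
package main
import (
	"fmt"
	"github.com/future-architect/vuls/models"
)
func main() {
	manifests := models.DependencyGraphManifests{
		"blob/path/go.mod": {Repository: "github.com/example/app", Filename: "go.mod"},
	}
	alert := models.GitHubSecurityAlert{
		Repository: "github.com/example/app",
		Package:    models.GSAVulnerablePackage{Name: "golang.org/x/text", ManifestPath: "go.mod"},
	}
	for _, m := range manifests {
		// both sides render as "github.com/example/app/go.mod"
		if m.RepoURLFilename() == alert.RepoURLManifestPath() {
			fmt.Printf("%s is tracked by manifest %s\n", alert.RepoURLPackageName(), m.Filename)
		}
	}
}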
// LibraryFixedIns is a list of Library's FixedIn
@@ -393,7 +413,7 @@ func (v VulnInfo) Titles(lang, myFamily string) (values []CveContentStr) {
}
}
order := CveContentTypes{Trivy, Nvd, NewCveContentType(myFamily)}
order := append(CveContentTypes{Trivy, Nvd}, GetCveContentTypes(myFamily)...)
order = append(order, AllCveContetTypes.Except(append(order, Jvn)...)...)
for _, ctype := range order {
if conts, found := v.CveContents[ctype]; found {
@@ -440,7 +460,7 @@ func (v VulnInfo) Summaries(lang, myFamily string) (values []CveContentStr) {
}
}
order := CveContentTypes{Trivy, NewCveContentType(myFamily), Nvd, GitHub}
order := append(append(CveContentTypes{Trivy}, GetCveContentTypes(myFamily)...), Nvd, GitHub)
order = append(order, AllCveContetTypes.Except(append(order, Jvn)...)...)
for _, ctype := range order {
if conts, found := v.CveContents[ctype]; found {
@@ -532,7 +552,7 @@ func (v VulnInfo) Cvss3Scores() (values []CveContentCvss) {
}
}
for _, ctype := range []CveContentType{Debian, DebianSecurityTracker, Ubuntu, Amazon, Trivy, GitHub, WpScan} {
for _, ctype := range []CveContentType{Debian, DebianSecurityTracker, Ubuntu, UbuntuAPI, Amazon, Trivy, GitHub, WpScan} {
if conts, found := v.CveContents[ctype]; found {
for _, cont := range conts {
if cont.Cvss3Severity != "" {
@@ -710,7 +730,7 @@ func severityToCvssScoreRange(severity string) string {
return "7.0-8.9"
case "MODERATE", "MEDIUM":
return "4.0-6.9"
case "LOW":
case "LOW", "NEGLIGIBLE":
return "0.1-3.9"
}
return "None"
@@ -728,6 +748,10 @@ func severityToCvssScoreRange(severity string) string {
// Critical, High, Medium, Low
// https://wiki.ubuntu.com/Bugs/Importance
// https://people.canonical.com/~ubuntu-security/cve/priority.html
//
// Ubuntu CVE Tracker
// Critical, High, Medium, Low, Negligible
// https://people.canonical.com/~ubuntu-security/priority.html
func severityToCvssScoreRoughly(severity string) float64 {
switch strings.ToUpper(severity) {
case "CRITICAL":
@@ -736,7 +760,7 @@ func severityToCvssScoreRoughly(severity string) float64 {
return 8.9
case "MODERATE", "MEDIUM":
return 6.9
case "LOW":
case "LOW", "NEGLIGIBLE":
return 3.9
}
return 0
@@ -797,6 +821,8 @@ type Exploit struct {
DocumentURL *string `json:"documentURL,omitempty"`
ShellCodeURL *string `json:"shellCodeURL,omitempty"`
BinaryURL *string `json:"binaryURL,omitempty"`
PaperURL *string `json:"paperURL,omitempty"`
GHDBURL *string `json:"ghdbURL,omitempty"`
}
// Metasploit :

View File

@@ -4,15 +4,11 @@
package oval
import (
"fmt"
"strings"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/constant"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/util"
ovaldb "github.com/vulsio/goval-dictionary/db"
ovalmodels "github.com/vulsio/goval-dictionary/models"
)
@@ -219,322 +215,6 @@ func NewUbuntu(driver ovaldb.DB, baseURL string) Ubuntu {
}
// FillWithOval returns scan result after updating CVE info by OVAL
func (o Ubuntu) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
switch util.Major(r.Release) {
case "14":
kernelNamesInOval := []string{
"linux-aws",
"linux-azure",
"linux-lts-xenial",
"linux-meta",
"linux-meta-aws",
"linux-meta-azure",
"linux-meta-lts-xenial",
"linux-signed",
"linux-signed-azure",
"linux-signed-lts-xenial",
"linux",
}
return o.fillWithOval(r, kernelNamesInOval)
case "16":
kernelNamesInOval := []string{
"linux-aws",
"linux-aws-hwe",
"linux-azure",
"linux-euclid",
"linux-flo",
"linux-gcp",
"linux-gke",
"linux-goldfish",
"linux-hwe",
"linux-kvm",
"linux-mako",
"linux-meta",
"linux-meta-aws",
"linux-meta-aws-hwe",
"linux-meta-azure",
"linux-meta-gcp",
"linux-meta-hwe",
"linux-meta-kvm",
"linux-meta-oracle",
"linux-meta-raspi2",
"linux-meta-snapdragon",
"linux-oem",
"linux-oracle",
"linux-raspi2",
"linux-signed",
"linux-signed-azure",
"linux-signed-gcp",
"linux-signed-hwe",
"linux-signed-oracle",
"linux-snapdragon",
"linux",
}
return o.fillWithOval(r, kernelNamesInOval)
case "18":
kernelNamesInOval := []string{
"linux-aws",
"linux-aws-5.0",
"linux-azure",
"linux-gcp",
"linux-gcp-5.3",
"linux-gke-4.15",
"linux-gke-5.0",
"linux-gke-5.3",
"linux-hwe",
"linux-kvm",
"linux-meta",
"linux-meta-aws",
"linux-meta-aws-5.0",
"linux-meta-azure",
"linux-meta-gcp",
"linux-meta-gcp-5.3",
"linux-meta-gke-4.15",
"linux-meta-gke-5.0",
"linux-meta-gke-5.3",
"linux-meta-hwe",
"linux-meta-kvm",
"linux-meta-oem",
"linux-meta-oem-osp1",
"linux-meta-oracle",
"linux-meta-oracle-5.0",
"linux-meta-oracle-5.3",
"linux-meta-raspi2",
"linux-meta-raspi2-5.3",
"linux-meta-snapdragon",
"linux-oem",
"linux-oem-osp1",
"linux-oracle",
"linux-oracle-5.0",
"linux-oracle-5.3",
"linux-raspi2",
"linux-raspi2-5.3",
"linux-signed",
"linux-signed-azure",
"linux-signed-gcp",
"linux-signed-gcp-5.3",
"linux-signed-gke-4.15",
"linux-signed-gke-5.0",
"linux-signed-gke-5.3",
"linux-signed-hwe",
"linux-signed-oem",
"linux-signed-oem-osp1",
"linux-signed-oracle",
"linux-signed-oracle-5.0",
"linux-signed-oracle-5.3",
"linux-snapdragon",
"linux",
}
return o.fillWithOval(r, kernelNamesInOval)
case "20":
kernelNamesInOval := []string{
"linux-aws",
"linux-azure",
"linux-gcp",
"linux-kvm",
"linux-meta",
"linux-meta-aws",
"linux-meta-azure",
"linux-meta-gcp",
"linux-meta-kvm",
"linux-meta-oem-5.6",
"linux-meta-oracle",
"linux-meta-raspi",
"linux-meta-riscv",
"linux-oem-5.6",
"linux-oracle",
"linux-raspi",
"linux-raspi2",
"linux-riscv",
"linux-signed",
"linux-signed-azure",
"linux-signed-gcp",
"linux-signed-oem-5.6",
"linux-signed-oracle",
"linux",
}
return o.fillWithOval(r, kernelNamesInOval)
case "21":
kernelNamesInOval := []string{
"linux-aws",
"linux-base-sgx",
"linux-base",
"linux-cloud-tools-common",
"linux-cloud-tools-generic",
"linux-cloud-tools-lowlatency",
"linux-cloud-tools-virtual",
"linux-gcp",
"linux-generic",
"linux-gke",
"linux-headers-aws",
"linux-headers-gcp",
"linux-headers-gke",
"linux-headers-oracle",
"linux-image-aws",
"linux-image-extra-virtual",
"linux-image-gcp",
"linux-image-generic",
"linux-image-gke",
"linux-image-lowlatency",
"linux-image-oracle",
"linux-image-virtual",
"linux-lowlatency",
"linux-modules-extra-aws",
"linux-modules-extra-gcp",
"linux-modules-extra-gke",
"linux-oracle",
"linux-tools-aws",
"linux-tools-common",
"linux-tools-gcp",
"linux-tools-generic",
"linux-tools-gke",
"linux-tools-host",
"linux-tools-lowlatency",
"linux-tools-oracle",
"linux-tools-virtual",
"linux-virtual",
}
return o.fillWithOval(r, kernelNamesInOval)
case "22":
kernelNamesInOval := []string{
"linux-aws",
"linux-azure",
"linux-gcp",
"linux-generic",
"linux-gke",
"linux-header-aws",
"linux-header-azure",
"linux-header-gcp",
"linux-header-generic",
"linux-header-gke",
"linux-header-oracle",
"linux-image-aws",
"linux-image-azure",
"linux-image-gcp",
"linux-image-generic",
"linux-image-gke",
"linux-image-oracle",
"linux-oracle",
"linux-tools-aws",
"linux-tools-azure",
"linux-tools-common",
"linux-tools-gcp",
"linux-tools-generic",
"linux-tools-gke",
"linux-tools-oracle",
}
return o.fillWithOval(r, kernelNamesInOval)
}
return 0, fmt.Errorf("Ubuntu %s is not support for now", r.Release)
}
func (o Ubuntu) fillWithOval(r *models.ScanResult, kernelNamesInOval []string) (nCVEs int, err error) {
linuxImage := "linux-image-" + r.RunningKernel.Release
runningKernelVersion := ""
kernelPkgInOVAL := ""
isOVALKernelPkgAdded := false
unusedKernels := []models.Package{}
copiedSourcePkgs := models.SrcPackages{}
if r.Container.ContainerID == "" {
if v, ok := r.Packages[linuxImage]; ok {
runningKernelVersion = v.Version
} else {
logging.Log.Warnf("Unable to detect vulns of running kernel because the version of the running kernel is unknown. server: %s",
r.ServerName)
}
for _, n := range kernelNamesInOval {
if p, ok := r.Packages[n]; ok {
kernelPkgInOVAL = p.Name
break
}
}
// remove unused kernels from packages to prevent detecting vulns of unused kernel
for _, n := range kernelNamesInOval {
if v, ok := r.Packages[n]; ok {
unusedKernels = append(unusedKernels, v)
delete(r.Packages, n)
}
}
// Remove linux-* in order to detect only vulnerabilities in the running kernel.
for n := range r.Packages {
if n != kernelPkgInOVAL && strings.HasPrefix(n, "linux-") {
unusedKernels = append(unusedKernels, r.Packages[n])
delete(r.Packages, n)
}
}
for srcPackName, srcPack := range r.SrcPackages {
copiedSourcePkgs[srcPackName] = srcPack
targetBinaryNames := []string{}
for _, n := range srcPack.BinaryNames {
if n == kernelPkgInOVAL || !strings.HasPrefix(n, "linux-") {
targetBinaryNames = append(targetBinaryNames, n)
}
}
srcPack.BinaryNames = targetBinaryNames
r.SrcPackages[srcPackName] = srcPack
}
if kernelPkgInOVAL == "" {
logging.Log.Warnf("The OVAL name of the running kernel image %+v is not found. So vulns of `linux` wll be detected. server: %s",
r.RunningKernel, r.ServerName)
kernelPkgInOVAL = "linux"
isOVALKernelPkgAdded = true
}
if runningKernelVersion != "" {
r.Packages[kernelPkgInOVAL] = models.Package{
Name: kernelPkgInOVAL,
Version: runningKernelVersion,
}
}
}
var relatedDefs ovalResult
if o.driver == nil {
if relatedDefs, err = getDefsByPackNameViaHTTP(r, o.baseURL); err != nil {
return 0, xerrors.Errorf("Failed to get Definitions via HTTP. err: %w", err)
}
} else {
if relatedDefs, err = getDefsByPackNameFromOvalDB(r, o.driver); err != nil {
return 0, xerrors.Errorf("Failed to get Definitions from DB. err: %w", err)
}
}
if isOVALKernelPkgAdded {
delete(r.Packages, kernelPkgInOVAL)
}
for _, p := range unusedKernels {
r.Packages[p.Name] = p
}
r.SrcPackages = copiedSourcePkgs
for _, defPacks := range relatedDefs.entries {
// Remove "linux" added above for searching oval
// "linux" is not a real package name (key of affected packages in OVAL)
if nfy, ok := defPacks.binpkgFixstat[kernelPkgInOVAL]; isOVALKernelPkgAdded && ok {
defPacks.binpkgFixstat[linuxImage] = nfy
delete(defPacks.binpkgFixstat, kernelPkgInOVAL)
for i, p := range defPacks.def.AffectedPacks {
if p.Name == kernelPkgInOVAL {
p.Name = linuxImage
defPacks.def.AffectedPacks[i] = p
}
}
}
o.update(r, defPacks)
}
for _, vuln := range r.ScannedCves {
if conts, ok := vuln.CveContents[models.Ubuntu]; ok {
for i, cont := range conts {
cont.SourceLink = "http://people.ubuntu.com/~ubuntu-security/cve/" + cont.CveID
vuln.CveContents[models.Ubuntu][i] = cont
}
}
}
return len(relatedDefs.entries), nil
func (o Ubuntu) FillWithOval(_ *models.ScanResult) (nCVEs int, err error) {
return 0, nil
}

View File

@@ -68,12 +68,15 @@ func (o RedHatBase) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
for _, d := range vuln.DistroAdvisories {
if conts, ok := vuln.CveContents[models.Amazon]; ok {
for i, cont := range conts {
if strings.HasPrefix(d.AdvisoryID, "ALAS2022-") {
cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/AL2022/%s.html", strings.ReplaceAll(d.AdvisoryID, "ALAS2022", "ALAS"))
} else if strings.HasPrefix(d.AdvisoryID, "ALAS2-") {
cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/AL2/%s.html", strings.ReplaceAll(d.AdvisoryID, "ALAS2", "ALAS"))
} else if strings.HasPrefix(d.AdvisoryID, "ALAS-") {
switch {
case strings.HasPrefix(d.AdvisoryID, "ALAS-"):
cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/%s.html", d.AdvisoryID)
case strings.HasPrefix(d.AdvisoryID, "ALAS2-"):
cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/AL2/%s.html", strings.ReplaceAll(d.AdvisoryID, "ALAS2", "ALAS"))
case strings.HasPrefix(d.AdvisoryID, "ALAS2022-"):
cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/AL2022/%s.html", strings.ReplaceAll(d.AdvisoryID, "ALAS2022", "ALAS"))
case strings.HasPrefix(d.AdvisoryID, "ALAS2023-"):
cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/AL2023/%s.html", strings.ReplaceAll(d.AdvisoryID, "ALAS2023", "ALAS"))
}
vuln.CveContents[models.Amazon][i] = cont
}

View File

@@ -112,13 +112,25 @@ func getDefsByPackNameViaHTTP(r *models.ScanResult, url string) (relatedDefs ova
case constant.CentOS:
ovalRelease = strings.TrimPrefix(r.Release, "stream")
case constant.Amazon:
switch strings.Fields(r.Release)[0] {
case "2022":
ovalRelease = "2022"
switch s := strings.Fields(r.Release)[0]; s {
case "1":
ovalRelease = "1"
case "2":
ovalRelease = "2"
case "2022":
ovalRelease = "2022"
case "2023":
ovalRelease = "2023"
case "2025":
ovalRelease = "2025"
case "2027":
ovalRelease = "2027"
case "2029":
ovalRelease = "2029"
default:
ovalRelease = "1"
if _, err := time.Parse("2006.01", s); err == nil {
ovalRelease = "1"
}
}
}
@@ -274,13 +286,25 @@ func getDefsByPackNameFromOvalDB(r *models.ScanResult, driver ovaldb.DB) (relate
case constant.CentOS:
ovalRelease = strings.TrimPrefix(r.Release, "stream")
case constant.Amazon:
switch strings.Fields(r.Release)[0] {
case "2022":
ovalRelease = "2022"
switch s := strings.Fields(r.Release)[0]; s {
case "1":
ovalRelease = "1"
case "2":
ovalRelease = "2"
case "2022":
ovalRelease = "2022"
case "2023":
ovalRelease = "2023"
case "2025":
ovalRelease = "2025"
case "2027":
ovalRelease = "2027"
case "2029":
ovalRelease = "2029"
default:
ovalRelease = "1"
if _, err := time.Parse("2006.01", s); err == nil {
ovalRelease = "1"
}
}
}
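A standalone sketch of the Amazon Linux release mapping added above; it is simplified to report only whether a known OVAL release key could be derived:
package main
import (
	"fmt"
	"strings"
	"time"
)
// amazonOvalRelease mirrors the switch above: supported major releases map to
// themselves, and a "YYYY.MM" dateline (how Amazon Linux 1 reports itself,
// e.g. "2018.03") maps to "1".
func amazonOvalRelease(release string) (string, bool) {
	switch s := strings.Fields(release)[0]; s {
	case "1", "2", "2022", "2023", "2025", "2027", "2029":
		return s, true
	default:
		if _, err := time.Parse("2006.01", s); err == nil {
			return "1", true
		}
		return "", false
	}
}
func main() {
	fmt.Println(amazonOvalRelease("2 (Karoo)")) // 2 true
	fmt.Println(amazonOvalRelease("2023"))      // 2023 true
	fmt.Println(amazonOvalRelease("2018.03"))   // 1 true
}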

View File

@@ -20,6 +20,7 @@ type ChatWorkWriter struct {
Proxy string
}
// Write results to ChatWork
func (w ChatWorkWriter) Write(rs ...models.ScanResult) (err error) {
for _, r := range rs {

View File

@@ -23,6 +23,7 @@ type EMailWriter struct {
Cnf config.SMTPConf
}
// Write results to Email
func (w EMailWriter) Write(rs ...models.ScanResult) (err error) {
var message string
sender := NewEMailSender(w.Cnf)
@@ -31,7 +32,7 @@ func (w EMailWriter) Write(rs ...models.ScanResult) (err error) {
if w.FormatOneEMail {
message += formatFullPlainText(r) + "\r\n\r\n"
mm := r.ScannedCves.CountGroupBySeverity()
keys := []string{"High", "Medium", "Low", "Unknown"}
keys := []string{"Critical", "High", "Medium", "Low", "Unknown"}
for _, k := range keys {
m[k] += mm[k]
}
@@ -60,9 +61,9 @@ func (w EMailWriter) Write(rs ...models.ScanResult) (err error) {
}
}
summary := fmt.Sprintf("Total: %d (High:%d Medium:%d Low:%d ?:%d)",
m["High"]+m["Medium"]+m["Low"]+m["Unknown"],
m["High"], m["Medium"], m["Low"], m["Unknown"])
summary := fmt.Sprintf("Total: %d (Critical:%d High:%d Medium:%d Low:%d ?:%d)",
m["Critical"]+m["High"]+m["Medium"]+m["Low"]+m["Unknown"],
m["Critical"], m["High"], m["Medium"], m["Low"], m["Unknown"])
origmessage := message
if w.FormatOneEMail {

View File

@@ -21,6 +21,7 @@ type GoogleChatWriter struct {
Proxy string
}
// Write results to Google Chat
func (w GoogleChatWriter) Write(rs ...models.ScanResult) (err error) {
re := regexp.MustCompile(w.Cnf.ServerNameRegexp)

View File

@@ -2,26 +2,33 @@ package reporter
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"github.com/future-architect/vuls/models"
"github.com/CycloneDX/cyclonedx-go"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/reporter/sbom"
)
// LocalFileWriter writes results to a local file.
type LocalFileWriter struct {
CurrentDir string
DiffPlus bool
DiffMinus bool
FormatJSON bool
FormatCsv bool
FormatFullText bool
FormatOneLineText bool
FormatList bool
Gzip bool
CurrentDir string
DiffPlus bool
DiffMinus bool
FormatJSON bool
FormatCsv bool
FormatFullText bool
FormatOneLineText bool
FormatList bool
FormatCycloneDXJSON bool
FormatCycloneDXXML bool
Gzip bool
}
// Write results to Local File
func (w LocalFileWriter) Write(rs ...models.ScanResult) (err error) {
if w.FormatOneLineText {
path := filepath.Join(w.CurrentDir, "summary.txt")
@@ -86,6 +93,28 @@ func (w LocalFileWriter) Write(rs ...models.ScanResult) (err error) {
}
}
if w.FormatCycloneDXJSON {
bs, err := sbom.GenerateCycloneDX(cyclonedx.BOMFileFormatJSON, r)
if err != nil {
return xerrors.Errorf("Failed to generate CycloneDX JSON. err: %w", err)
}
p := fmt.Sprintf("%s_cyclonedx.json", path)
if err := w.writeFile(p, bs, 0600); err != nil {
return xerrors.Errorf("Failed to write CycloneDX JSON. path: %s, err: %w", p, err)
}
}
if w.FormatCycloneDXXML {
bs, err := sbom.GenerateCycloneDX(cyclonedx.BOMFileFormatXML, r)
if err != nil {
return xerrors.Errorf("Failed to generate CycloneDX XML. err: %w", err)
}
p := fmt.Sprintf("%s_cyclonedx.xml", path)
if err := w.writeFile(p, bs, 0600); err != nil {
return xerrors.Errorf("Failed to write CycloneDX XML. path: %s, err: %w", p, err)
}
}
}
return nil
}
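For context, the new CycloneDX flags would be driven roughly like this; the output directory and the empty ScanResult are placeholders, since in practice the writer receives real scan results:
package main
import (
	"log"
	"github.com/future-architect/vuls/models"
	"github.com/future-architect/vuls/reporter"
)
func main() {
	w := reporter.LocalFileWriter{
		CurrentDir:          "/tmp/results", // placeholder output directory
		FormatJSON:          true,
		FormatCycloneDXJSON: true, // additionally writes a *_cyclonedx.json per server
	}
	var r models.ScanResult // placeholder; normally produced by a scan/report run
	if err := w.Write(r); err != nil {
		log.Fatal(err)
	}
}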

561
reporter/sbom/cyclonedx.go Normal file
View File

@@ -0,0 +1,561 @@
package sbom
import (
"bytes"
"fmt"
"strconv"
"strings"
"time"
cdx "github.com/CycloneDX/cyclonedx-go"
"github.com/google/uuid"
"github.com/package-url/packageurl-go"
"golang.org/x/exp/maps"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/constant"
"github.com/future-architect/vuls/models"
)
// GenerateCycloneDX generates a string in CycloneDX format
func GenerateCycloneDX(format cdx.BOMFileFormat, r models.ScanResult) ([]byte, error) {
bom := cdx.NewBOM()
bom.SerialNumber = uuid.New().URN()
bom.Metadata = cdxMetadata(r)
bom.Components, bom.Dependencies, bom.Vulnerabilities = cdxComponents(r, bom.Metadata.Component.BOMRef)
buf := new(bytes.Buffer)
enc := cdx.NewBOMEncoder(buf, format)
enc.SetPretty(true)
if err := enc.Encode(bom); err != nil {
return nil, xerrors.Errorf("Failed to encode CycloneDX. err: %w", err)
}
return buf.Bytes(), nil
}
func cdxMetadata(result models.ScanResult) *cdx.Metadata {
metadata := cdx.Metadata{
Timestamp: result.ReportedAt.Format(time.RFC3339),
Tools: &[]cdx.Tool{
{
Vendor: "future-architect",
Name: "vuls",
Version: fmt.Sprintf("%s-%s", result.ReportedVersion, result.ReportedRevision),
},
},
Component: &cdx.Component{
BOMRef: uuid.NewString(),
Type: cdx.ComponentTypeOS,
Name: result.ServerName,
},
}
return &metadata
}
func cdxComponents(result models.ScanResult, metaBomRef string) (*[]cdx.Component, *[]cdx.Dependency, *[]cdx.Vulnerability) {
var components []cdx.Component
bomRefs := map[string][]string{}
ospkgToPURL := map[string]string{}
if ospkgComps := ospkgToCdxComponents(result.Family, result.Release, result.RunningKernel, result.Packages, result.SrcPackages, ospkgToPURL); ospkgComps != nil {
bomRefs[metaBomRef] = append(bomRefs[metaBomRef], ospkgComps[0].BOMRef)
for _, comp := range ospkgComps[1:] {
bomRefs[ospkgComps[0].BOMRef] = append(bomRefs[ospkgComps[0].BOMRef], comp.BOMRef)
}
components = append(components, ospkgComps...)
}
if cpeComps := cpeToCdxComponents(result.ScannedCves); cpeComps != nil {
bomRefs[metaBomRef] = append(bomRefs[metaBomRef], cpeComps[0].BOMRef)
for _, comp := range cpeComps[1:] {
bomRefs[cpeComps[0].BOMRef] = append(bomRefs[cpeComps[0].BOMRef], comp.BOMRef)
}
components = append(components, cpeComps...)
}
libpkgToPURL := map[string]map[string]string{}
for _, libscanner := range result.LibraryScanners {
libpkgToPURL[libscanner.LockfilePath] = map[string]string{}
libpkgComps := libpkgToCdxComponents(libscanner, libpkgToPURL)
bomRefs[metaBomRef] = append(bomRefs[metaBomRef], libpkgComps[0].BOMRef)
for _, comp := range libpkgComps[1:] {
bomRefs[libpkgComps[0].BOMRef] = append(bomRefs[libpkgComps[0].BOMRef], comp.BOMRef)
}
components = append(components, libpkgComps...)
}
ghpkgToPURL := map[string]map[string]string{}
for _, ghm := range result.GitHubManifests {
ghpkgToPURL[ghm.RepoURLFilename()] = map[string]string{}
ghpkgComps := ghpkgToCdxComponents(ghm, ghpkgToPURL)
bomRefs[metaBomRef] = append(bomRefs[metaBomRef], ghpkgComps[0].BOMRef)
for _, comp := range ghpkgComps[1:] {
bomRefs[ghpkgComps[0].BOMRef] = append(bomRefs[ghpkgComps[0].BOMRef], comp.BOMRef)
}
components = append(components, ghpkgComps...)
}
wppkgToPURL := map[string]string{}
if wppkgComps := wppkgToCdxComponents(result.WordPressPackages, wppkgToPURL); wppkgComps != nil {
bomRefs[metaBomRef] = append(bomRefs[metaBomRef], wppkgComps[0].BOMRef)
for _, comp := range wppkgComps[1:] {
bomRefs[wppkgComps[0].BOMRef] = append(bomRefs[wppkgComps[0].BOMRef], comp.BOMRef)
}
components = append(components, wppkgComps...)
}
return &components, cdxDependencies(bomRefs), cdxVulnerabilities(result, ospkgToPURL, libpkgToPURL, ghpkgToPURL, wppkgToPURL)
}
func osToCdxComponent(family, release, runningKernelRelease, runningKernelVersion string) cdx.Component {
props := []cdx.Property{
{
Name: "future-architect:vuls:Type",
Value: "Package",
},
}
if runningKernelRelease != "" {
props = append(props, cdx.Property{
Name: "RunningKernelRelease",
Value: runningKernelRelease,
})
}
if runningKernelVersion != "" {
props = append(props, cdx.Property{
Name: "RunningKernelVersion",
Value: runningKernelVersion,
})
}
return cdx.Component{
BOMRef: uuid.NewString(),
Type: cdx.ComponentTypeOS,
Name: family,
Version: release,
Properties: &props,
}
}
func ospkgToCdxComponents(family, release string, runningKernel models.Kernel, binpkgs models.Packages, srcpkgs models.SrcPackages, ospkgToPURL map[string]string) []cdx.Component {
if family == "" {
return nil
}
components := []cdx.Component{
osToCdxComponent(family, release, runningKernel.Release, runningKernel.Version),
}
if len(binpkgs) == 0 {
return components
}
type srcpkg struct {
name string
version string
arch string
}
binToSrc := map[string]srcpkg{}
for _, pack := range srcpkgs {
for _, binpkg := range pack.BinaryNames {
binToSrc[binpkg] = srcpkg{
name: pack.Name,
version: pack.Version,
arch: pack.Arch,
}
}
}
for _, pack := range binpkgs {
var props []cdx.Property
if p, ok := binToSrc[pack.Name]; ok {
if p.name != "" {
props = append(props, cdx.Property{
Name: "future-architect:vuls:SrcName",
Value: p.name,
})
}
if p.version != "" {
props = append(props, cdx.Property{
Name: "future-architect:vuls:SrcVersion",
Value: p.version,
})
}
if p.arch != "" {
props = append(props, cdx.Property{
Name: "future-architect:vuls:SrcArch",
Value: p.arch,
})
}
}
purl := toPkgPURL(family, release, pack.Name, pack.Version, pack.Release, pack.Arch, pack.Repository)
components = append(components, cdx.Component{
BOMRef: purl,
Type: cdx.ComponentTypeLibrary,
Name: pack.Name,
Version: pack.Version,
PackageURL: purl,
Properties: &props,
})
ospkgToPURL[pack.Name] = purl
}
return components
}
func cpeToCdxComponents(scannedCves models.VulnInfos) []cdx.Component {
cpes := map[string]struct{}{}
for _, cve := range scannedCves {
for _, cpe := range cve.CpeURIs {
cpes[cpe] = struct{}{}
}
}
if len(cpes) == 0 {
return nil
}
components := []cdx.Component{
{
BOMRef: uuid.NewString(),
Type: cdx.ComponentTypeApplication,
Name: "CPEs",
Properties: &[]cdx.Property{
{
Name: "future-architect:vuls:Type",
Value: "CPE",
},
},
},
}
for cpe := range cpes {
components = append(components, cdx.Component{
BOMRef: cpe,
Type: cdx.ComponentTypeLibrary,
Name: cpe,
CPE: cpe,
})
}
return components
}
func libpkgToCdxComponents(libscanner models.LibraryScanner, libpkgToPURL map[string]map[string]string) []cdx.Component {
components := []cdx.Component{
{
BOMRef: uuid.NewString(),
Type: cdx.ComponentTypeApplication,
Name: libscanner.LockfilePath,
Properties: &[]cdx.Property{
{
Name: "future-architect:vuls:Type",
Value: libscanner.Type,
},
},
},
}
for _, lib := range libscanner.Libs {
purl := packageurl.NewPackageURL(libscanner.Type, "", lib.Name, lib.Version, packageurl.Qualifiers{{Key: "file_path", Value: libscanner.LockfilePath}}, "").ToString()
components = append(components, cdx.Component{
BOMRef: purl,
Type: cdx.ComponentTypeLibrary,
Name: lib.Name,
Version: lib.Version,
PackageURL: purl,
})
libpkgToPURL[libscanner.LockfilePath][lib.Name] = purl
}
return components
}
func ghpkgToCdxComponents(m models.DependencyGraphManifest, ghpkgToPURL map[string]map[string]string) []cdx.Component {
components := []cdx.Component{
{
BOMRef: uuid.NewString(),
Type: cdx.ComponentTypeApplication,
Name: m.BlobPath,
Properties: &[]cdx.Property{
{
Name: "future-architect:vuls:Type",
Value: m.Ecosystem(),
},
},
},
}
for _, dep := range m.Dependencies {
purl := packageurl.NewPackageURL(m.Ecosystem(), "", dep.PackageName, dep.Version(), packageurl.Qualifiers{{Key: "repo_url", Value: m.Repository}, {Key: "file_path", Value: m.Filename}}, "").ToString()
components = append(components, cdx.Component{
BOMRef: purl,
Type: cdx.ComponentTypeLibrary,
Name: dep.PackageName,
Version: dep.Version(),
PackageURL: purl,
})
ghpkgToPURL[m.RepoURLFilename()][dep.PackageName] = purl
}
return components
}
func wppkgToCdxComponents(wppkgs models.WordPressPackages, wppkgToPURL map[string]string) []cdx.Component {
if len(wppkgs) == 0 {
return nil
}
components := []cdx.Component{
{
BOMRef: uuid.NewString(),
Type: cdx.ComponentTypeApplication,
Name: "wordpress",
Properties: &[]cdx.Property{
{
Name: "future-architect:vuls:Type",
Value: "WordPress",
},
},
},
}
for _, wppkg := range wppkgs {
purl := packageurl.NewPackageURL("wordpress", wppkg.Type, wppkg.Name, wppkg.Version, packageurl.Qualifiers{{Key: "status", Value: wppkg.Status}}, "").ToString()
components = append(components, cdx.Component{
BOMRef: purl,
Type: cdx.ComponentTypeLibrary,
Name: wppkg.Name,
Version: wppkg.Version,
PackageURL: purl,
})
wppkgToPURL[wppkg.Name] = purl
}
return components
}
func cdxDependencies(bomRefs map[string][]string) *[]cdx.Dependency {
dependencies := make([]cdx.Dependency, 0, len(bomRefs))
for ref, depRefs := range bomRefs {
ds := depRefs
dependencies = append(dependencies, cdx.Dependency{
Ref: ref,
Dependencies: &ds,
})
}
return &dependencies
}
func toPkgPURL(osFamily, osVersion, packName, packVersion, packRelease, packArch, packRepository string) string {
var purlType string
switch osFamily {
case constant.Alma, constant.Amazon, constant.CentOS, constant.Fedora, constant.OpenSUSE, constant.OpenSUSELeap, constant.Oracle, constant.RedHat, constant.Rocky, constant.SUSEEnterpriseDesktop, constant.SUSEEnterpriseServer:
purlType = "rpm"
case constant.Alpine:
purlType = "apk"
case constant.Debian, constant.Raspbian, constant.Ubuntu:
purlType = "deb"
case constant.FreeBSD:
purlType = "pkg"
case constant.Windows:
purlType = "win"
case constant.ServerTypePseudo:
purlType = "pseudo"
default:
purlType = "unknown"
}
version := packVersion
if packRelease != "" {
version = fmt.Sprintf("%s-%s", packVersion, packRelease)
}
var qualifiers packageurl.Qualifiers
if osVersion != "" {
qualifiers = append(qualifiers, packageurl.Qualifier{
Key: "distro",
Value: osVersion,
})
}
if packArch != "" {
qualifiers = append(qualifiers, packageurl.Qualifier{
Key: "arch",
Value: packArch,
})
}
if packRepository != "" {
qualifiers = append(qualifiers, packageurl.Qualifier{
Key: "repo",
Value: packRepository,
})
}
return packageurl.NewPackageURL(purlType, osFamily, packName, version, qualifiers, "").ToString()
}
func cdxVulnerabilities(result models.ScanResult, ospkgToPURL map[string]string, libpkgToPURL, ghpkgToPURL map[string]map[string]string, wppkgToPURL map[string]string) *[]cdx.Vulnerability {
vulnerabilities := make([]cdx.Vulnerability, 0, len(result.ScannedCves))
for _, cve := range result.ScannedCves {
vulnerabilities = append(vulnerabilities, cdx.Vulnerability{
ID: cve.CveID,
Ratings: cdxRatings(cve.CveContents),
CWEs: cdxCWEs(cve.CveContents),
Description: cdxDescription(cve.CveContents),
Advisories: cdxAdvisories(cve.CveContents),
Affects: cdxAffects(cve, ospkgToPURL, libpkgToPURL, ghpkgToPURL, wppkgToPURL),
})
}
return &vulnerabilities
}
func cdxRatings(cveContents models.CveContents) *[]cdx.VulnerabilityRating {
var ratings []cdx.VulnerabilityRating
for _, contents := range cveContents {
for _, content := range contents {
if content.Cvss2Score != 0 || content.Cvss2Vector != "" || content.Cvss2Severity != "" {
ratings = append(ratings, cdxCVSS2Rating(string(content.Type), content.Cvss2Vector, content.Cvss2Score, content.Cvss2Severity))
}
if content.Cvss3Score != 0 || content.Cvss3Vector != "" || content.Cvss3Severity != "" {
ratings = append(ratings, cdxCVSS3Rating(string(content.Type), content.Cvss3Vector, content.Cvss3Score, content.Cvss3Severity))
}
}
}
return &ratings
}
func cdxCVSS2Rating(source, vector string, score float64, severity string) cdx.VulnerabilityRating {
r := cdx.VulnerabilityRating{
Source: &cdx.Source{Name: source},
Method: cdx.ScoringMethodCVSSv2,
Vector: vector,
}
if score != 0 {
r.Score = &score
}
switch strings.ToLower(severity) {
case "high":
r.Severity = cdx.SeverityHigh
case "medium":
r.Severity = cdx.SeverityMedium
case "low":
r.Severity = cdx.SeverityLow
default:
r.Severity = cdx.SeverityUnknown
}
return r
}
func cdxCVSS3Rating(source, vector string, score float64, severity string) cdx.VulnerabilityRating {
r := cdx.VulnerabilityRating{
Source: &cdx.Source{Name: source},
Method: cdx.ScoringMethodCVSSv3,
Vector: vector,
}
if strings.HasPrefix(vector, "CVSS:3.1") {
r.Method = cdx.ScoringMethodCVSSv31
}
if score != 0 {
r.Score = &score
}
switch strings.ToLower(severity) {
case "critical":
r.Severity = cdx.SeverityCritical
case "high":
r.Severity = cdx.SeverityHigh
case "medium":
r.Severity = cdx.SeverityMedium
case "low":
r.Severity = cdx.SeverityLow
case "none":
r.Severity = cdx.SeverityNone
default:
r.Severity = cdx.SeverityUnknown
}
return r
}
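A quick sketch of the CVSSv3 conversion above, using an example NVD-style vector and score (not taken from this change):

    r := cdxCVSS3Rating("nvd", "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", 9.8, "critical")
    // the "CVSS:3.1" prefix switches Method to cdx.ScoringMethodCVSSv31,
    // *r.Score is 9.8 and r.Severity is cdx.SeverityCritical.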
func cdxAffects(cve models.VulnInfo, ospkgToPURL map[string]string, libpkgToPURL, ghpkgToPURL map[string]map[string]string, wppkgToPURL map[string]string) *[]cdx.Affects {
affects := make([]cdx.Affects, 0, len(cve.AffectedPackages)+len(cve.CpeURIs)+len(cve.LibraryFixedIns)+len(cve.WpPackageFixStats))
for _, p := range cve.AffectedPackages {
affects = append(affects, cdx.Affects{
Ref: ospkgToPURL[p.Name],
})
}
for _, cpe := range cve.CpeURIs {
affects = append(affects, cdx.Affects{
Ref: cpe,
})
}
for _, lib := range cve.LibraryFixedIns {
affects = append(affects, cdx.Affects{
Ref: libpkgToPURL[lib.Path][lib.Name],
})
}
for _, alert := range cve.GitHubSecurityAlerts {
// TODO: not in dependency graph
if purl, ok := ghpkgToPURL[alert.RepoURLManifestPath()][alert.Package.Name]; ok {
affects = append(affects, cdx.Affects{
Ref: purl,
})
}
}
for _, wppack := range cve.WpPackageFixStats {
affects = append(affects, cdx.Affects{
Ref: wppkgToPURL[wppack.Name],
})
}
return &affects
}
func cdxCWEs(cveContents models.CveContents) *[]int {
m := map[int]struct{}{}
for _, contents := range cveContents {
for _, content := range contents {
for _, cweID := range content.CweIDs {
if !strings.HasPrefix(cweID, "CWE-") {
continue
}
i, err := strconv.Atoi(strings.TrimPrefix(cweID, "CWE-"))
if err != nil {
continue
}
m[i] = struct{}{}
}
}
}
cweIDs := maps.Keys(m)
return &cweIDs
}
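For example (hypothetical CWE IDs, assuming models.CveContents is the usual map of content type to contents):

    ids := cdxCWEs(models.CveContents{
        models.Nvd: {{CweIDs: []string{"CWE-79", "CWE-89", "CWE-79", "NVD-CWE-noinfo"}}},
    })
    // *ids holds {79, 89}: "NVD-CWE-noinfo" fails the "CWE-" prefix check,
    // duplicates collapse in the map, and the order of the returned slice is unspecified.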
func cdxDescription(cveContents models.CveContents) string {
if contents, ok := cveContents[models.Nvd]; ok {
return contents[0].Summary
}
return ""
}
func cdxAdvisories(cveContents models.CveContents) *[]cdx.Advisory {
urls := map[string]struct{}{}
for _, contents := range cveContents {
for _, content := range contents {
if content.SourceLink != "" {
urls[content.SourceLink] = struct{}{}
}
for _, r := range content.References {
urls[r.Link] = struct{}{}
}
}
}
advisories := make([]cdx.Advisory, 0, len(urls))
for u := range urls {
advisories = append(advisories, cdx.Advisory{
URL: u,
})
}
return &advisories
}

View File

@@ -33,12 +33,13 @@ type message struct {
Attachments []slack.Attachment `json:"attachments"`
}
// Write results to Slack
func (w SlackWriter) Write(rs ...models.ScanResult) (err error) {
channel := w.Cnf.Channel
for _, r := range rs {
w.lang, w.osFamily = r.Lang, r.Family
if channel == "${servername}" {
channel := w.Cnf.Channel
if w.Cnf.Channel == "${servername}" {
channel = fmt.Sprintf("#%s", r.ServerName)
}
@@ -195,7 +196,7 @@ func (w SlackWriter) toSlackAttachments(r models.ScanResult) (attaches []slack.A
candidate = append(candidate, "?")
}
for _, n := range vinfo.GitHubSecurityAlerts {
installed = append(installed, n.PackageName)
installed = append(installed, n.RepoURLPackageName())
candidate = append(candidate, "?")
}

View File

@@ -23,6 +23,7 @@ func (w StdoutWriter) WriteScanSummary(rs ...models.ScanResult) {
fmt.Printf("%s\n", formatScanSummary(rs...))
}
// Write results to stdout
func (w StdoutWriter) Write(rs ...models.ScanResult) error {
if w.FormatOneLineText {
fmt.Print("\n\n")

View File

@@ -16,6 +16,7 @@ type SyslogWriter struct {
Cnf config.SyslogConf
}
// Write results to syslog
func (w SyslogWriter) Write(rs ...models.ScanResult) (err error) {
facility, _ := w.Cnf.GetFacility()
severity, _ := w.Cnf.GetSeverity()

View File

@@ -21,6 +21,7 @@ type TelegramWriter struct {
Proxy string
}
// Write results to Telegram
func (w TelegramWriter) Write(rs ...models.ScanResult) (err error) {
for _, r := range rs {
msgs := []string{fmt.Sprintf("*%s*\n%s\n%s\n%s",

View File

@@ -404,7 +404,7 @@ No CVE-IDs are found in updatable packages.
}
for _, alert := range vuln.GitHubSecurityAlerts {
data = append(data, []string{"GitHub", alert.PackageName})
data = append(data, []string{"GitHub", alert.RepoURLPackageName()})
}
for _, wp := range vuln.WpPackageFixStats {
@@ -730,11 +730,7 @@ func getMinusDiffCves(previous, current models.ScanResult) models.VulnInfos {
}
func isCveInfoUpdated(cveID string, previous, current models.ScanResult) bool {
cTypes := []models.CveContentType{
models.Nvd,
models.Jvn,
models.NewCveContentType(current.Family),
}
cTypes := append([]models.CveContentType{models.Nvd, models.Jvn}, models.GetCveContentTypes(current.Family)...)
prevLastModifieds := map[models.CveContentType][]time.Time{}
preVinfo, ok := previous.ScannedCves[cveID]

View File

@@ -103,6 +103,9 @@ func writeToFile(cnf config.Config, path string) error {
if cnf.Default.WordPress != nil && cnf.Default.WordPress.IsZero() {
cnf.Default.WordPress = nil
}
if cnf.Default.PortScan != nil && cnf.Default.PortScan.IsZero() {
cnf.Default.PortScan = nil
}
c := struct {
Saas *config.SaasConf `toml:"saas"`
@@ -198,5 +201,11 @@ func cleanForTOMLEncoding(server config.ServerInfo, def config.ServerInfo) confi
}
}
if server.PortScan != nil {
if server.PortScan.IsZero() || reflect.DeepEqual(server.PortScan, def.PortScan) {
server.PortScan = nil
}
}
return server
}

View File

@@ -28,10 +28,12 @@ import (
"golang.org/x/xerrors"
// Import library scanner
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/c/conan"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/dotnet/deps"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/dotnet/nuget"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/golang/binary"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/golang/mod"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/gradle"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/jar"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/pom"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/nodejs/npm"
@@ -361,7 +363,6 @@ func (l *base) detectPlatform() {
//TODO Azure, GCP...
l.setPlatform(models.Platform{Name: "other"})
return
}
var dsFingerPrintPrefix = "AgentStatus.agentCertHash: "
@@ -582,12 +583,6 @@ func (l *base) parseSystemctlStatus(stdout string) string {
return ss[1]
}
// LibFile : library file content
type LibFile struct {
Contents []byte
Filemode os.FileMode
}
func (l *base) scanLibraries() (err error) {
if len(l.LibraryScanners) != 0 {
return nil
@@ -598,9 +593,9 @@ func (l *base) scanLibraries() (err error) {
return nil
}
l.log.Info("Scanning Lockfile...")
l.log.Info("Scanning Language-specific Packages...")
libFilemap := map[string]LibFile{}
found := map[string]bool{}
detectFiles := l.ServerInfo.Lockfiles
priv := noSudo
@@ -615,9 +610,17 @@ func (l *base) scanLibraries() (err error) {
findopt += fmt.Sprintf("-name %q -o ", filename)
}
dir := "/"
if len(l.ServerInfo.FindLockDirs) != 0 {
dir = strings.Join(l.ServerInfo.FindLockDirs, " ")
} else {
l.log.Infof("It's recommended to specify FindLockDirs in config.toml. If FindLockDirs is not specified, all directories under / will be searched, which may increase CPU load")
}
l.log.Infof("Finding files under %s", dir)
// delete last "-o "
// find / -type f -and \( -name "package-lock.json" -o -name "yarn.lock" ... \) 2>&1 | grep -v "find: "
cmd := fmt.Sprintf(`find / -type f -and \( ` + findopt[:len(findopt)-3] + ` \) 2>&1 | grep -v "find: "`)
cmd := fmt.Sprintf(`find %s -type f -and \( `+findopt[:len(findopt)-3]+` \) 2>&1 | grep -v "find: "`, dir)
r := exec(l.ServerInfo, cmd, priv)
if r.ExitStatus != 0 && r.ExitStatus != 1 {
return xerrors.Errorf("Failed to find lock files")
@@ -635,154 +638,167 @@ func (l *base) scanLibraries() (err error) {
}
// skip already exist
if _, ok := libFilemap[path]; ok {
if _, ok := found[path]; ok {
continue
}
var f LibFile
var contents []byte
var filemode os.FileMode
switch l.Distro.Family {
case constant.ServerTypePseudo:
fileinfo, err := os.Stat(path)
if err != nil {
return xerrors.Errorf("Failed to get target file info. err: %w, filepath: %s", err, path)
l.log.Warnf("Failed to get target file info. err: %s, filepath: %s", err, path)
continue
}
f.Filemode = fileinfo.Mode().Perm()
f.Contents, err = os.ReadFile(path)
filemode = fileinfo.Mode().Perm()
contents, err = os.ReadFile(path)
if err != nil {
return xerrors.Errorf("Failed to read target file contents. err: %w, filepath: %s", err, path)
l.log.Warnf("Failed to read target file contents. err: %s, filepath: %s", err, path)
continue
}
default:
l.log.Debugf("Analyzing file: %s", path)
cmd := fmt.Sprintf(`stat -c "%%a" %s`, path)
r := exec(l.ServerInfo, cmd, priv)
r := exec(l.ServerInfo, cmd, priv, logging.NewIODiscardLogger())
if !r.isSuccess() {
return xerrors.Errorf("Failed to get target file permission: %s, filepath: %s", r, path)
l.log.Warnf("Failed to get target file permission: %s, filepath: %s", r, path)
continue
}
permStr := fmt.Sprintf("0%s", strings.ReplaceAll(r.Stdout, "\n", ""))
perm, err := strconv.ParseUint(permStr, 8, 32)
if err != nil {
return xerrors.Errorf("Failed to parse permission string. err: %w, permission string: %s", err, permStr)
l.log.Warnf("Failed to parse permission string. err: %s, permission string: %s", err, permStr)
continue
}
f.Filemode = os.FileMode(perm)
filemode = os.FileMode(perm)
cmd = fmt.Sprintf("cat %s", path)
r = exec(l.ServerInfo, cmd, priv)
r = exec(l.ServerInfo, cmd, priv, logging.NewIODiscardLogger())
if !r.isSuccess() {
return xerrors.Errorf("Failed to get target file contents: %s, filepath: %s", r, path)
l.log.Warnf("Failed to get target file contents: %s, filepath: %s", r, path)
continue
}
f.Contents = []byte(r.Stdout)
contents = []byte(r.Stdout)
}
libFilemap[path] = f
found[path] = true
var libraryScanners []models.LibraryScanner
if libraryScanners, err = AnalyzeLibrary(context.Background(), path, contents, filemode, l.ServerInfo.Mode.IsOffline()); err != nil {
return err
}
l.LibraryScanners = append(l.LibraryScanners, libraryScanners...)
}
var libraryScanners []models.LibraryScanner
if libraryScanners, err = AnalyzeLibraries(context.Background(), libFilemap, l.ServerInfo.Mode.IsOffline()); err != nil {
return err
}
l.LibraryScanners = append(l.LibraryScanners, libraryScanners...)
return nil
}
// AnalyzeLibraries : detects libs defined in lockfile
func AnalyzeLibraries(ctx context.Context, libFilemap map[string]LibFile, isOffline bool) (libraryScanners []models.LibraryScanner, err error) {
// https://github.com/aquasecurity/trivy/blob/84677903a6fa1b707a32d0e8b2bffc23dde52afa/pkg/fanal/analyzer/const.go
disabledAnalyzers := []analyzer.Type{
// ======
// OS
// ======
analyzer.TypeOSRelease,
analyzer.TypeAlpine,
analyzer.TypeAmazon,
analyzer.TypeCBLMariner,
analyzer.TypeDebian,
analyzer.TypePhoton,
analyzer.TypeCentOS,
analyzer.TypeRocky,
analyzer.TypeAlma,
analyzer.TypeFedora,
analyzer.TypeOracle,
analyzer.TypeRedHatBase,
analyzer.TypeSUSE,
analyzer.TypeUbuntu,
// OS Package
analyzer.TypeApk,
analyzer.TypeDpkg,
analyzer.TypeDpkgLicense,
analyzer.TypeRpm,
analyzer.TypeRpmqa,
// OS Package Repository
analyzer.TypeApkRepo,
// ============
// Image Config
// ============
analyzer.TypeApkCommand,
// =================
// Structured Config
// =================
analyzer.TypeYaml,
analyzer.TypeJSON,
analyzer.TypeDockerfile,
analyzer.TypeTerraform,
analyzer.TypeCloudFormation,
analyzer.TypeHelm,
// ========
// License
// ========
analyzer.TypeLicenseFile,
// ========
// Secrets
// ========
analyzer.TypeSecret,
// =======
// Red Hat
// =======
analyzer.TypeRedHatContentManifestType,
analyzer.TypeRedHatDockerfileType,
// AnalyzeLibrary : detects library defined in artifact such as lockfile or jar
func AnalyzeLibrary(ctx context.Context, path string, contents []byte, filemode os.FileMode, isOffline bool) (libraryScanners []models.LibraryScanner, err error) {
anal, err := analyzer.NewAnalyzerGroup(analyzer.AnalyzerOptions{
Group: analyzer.GroupBuiltin,
DisabledAnalyzers: disabledAnalyzers,
})
if err != nil {
return nil, xerrors.Errorf("Failed to new analyzer group. err: %w", err)
}
anal := analyzer.NewAnalyzerGroup(analyzer.GroupBuiltin, disabledAnalyzers)
for path, f := range libFilemap {
var wg sync.WaitGroup
result := new(analyzer.AnalysisResult)
if err := anal.AnalyzeFile(
ctx,
&wg,
semaphore.NewWeighted(1),
result,
"",
path,
&DummyFileInfo{size: int64(len(f.Contents)), filemode: f.Filemode},
func() (dio.ReadSeekCloserAt, error) { return dio.NopCloser(bytes.NewReader(f.Contents)), nil },
nil,
analyzer.AnalysisOptions{Offline: isOffline},
); err != nil {
return nil, xerrors.Errorf("Failed to get libs. err: %w", err)
}
wg.Wait()
libscan, err := convertLibWithScanner(result.Applications)
if err != nil {
return nil, xerrors.Errorf("Failed to convert libs. err: %w", err)
}
libraryScanners = append(libraryScanners, libscan...)
var wg sync.WaitGroup
result := new(analyzer.AnalysisResult)
if err := anal.AnalyzeFile(
ctx,
&wg,
semaphore.NewWeighted(1),
result,
"",
path,
&DummyFileInfo{name: filepath.Base(path), size: int64(len(contents)), filemode: filemode},
func() (dio.ReadSeekCloserAt, error) { return dio.NopCloser(bytes.NewReader(contents)), nil },
nil,
analyzer.AnalysisOptions{Offline: isOffline},
); err != nil {
return nil, xerrors.Errorf("Failed to get libs. err: %w", err)
}
wg.Wait()
libscan, err := convertLibWithScanner(result.Applications)
if err != nil {
return nil, xerrors.Errorf("Failed to convert libs. err: %w", err)
}
libraryScanners = append(libraryScanners, libscan...)
return libraryScanners, nil
}
// https://github.com/aquasecurity/trivy/blob/84677903a6fa1b707a32d0e8b2bffc23dde52afa/pkg/fanal/analyzer/const.go
var disabledAnalyzers = []analyzer.Type{
// ======
// OS
// ======
analyzer.TypeOSRelease,
analyzer.TypeAlpine,
analyzer.TypeAmazon,
analyzer.TypeCBLMariner,
analyzer.TypeDebian,
analyzer.TypePhoton,
analyzer.TypeCentOS,
analyzer.TypeRocky,
analyzer.TypeAlma,
analyzer.TypeFedora,
analyzer.TypeOracle,
analyzer.TypeRedHatBase,
analyzer.TypeSUSE,
analyzer.TypeUbuntu,
// OS Package
analyzer.TypeApk,
analyzer.TypeDpkg,
analyzer.TypeDpkgLicense,
analyzer.TypeRpm,
analyzer.TypeRpmqa,
// OS Package Repository
analyzer.TypeApkRepo,
// ============
// Image Config
// ============
analyzer.TypeApkCommand,
// =================
// Structured Config
// =================
analyzer.TypeYaml,
analyzer.TypeJSON,
analyzer.TypeDockerfile,
analyzer.TypeTerraform,
analyzer.TypeCloudFormation,
analyzer.TypeHelm,
// ========
// License
// ========
analyzer.TypeLicenseFile,
// ========
// Secrets
// ========
analyzer.TypeSecret,
// =======
// Red Hat
// =======
analyzer.TypeRedHatContentManifestType,
analyzer.TypeRedHatDockerfileType,
}
// DummyFileInfo is a dummy struct for libscan
type DummyFileInfo struct {
name string
size int64
filemode os.FileMode
}
// Name is
func (d *DummyFileInfo) Name() string { return "dummy" }
func (d *DummyFileInfo) Name() string { return d.name }
// Size is
func (d *DummyFileInfo) Size() int64 { return d.size }
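As a usage sketch of the new per-file entry point (the lockfile path is an assumption; AnalyzeLibrary's signature is the one introduced above):

    contents, err := os.ReadFile("/app/package-lock.json")
    if err != nil {
        return err
    }
    scanners, err := AnalyzeLibrary(context.Background(), "/app/package-lock.json", contents, 0644, true)
    if err != nil {
        return err
    }
    // each models.LibraryScanner carries the lockfile path plus the libraries Trivy's
    // npm analyzer detected in it; scanLibraries() appends them to l.LibraryScanners.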

View File

@@ -1155,7 +1155,7 @@ func (o *debian) checkrestart() error {
o.Packages[p.Name] = pack
for j, proc := range p.NeedRestartProcs {
if proc.HasInit == false {
if !proc.HasInit {
continue
}
packs[i].NeedRestartProcs[j].InitSystem = initName

View File

@@ -62,7 +62,7 @@ const sudo = true
// noSudo is Const value for normal user mode
const noSudo = false
// Issue commands to the target servers in parallel via SSH or local execution. If execution fails, the server will be excluded from the target server list(servers) and added to the error server list(errServers).
// Issue commands to the target servers in parallel via SSH or local execution. If execution fails, the server will be excluded from the target server list(servers) and added to the error server list(errServers).
func parallelExec(fn func(osTypeInterface) error, timeoutSec ...int) {
resChan := make(chan osTypeInterface, len(servers))
defer close(resChan)
@@ -128,7 +128,6 @@ func parallelExec(fn func(osTypeInterface) error, timeoutSec ...int) {
}
}
servers = successes
return
}
func exec(c config.ServerInfo, cmd string, sudo bool, log ...logging.Logger) (result execResult) {

View File

@@ -34,7 +34,7 @@ func newBsd(c config.ServerInfo) *bsd {
return d
}
//https://github.com/mizzy/specinfra/blob/master/lib/specinfra/helper/detect_os/freebsd.rb
// https://github.com/mizzy/specinfra/blob/master/lib/specinfra/helper/detect_os/freebsd.rb
func detectFreebsd(c config.ServerInfo) (bool, osTypeInterface) {
// Prevent from adding `set -o pipefail` option
c.Distro = config.Distro{Family: constant.FreeBSD}

View File

@@ -188,6 +188,39 @@ func detectRedhat(c config.ServerInfo) (bool, osTypeInterface) {
}
}
if r := exec(c, "ls /etc/amazon-linux-release", noSudo); r.isSuccess() {
// $ cat /etc/amazon-linux-release
// Amazon Linux release 2022 (Amazon Linux)
// Amazon Linux release 2023 (Amazon Linux)
if r := exec(c, "cat /etc/amazon-linux-release", noSudo); r.isSuccess() {
amazon := newAmazon(c)
result := releasePattern.FindStringSubmatch(strings.TrimSpace(r.Stdout))
if len(result) != 3 {
amazon.setErrs([]error{xerrors.Errorf("Failed to parse /etc/amazon-linux-release. r.Stdout: %s", r.Stdout)})
return true, amazon
}
release := result[2]
major, err := strconv.Atoi(util.Major(release))
if err != nil {
amazon.setErrs([]error{xerrors.Errorf("Failed to parse major version from release: %s", release)})
return true, amazon
}
if major < 2022 {
amazon.setErrs([]error{xerrors.Errorf("Failed to init Amazon Linux. err: not supported major version. versions prior to Amazon Linux 2022 are not supported, detected version is %s", release)})
return true, amazon
}
switch strings.ToLower(result[1]) {
case "amazon", "amazon linux":
amazon.setDistro(constant.Amazon, release)
return true, amazon
default:
amazon.setErrs([]error{xerrors.Errorf("Failed to parse Amazon Linux Name. release: %s", release)})
return true, amazon
}
}
}
if r := exec(c, "ls /etc/redhat-release", noSudo); r.isSuccess() {
// https://www.rackaid.com/blog/how-to-determine-centos-or-red-hat-version/
// e.g.
@@ -266,19 +299,24 @@ func detectRedhat(c config.ServerInfo) (bool, osTypeInterface) {
family := constant.Amazon
release := "unknown"
if r := exec(c, "cat /etc/system-release", noSudo); r.isSuccess() {
if strings.HasPrefix(r.Stdout, "Amazon Linux release 2022") {
fields := strings.Fields(r.Stdout)
release = strings.Join(fields[3:], " ")
} else if strings.HasPrefix(r.Stdout, "Amazon Linux 2022") {
fields := strings.Fields(r.Stdout)
release = strings.Join(fields[2:], " ")
} else if strings.HasPrefix(r.Stdout, "Amazon Linux release 2") {
fields := strings.Fields(r.Stdout)
release = fmt.Sprintf("%s %s", fields[3], fields[4])
} else if strings.HasPrefix(r.Stdout, "Amazon Linux 2") {
fields := strings.Fields(r.Stdout)
release = strings.Join(fields[2:], " ")
} else {
switch {
case strings.HasPrefix(r.Stdout, "Amazon Linux AMI release"):
// Amazon Linux AMI release 2017.09
// Amazon Linux AMI release 2018.03
release = "1"
case strings.HasPrefix(r.Stdout, "Amazon Linux 2"), strings.HasPrefix(r.Stdout, "Amazon Linux release 2"):
// Amazon Linux 2 (Karoo)
// Amazon Linux release 2 (Karoo)
release = "2"
case strings.HasPrefix(r.Stdout, "Amazon Linux 2022"), strings.HasPrefix(r.Stdout, "Amazon Linux release 2022"):
// Amazon Linux 2022 (Amazon Linux)
// Amazon Linux release 2022 (Amazon Linux)
release = "2022"
case strings.HasPrefix(r.Stdout, "Amazon Linux 2023"), strings.HasPrefix(r.Stdout, "Amazon Linux release 2023"):
// Amazon Linux 2023 (Amazon Linux)
// Amazon Linux release 2023 (Amazon Linux)
release = "2023"
default:
fields := strings.Fields(r.Stdout)
if len(fields) == 5 {
release = fields[4]
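In short, the /etc/system-release mapping above resolves to the following (example strings mirror the comments in the change; the specific 2022/2023 prefixes have to be tested before the bare "Amazon Linux 2" prefix, which would otherwise match them first):

    // "Amazon Linux AMI release 2018.03"          -> release "1"
    // "Amazon Linux release 2022 (Amazon Linux)"  -> release "2022"
    // "Amazon Linux 2023 (Amazon Linux)"          -> release "2023"
    // "Amazon Linux 2 (Karoo)"                    -> release "2"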
@@ -801,7 +839,7 @@ func (o *redhatBase) parseNeedsRestarting(stdout string) (procs []models.NeedRes
return
}
//TODO refactor
// TODO refactor
// procPathToFQPN returns Fully-Qualified-Package-Name from the command
func (o *redhatBase) procPathToFQPN(execCommand string) (string, error) {
execCommand = strings.Replace(execCommand, "\x00", " ", -1) // for CentOS6.9

View File

@@ -603,7 +603,7 @@ func Test_redhatBase_parseRpmQfLine(t *testing.T) {
{
name: "valid line",
fields: fields{base: base{}},
args: args{line: "Percona-Server-shared-56 1 5.6.19 rel67.0.el6 x86_64"},
args: args{line: "Percona-Server-shared-56 1 5.6.19 rel67.0.el6 x86_64"},
wantPkg: &models.Package{
Name: "Percona-Server-shared-56",
Version: "1:5.6.19",

View File

@@ -129,7 +129,7 @@ func (h VulsHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
return
}
// sever subcmd doesn't have diff option
// server subcmd doesn't have diff option
reports = append(reports, reporter.LocalFileWriter{
CurrentDir: dir,
FormatJSON: true,

View File

@@ -216,6 +216,7 @@ host = "{{$ip}}"
#type = "pseudo"
#memo = "DB Server"
#findLock = true
#findLockDirs = [ "/path/to/prject/lib" ]
#lockfiles = ["/path/to/package-lock.json"]
#cpeNames = [ "cpe:/a:rubyonrails:ruby_on_rails:4.2.1" ]
#owaspDCXMLPath = "/path/to/dependency-check-report.xml"

View File

@@ -10,26 +10,29 @@ import (
"path/filepath"
"github.com/aquasecurity/trivy/pkg/utils"
"github.com/google/subcommands"
"github.com/k0kubun/pp"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/detector"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/reporter"
"github.com/google/subcommands"
"github.com/k0kubun/pp"
)
// ReportCmd is subcommand for reporting
type ReportCmd struct {
configPath string
formatJSON bool
formatOneEMail bool
formatCsv bool
formatFullText bool
formatOneLineText bool
formatList bool
gzip bool
formatJSON bool
formatOneEMail bool
formatCsv bool
formatFullText bool
formatOneLineText bool
formatList bool
formatCycloneDXJSON bool
formatCycloneDXXML bool
gzip bool
toSlack bool
toChatWork bool
@@ -80,6 +83,9 @@ func (*ReportCmd) Usage() string {
[-format-one-line-text]
[-format-list]
[-format-full-text]
[-format-csv]
[-format-cyclonedx-json]
[-format-cyclonedx-xml]
[-gzip]
[-http-proxy=http://192.168.0.1:8080]
[-debug]
@@ -150,6 +156,8 @@ func (p *ReportCmd) SetFlags(f *flag.FlagSet) {
f.BoolVar(&p.formatList, "format-list", false, "Display as list format")
f.BoolVar(&p.formatFullText, "format-full-text", false,
"Detail report in plain text")
f.BoolVar(&p.formatCycloneDXJSON, "format-cyclonedx-json", false, "CycloneDX JSON format")
f.BoolVar(&p.formatCycloneDXXML, "format-cyclonedx-xml", false, "CycloneDX XML format")
f.BoolVar(&p.toSlack, "to-slack", false, "Send report via Slack")
f.BoolVar(&p.toChatWork, "to-chatwork", false, "Send report via chatwork")
@@ -225,7 +233,8 @@ func (p *ReportCmd) Execute(_ context.Context, f *flag.FlagSet, _ ...interface{}
}
if !(p.formatJSON || p.formatOneLineText ||
p.formatList || p.formatFullText || p.formatCsv) {
p.formatList || p.formatFullText || p.formatCsv ||
p.formatCycloneDXJSON || p.formatCycloneDXXML) {
p.formatList = true
}
@@ -310,15 +319,17 @@ func (p *ReportCmd) Execute(_ context.Context, f *flag.FlagSet, _ ...interface{}
if p.toLocalFile {
reports = append(reports, reporter.LocalFileWriter{
CurrentDir: dir,
DiffPlus: config.Conf.DiffPlus,
DiffMinus: config.Conf.DiffMinus,
FormatJSON: p.formatJSON,
FormatCsv: p.formatCsv,
FormatFullText: p.formatFullText,
FormatOneLineText: p.formatOneLineText,
FormatList: p.formatList,
Gzip: p.gzip,
CurrentDir: dir,
DiffPlus: config.Conf.DiffPlus,
DiffMinus: config.Conf.DiffMinus,
FormatJSON: p.formatJSON,
FormatCsv: p.formatCsv,
FormatFullText: p.formatFullText,
FormatOneLineText: p.formatOneLineText,
FormatList: p.formatList,
FormatCycloneDXJSON: p.formatCycloneDXJSON,
FormatCycloneDXXML: p.formatCycloneDXXML,
Gzip: p.gzip,
})
}

View File

@@ -91,9 +91,8 @@ func (p *SaaSCmd) Execute(_ context.Context, f *flag.FlagSet, _ ...interface{})
var res models.ScanResults
hasError := false
for _, r := range loaded {
if len(r.Errors) == 0 {
res = append(res, r)
} else {
res = append(res, r)
if len(r.Errors) != 0 {
logging.Log.Errorf("Ignored since errors occurred during scanning: %s, err: %v",
r.ServerName, r.Errors)
hasError = true

View File

@@ -745,7 +745,7 @@ func setChangelogLayout(g *gocui.Gui) error {
}
for _, alert := range vinfo.GitHubSecurityAlerts {
lines = append(lines, "* "+alert.PackageName)
lines = append(lines, "* "+alert.RepoURLPackageName())
}
r := currentScanResult