Compare commits
19 Commits

| SHA1 |
|---|
| 554ecc437e |
| f6cd4d9223 |
| 03c59866d4 |
| 1d97e91341 |
| 96333f38c9 |
| 8b5d1c8e92 |
| dea80f860c |
| 6eb4c5a5fe |
| b219a8495e |
| eb87d5d4e1 |
| 6963442a5e |
| f7299b9dba |
| 379fc8a1a1 |
| 947fbbb29e |
| 06d2032c9c |
| d055c48827 |
| 2a00339da1 |
| 2d959b3af8 |
| 595e26db41 |
@@ -88,7 +88,7 @@ NOW=$(shell date --iso-8601=seconds)
 NOW_JSON_DIR := '${BASE_DIR}/$(NOW)'
 ONE_SEC_AFTER=$(shell date -d '+1 second' --iso-8601=seconds)
 ONE_SEC_AFTER_JSON_DIR := '${BASE_DIR}/$(ONE_SEC_AFTER)'
-LIBS := 'bundler' 'pip' 'pipenv' 'poetry' 'composer' 'npm' 'yarn' 'pnpm' 'cargo' 'gomod' 'gosum' 'gobinary' 'jar' 'pom' 'nuget-lock' 'nuget-config' 'dotnet-deps' 'nvd_exact' 'nvd_rough' 'nvd_vendor_product' 'nvd_match_no_jvn' 'jvn_vendor_product' 'jvn_vendor_product_nover'
+LIBS := 'bundler' 'pip' 'pipenv' 'poetry' 'composer' 'npm' 'yarn' 'pnpm' 'cargo' 'gomod' 'gosum' 'gobinary' 'jar' 'pom' 'gradle' 'nuget-lock' 'nuget-config' 'dotnet-deps' 'conan' 'nvd_exact' 'nvd_rough' 'nvd_vendor_product' 'nvd_match_no_jvn' 'jvn_vendor_product' 'jvn_vendor_product_nover'
 
 diff:
 	# git clone git@github.com:vulsio/vulsctl.git
README.md (10 changes)
@@ -9,8 +9,8 @@
 
 [image]
 
-Vulnerability scanner for Linux/FreeBSD, agent-less, written in Go.
-We have a slack team. [Join slack team](http://goo.gl/forms/xm5KFo35tu)
+Vulnerability scanner for Linux/FreeBSD, agent-less, written in Go.
+We have a slack team. [Join slack team](https://join.slack.com/t/vuls-github/shared_invite/zt-1fculjwj4-6nex2JNE7DpOSiKZ1ztDFw)
 Twitter: [@vuls_en](https://twitter.com/vuls_en)
 
 [image]
@@ -95,11 +95,7 @@ Vuls is a tool created to solve the problems listed above. It has the following
 - [mitre/cti](https://github.com/mitre/cti)
 
 - Libraries
-  - [Node.js Security Working Group](https://github.com/nodejs/security-wg)
-  - [Ruby Advisory Database](https://github.com/rubysec/ruby-advisory-db)
-  - [Safety DB(Python)](https://github.com/pyupio/safety-db)
-  - [PHP Security Advisories Database](https://github.com/FriendsOfPHP/security-advisories)
-  - [RustSec Advisory Database](https://github.com/RustSec/advisory-db)
+  - [aquasecurity/vuln-list](https://github.com/aquasecurity/vuln-list)
 
 - WordPress
   - [wpscan](https://wpscan.com/api)
@@ -240,6 +240,7 @@ type ServerInfo struct {
 	Optional map[string]interface{} `toml:"optional,omitempty" json:"optional,omitempty"` // Optional key-value set that will be outputted to JSON
 	Lockfiles []string `toml:"lockfiles,omitempty" json:"lockfiles,omitempty"` // ie) path/to/package-lock.json
 	FindLock bool `toml:"findLock,omitempty" json:"findLock,omitempty"`
+	FindLockDirs []string `toml:"findLockDirs,omitempty" json:"findLockDirs,omitempty"`
 	Type string `toml:"type,omitempty" json:"type,omitempty"` // "pseudo" or ""
 	IgnoredJSONKeys []string `toml:"ignoredJSONKeys,omitempty" json:"ignoredJSONKeys,omitempty"`
 	WordPress *WordPressConf `toml:"wordpress,omitempty" json:"wordpress,omitempty"`
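
For orientation, a hedged sketch of how the new findLockDirs key would round-trip through TOML decoding. The struct tags mirror the fields in the hunk above; the sample directory path and the direct use of BurntSushi/toml (the TOML library pinned in go.mod in this compare) are illustrative, not part of the change.

```go
package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

// serverInfo mirrors only the two fields relevant here; the real struct is config.ServerInfo.
type serverInfo struct {
	FindLock     bool     `toml:"findLock,omitempty"`
	FindLockDirs []string `toml:"findLockDirs,omitempty"`
}

func main() {
	// Hypothetical fragment of a per-server config section; the path is an example only.
	src := `
findLock = true
findLockDirs = ["/var/www/app"]
`
	var s serverInfo
	if _, err := toml.Decode(src, &s); err != nil {
		panic(err)
	}
	fmt.Println(s.FindLock, s.FindLockDirs) // true [/var/www/app]
}
```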
@@ -166,6 +166,9 @@ func GetEOL(family, release string) (eol EOL, found bool) {
 			StandardSupportUntil: time.Date(2027, 4, 1, 23, 59, 59, 0, time.UTC),
 			ExtendedSupportUntil: time.Date(2032, 4, 1, 23, 59, 59, 0, time.UTC),
 		},
+		"22.10": {
+			StandardSupportUntil: time.Date(2023, 7, 20, 23, 59, 59, 0, time.UTC),
+		},
 	}[release]
 case constant.OpenSUSE:
 	// https://en.opensuse.org/Lifetime
@@ -339,6 +339,14 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
 			stdEnded: false,
 			extEnded: false,
 		},
+		{
+			name:     "Ubuntu 22.10 supported",
+			fields:   fields{family: Ubuntu, release: "22.10"},
+			now:      time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC),
+			found:    true,
+			stdEnded: false,
+			extEnded: false,
+		},
 		//Debian
 		{
 			name: "Debian 9 supported",
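
A hedged usage sketch of the new EOL entry. It assumes GetEOL and the Ubuntu family constant are exported from the config and constant packages, as the hunks here suggest; the printed date only illustrates the 2023-07-20 cutoff added above.

```go
package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/constant"
)

func main() {
	// Look up the lifecycle data that the hunk above registers for Ubuntu 22.10.
	eol, found := config.GetEOL(constant.Ubuntu, "22.10")
	if !found {
		fmt.Println("unknown release")
		return
	}
	fmt.Println(eol.StandardSupportUntil) // 2023-07-20 23:59:59 +0000 UTC
}
```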
cti/cti.go (139 changes)
@@ -660,7 +660,7 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "CAPEC-35: Leverage Executable Code in Non-Executable Files",
|
||||
},
|
||||
"CAPEC-36": {
|
||||
Name: "CAPEC-36: Using Unpublished Interfaces",
|
||||
Name: "CAPEC-36: Using Unpublished Interfaces or Functionality",
|
||||
},
|
||||
"CAPEC-37": {
|
||||
Name: "CAPEC-37: Retrieve Embedded Sensitive Data",
|
||||
@@ -831,7 +831,7 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "CAPEC-442: Infected Software",
|
||||
},
|
||||
"CAPEC-443": {
|
||||
Name: "CAPEC-443: Malicious Logic Inserted Into Product Software by Authorized Developer",
|
||||
Name: "CAPEC-443: Malicious Logic Inserted Into Product by Authorized Developer",
|
||||
},
|
||||
"CAPEC-444": {
|
||||
Name: "CAPEC-444: Development Alteration",
|
||||
@@ -840,7 +840,7 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "CAPEC-445: Malicious Logic Insertion into Product Software via Configuration Management Manipulation",
|
||||
},
|
||||
"CAPEC-446": {
|
||||
Name: "CAPEC-446: Malicious Logic Insertion into Product Software via Inclusion of 3rd Party Component Dependency",
|
||||
Name: "CAPEC-446: Malicious Logic Insertion into Product via Inclusion of Third-Party Component",
|
||||
},
|
||||
"CAPEC-447": {
|
||||
Name: "CAPEC-447: Design Alteration",
|
||||
@@ -1382,9 +1382,6 @@ var TechniqueDict = map[string]Technique{
|
||||
"CAPEC-628": {
|
||||
Name: "CAPEC-628: Carry-Off GPS Attack",
|
||||
},
|
||||
"CAPEC-629": {
|
||||
Name: "CAPEC-629: Unauthorized Use of Device Resources",
|
||||
},
|
||||
"CAPEC-63": {
|
||||
Name: "CAPEC-63: Cross-Site Scripting (XSS)",
|
||||
},
|
||||
@@ -1464,7 +1461,7 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "CAPEC-652: Use of Known Kerberos Credentials",
|
||||
},
|
||||
"CAPEC-653": {
|
||||
Name: "CAPEC-653: Use of Known Windows Credentials",
|
||||
Name: "CAPEC-653: Use of Known Operating System Credentials",
|
||||
},
|
||||
"CAPEC-654": {
|
||||
Name: "CAPEC-654: Credential Prompt Impersonation",
|
||||
@@ -1553,9 +1550,39 @@ var TechniqueDict = map[string]Technique{
|
||||
"CAPEC-681": {
|
||||
Name: "CAPEC-681: Exploitation of Improperly Controlled Hardware Security Identifiers",
|
||||
},
|
||||
"CAPEC-682": {
|
||||
Name: "CAPEC-682: Exploitation of Firmware or ROM Code with Unpatchable Vulnerabilities",
|
||||
},
|
||||
"CAPEC-69": {
|
||||
Name: "CAPEC-69: Target Programs with Elevated Privileges",
|
||||
},
|
||||
"CAPEC-690": {
|
||||
Name: "CAPEC-690: Metadata Spoofing",
|
||||
},
|
||||
"CAPEC-691": {
|
||||
Name: "CAPEC-691: Spoof Open-Source Software Metadata",
|
||||
},
|
||||
"CAPEC-692": {
|
||||
Name: "CAPEC-692: Spoof Version Control System Commit Metadata",
|
||||
},
|
||||
"CAPEC-693": {
|
||||
Name: "CAPEC-693: StarJacking",
|
||||
},
|
||||
"CAPEC-694": {
|
||||
Name: "CAPEC-694: System Location Discovery",
|
||||
},
|
||||
"CAPEC-695": {
|
||||
Name: "CAPEC-695: Repo Jacking",
|
||||
},
|
||||
"CAPEC-696": {
|
||||
Name: "CAPEC-696: Load Value Injection",
|
||||
},
|
||||
"CAPEC-697": {
|
||||
Name: "CAPEC-697: DHCP Spoofing",
|
||||
},
|
||||
"CAPEC-698": {
|
||||
Name: "CAPEC-698: Install Malicious Extension",
|
||||
},
|
||||
"CAPEC-7": {
|
||||
Name: "CAPEC-7: Blind SQL Injection",
|
||||
},
|
||||
@@ -1596,7 +1623,7 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "CAPEC-80: Using UTF-8 Encoding to Bypass Validation Logic",
|
||||
},
|
||||
"CAPEC-81": {
|
||||
Name: "CAPEC-81: Web Logs Tampering",
|
||||
Name: "CAPEC-81: Web Server Logs Tampering",
|
||||
},
|
||||
"CAPEC-83": {
|
||||
Name: "CAPEC-83: XPath Injection",
|
||||
@@ -1814,6 +1841,18 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0005: Defense Evasion => T1027.006: HTML Smuggling",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
},
|
||||
"T1027.007": {
|
||||
Name: "TA0005: Defense Evasion => T1027.007: Dynamic API Resolution",
|
||||
Platforms: []string{"Windows"},
|
||||
},
|
||||
"T1027.008": {
|
||||
Name: "TA0005: Defense Evasion => T1027.008: Stripped Payloads",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
},
|
||||
"T1027.009": {
|
||||
Name: "TA0005: Defense Evasion => T1027.009: Embedded Payloads",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
},
|
||||
"T1029": {
|
||||
Name: "TA0010: Exfiltration => T1029: Scheduled Transfer",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
@@ -2087,8 +2126,8 @@ var TechniqueDict = map[string]Technique{
|
||||
Platforms: []string{"Azure AD", "Google Workspace", "IaaS", "Office 365", "SaaS"},
|
||||
},
|
||||
"T1070": {
|
||||
Name: "TA0005: Defense Evasion => T1070: Indicator Removal on Host",
|
||||
Platforms: []string{"Containers", "Linux", "Network", "Windows", "macOS"},
|
||||
Name: "TA0005: Defense Evasion => T1070: Indicator Removal",
|
||||
Platforms: []string{"Containers", "Google Workspace", "Linux", "Network", "Office 365", "Windows", "macOS"},
|
||||
},
|
||||
"T1070.001": {
|
||||
Name: "TA0005: Defense Evasion => T1070.001: Clear Windows Event Logs",
|
||||
@@ -2114,6 +2153,18 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0005: Defense Evasion => T1070.006: Timestomp",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
},
|
||||
"T1070.007": {
|
||||
Name: "TA0005: Defense Evasion => T1070.007: Clear Network Connection History and Configurations",
|
||||
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
|
||||
},
|
||||
"T1070.008": {
|
||||
Name: "TA0005: Defense Evasion => T1070.008: Clear Mailbox Data",
|
||||
Platforms: []string{"Google Workspace", "Linux", "Office 365", "Windows", "macOS"},
|
||||
},
|
||||
"T1070.009": {
|
||||
Name: "TA0005: Defense Evasion => T1070.009: Clear Persistence",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
},
|
||||
"T1071": {
|
||||
Name: "TA0011: Command and Control => T1071: Application Layer Protocol",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
@@ -2152,7 +2203,7 @@ var TechniqueDict = map[string]Technique{
|
||||
},
|
||||
"T1078": {
|
||||
Name: "TA0001: Initial Access, TA0003: Persistence, TA0004: Privilege Escalation, TA0005: Defense Evasion => T1078: Valid Accounts",
|
||||
Platforms: []string{"Azure AD", "Containers", "Google Workspace", "IaaS", "Linux", "Office 365", "SaaS", "Windows", "macOS"},
|
||||
Platforms: []string{"Azure AD", "Containers", "Google Workspace", "IaaS", "Linux", "Network", "Office 365", "SaaS", "Windows", "macOS"},
|
||||
},
|
||||
"T1078.001": {
|
||||
Name: "TA0001: Initial Access, TA0003: Persistence, TA0004: Privilege Escalation, TA0005: Defense Evasion => T1078.001: Default Accounts",
|
||||
@@ -2504,7 +2555,7 @@ var TechniqueDict = map[string]Technique{
|
||||
},
|
||||
"T1199": {
|
||||
Name: "TA0001: Initial Access => T1199: Trusted Relationship",
|
||||
Platforms: []string{"IaaS", "Linux", "SaaS", "Windows", "macOS"},
|
||||
Platforms: []string{"IaaS", "Linux", "Office 365", "SaaS", "Windows", "macOS"},
|
||||
},
|
||||
"T1200": {
|
||||
Name: "TA0001: Initial Access => T1200: Hardware Additions",
|
||||
@@ -2546,6 +2597,10 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0011: Command and Control => T1205.001: Port Knocking",
|
||||
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
|
||||
},
|
||||
"T1205.002": {
|
||||
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0011: Command and Control => T1205.002: Socket Filters",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
},
|
||||
"T1207": {
|
||||
Name: "TA0005: Defense Evasion => T1207: Rogue Domain Controller",
|
||||
Platforms: []string{"Windows"},
|
||||
@@ -2780,7 +2835,7 @@ var TechniqueDict = map[string]Technique{
|
||||
},
|
||||
"T1505": {
|
||||
Name: "TA0003: Persistence => T1505: Server Software Component",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
|
||||
},
|
||||
"T1505.001": {
|
||||
Name: "TA0003: Persistence => T1505.001: SQL Stored Procedures",
|
||||
@@ -2792,7 +2847,7 @@ var TechniqueDict = map[string]Technique{
|
||||
},
|
||||
"T1505.003": {
|
||||
Name: "TA0003: Persistence => T1505.003: Web Shell",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
|
||||
},
|
||||
"T1505.004": {
|
||||
Name: "TA0003: Persistence => T1505.004: IIS Components",
|
||||
@@ -2827,8 +2882,8 @@ var TechniqueDict = map[string]Technique{
|
||||
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
|
||||
},
|
||||
"T1530": {
|
||||
Name: "TA0009: Collection => T1530: Data from Cloud Storage Object",
|
||||
Platforms: []string{"IaaS"},
|
||||
Name: "TA0009: Collection => T1530: Data from Cloud Storage",
|
||||
Platforms: []string{"IaaS", "SaaS"},
|
||||
},
|
||||
"T1531": {
|
||||
Name: "TA0040: Impact => T1531: Account Access Removal",
|
||||
@@ -2900,7 +2955,7 @@ var TechniqueDict = map[string]Technique{
|
||||
},
|
||||
"T1546": {
|
||||
Name: "TA0003: Persistence, TA0004: Privilege Escalation => T1546: Event Triggered Execution",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
Platforms: []string{"IaaS", "Linux", "Office 365", "SaaS", "Windows", "macOS"},
|
||||
},
|
||||
"T1546.001": {
|
||||
Name: "TA0003: Persistence, TA0004: Privilege Escalation => T1546.001: Change Default File Association",
|
||||
@@ -2962,6 +3017,10 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0003: Persistence, TA0004: Privilege Escalation => T1546.015: Component Object Model Hijacking",
|
||||
Platforms: []string{"Windows"},
|
||||
},
|
||||
"T1546.016": {
|
||||
Name: "TA0003: Persistence, TA0004: Privilege Escalation => T1546.016: Installer Packages",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
},
|
||||
"T1547": {
|
||||
Name: "TA0003: Persistence, TA0004: Privilege Escalation => T1547: Boot or Logon Autostart Execution",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
@@ -3048,7 +3107,7 @@ var TechniqueDict = map[string]Technique{
|
||||
},
|
||||
"T1550.001": {
|
||||
Name: "TA0005: Defense Evasion, TA0008: Lateral Movement => T1550.001: Application Access Token",
|
||||
Platforms: []string{"Containers", "Google Workspace", "Office 365", "SaaS"},
|
||||
Platforms: []string{"Azure AD", "Containers", "Google Workspace", "IaaS", "Office 365", "SaaS"},
|
||||
},
|
||||
"T1550.002": {
|
||||
Name: "TA0005: Defense Evasion, TA0008: Lateral Movement => T1550.002: Pass the Hash",
|
||||
@@ -3152,7 +3211,7 @@ var TechniqueDict = map[string]Technique{
|
||||
},
|
||||
"T1556": {
|
||||
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0006: Credential Access => T1556: Modify Authentication Process",
|
||||
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
|
||||
Platforms: []string{"Azure AD", "Google Workspace", "IaaS", "Linux", "Network", "Office 365", "SaaS", "Windows", "macOS"},
|
||||
},
|
||||
"T1556.001": {
|
||||
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0006: Credential Access => T1556.001: Domain Controller Authentication",
|
||||
@@ -3174,9 +3233,17 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0006: Credential Access => T1556.005: Reversible Encryption",
|
||||
Platforms: []string{"Windows"},
|
||||
},
|
||||
"T1556.006": {
|
||||
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0006: Credential Access => T1556.006: Multi-Factor Authentication",
|
||||
Platforms: []string{"Azure AD", "Google Workspace", "IaaS", "Linux", "Office 365", "SaaS", "Windows", "macOS"},
|
||||
},
|
||||
"T1556.007": {
|
||||
Name: "TA0003: Persistence, TA0005: Defense Evasion, TA0006: Credential Access => T1556.007: Hybrid Identity",
|
||||
Platforms: []string{"Azure AD", "Google Workspace", "IaaS", "Office 365", "SaaS", "Windows"},
|
||||
},
|
||||
"T1557": {
|
||||
Name: "TA0006: Credential Access, TA0009: Collection => T1557: Adversary-in-the-Middle",
|
||||
Platforms: []string{"Linux", "Windows", "macOS"},
|
||||
Platforms: []string{"Linux", "Network", "Windows", "macOS"},
|
||||
},
|
||||
"T1557.001": {
|
||||
Name: "TA0006: Credential Access, TA0009: Collection => T1557.001: LLMNR/NBT-NS Poisoning and SMB Relay",
|
||||
@@ -3550,6 +3617,10 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0042: Resource Development => T1583.006: Web Services",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1583.007": {
|
||||
Name: "TA0042: Resource Development => T1583.007: Serverless",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1584": {
|
||||
Name: "TA0042: Resource Development => T1584: Compromise Infrastructure",
|
||||
Platforms: []string{"PRE"},
|
||||
@@ -3578,6 +3649,10 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0042: Resource Development => T1584.006: Web Services",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1584.007": {
|
||||
Name: "TA0042: Resource Development => T1584.007: Serverless",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1585": {
|
||||
Name: "TA0042: Resource Development => T1585: Establish Accounts",
|
||||
Platforms: []string{"PRE"},
|
||||
@@ -3590,6 +3665,10 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0042: Resource Development => T1585.002: Email Accounts",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1585.003": {
|
||||
Name: "TA0042: Resource Development => T1585.003: Cloud Accounts",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1586": {
|
||||
Name: "TA0042: Resource Development => T1586: Compromise Accounts",
|
||||
Platforms: []string{"PRE"},
|
||||
@@ -3602,6 +3681,10 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0042: Resource Development => T1586.002: Email Accounts",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1586.003": {
|
||||
Name: "TA0042: Resource Development => T1586.003: Cloud Accounts",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1587": {
|
||||
Name: "TA0042: Resource Development => T1587: Develop Capabilities",
|
||||
Platforms: []string{"PRE"},
|
||||
@@ -3746,6 +3829,10 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0043: Reconnaissance => T1593.002: Search Engines",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1593.003": {
|
||||
Name: "TA0043: Reconnaissance => T1593.003: Code Repositories",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1594": {
|
||||
Name: "TA0043: Reconnaissance => T1594: Search Victim-Owned Websites",
|
||||
Platforms: []string{"PRE"},
|
||||
@@ -3898,6 +3985,10 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0042: Resource Development => T1608.005: Link Target",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1608.006": {
|
||||
Name: "TA0042: Resource Development => T1608.006: SEO Poisoning",
|
||||
Platforms: []string{"PRE"},
|
||||
},
|
||||
"T1609": {
|
||||
Name: "TA0002: Execution => T1609: Container Administration Command",
|
||||
Platforms: []string{"Containers"},
|
||||
@@ -3950,4 +4041,12 @@ var TechniqueDict = map[string]Technique{
|
||||
Name: "TA0005: Defense Evasion => T1647: Plist File Modification",
|
||||
Platforms: []string{"macOS"},
|
||||
},
|
||||
"T1648": {
|
||||
Name: "TA0002: Execution => T1648: Serverless Execution",
|
||||
Platforms: []string{"IaaS", "Office 365", "SaaS"},
|
||||
},
|
||||
"T1649": {
|
||||
Name: "TA0006: Credential Access => T1649: Steal or Forge Authentication Certificates",
|
||||
Platforms: []string{"Azure AD", "Linux", "Windows", "macOS"},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -261,21 +261,23 @@ func DetectPkgCves(r *models.ScanResult, ovalCnf config.GovalDictConf, gostCnf c
 
 // isPkgCvesDetactable checks whether CVEs is detactable with gost and oval from the result
 func isPkgCvesDetactable(r *models.ScanResult) bool {
-	if r.Release == "" {
-		logging.Log.Infof("r.Release is empty. Skip OVAL and gost detection")
-		return false
-	}
-
-	if r.ScannedVia == "trivy" {
-		logging.Log.Infof("r.ScannedVia is trivy. Skip OVAL and gost detection")
-		return false
-	}
-
 	switch r.Family {
 	case constant.FreeBSD, constant.ServerTypePseudo:
 		logging.Log.Infof("%s type. Skip OVAL and gost detection", r.Family)
 		return false
+	case constant.Windows:
+		return true
+	default:
+		if r.ScannedVia == "trivy" {
+			logging.Log.Infof("r.ScannedVia is trivy. Skip OVAL and gost detection")
+			return false
+		}
+
+		if r.Release == "" {
+			logging.Log.Infof("r.Release is empty. Skip OVAL and gost detection")
+			return false
+		}
+
 		if len(r.Packages)+len(r.SrcPackages) == 0 {
 			logging.Log.Infof("Number of packages is 0. Skip OVAL and gost detection")
 			return false
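
A hedged sketch of how the reordered checks could be exercised from a test in the same package. The detector package name and constant.Ubuntu are assumptions; the ScanResult fields come from the hunk above. This is illustrative only, not a test added by this change.

```go
package detector

import (
	"testing"

	"github.com/future-architect/vuls/constant"
	"github.com/future-architect/vuls/models"
)

// Illustrative only: Windows results are now always treated as detectable,
// while the trivy and empty-release short-circuits apply to the other families.
func TestIsPkgCvesDetactableSketch(t *testing.T) {
	win := models.ScanResult{Family: constant.Windows}
	if !isPkgCvesDetactable(&win) {
		t.Error("expected Windows scan results to be detectable")
	}

	viaTrivy := models.ScanResult{Family: constant.Ubuntu, Release: "22.04", ScannedVia: "trivy"}
	if isPkgCvesDetactable(&viaTrivy) {
		t.Error("expected trivy-scanned results to be skipped")
	}
}
```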
@@ -29,7 +29,7 @@ func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string,
 	// TODO Use `https://github.com/shurcooL/githubv4` if the tool supports vulnerabilityAlerts Endpoint
 	// Memo : https://developer.github.com/v4/explorer/
 	const jsonfmt = `{"query":
-	"query { repository(owner:\"%s\", name:\"%s\") { url vulnerabilityAlerts(first: %d, %s) { pageInfo { endCursor hasNextPage startCursor } edges { node { id dismissReason dismissedAt securityVulnerability{ package { name ecosystem } severity vulnerableVersionRange firstPatchedVersion { identifier } } securityAdvisory { description ghsaId permalink publishedAt summary updatedAt withdrawnAt origin severity references { url } identifiers { type value } } } } } } } "}`
+	"query { repository(owner:\"%s\", name:\"%s\") { url vulnerabilityAlerts(first: %d, states:[OPEN], %s) { pageInfo { endCursor hasNextPage startCursor } edges { node { id dismissReason dismissedAt securityVulnerability{ package { name ecosystem } severity vulnerableVersionRange firstPatchedVersion { identifier } } securityAdvisory { description ghsaId permalink publishedAt summary updatedAt withdrawnAt origin severity references { url } identifiers { type value } } } } } } } "}`
 	after := ""
 
 	for {
@@ -148,7 +148,7 @@ func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string,
 	return nCVEs, err
 }
 
-//SecurityAlerts has detected CVE-IDs, PackageNames, Refs
+// SecurityAlerts has detected CVE-IDs, PackageNames, Refs
 type SecurityAlerts struct {
 	Data struct {
 		Repository struct {
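
For context, a minimal sketch of sending a query with the same states:[OPEN] filter to GitHub's GraphQL endpoint. The endpoint URL and headers are standard GitHub API usage; the trimmed-down query, the owner/repo/token values, and the surrounding plumbing are illustrative, not the code above (vuls receives owner, repo, and token as arguments to DetectGitHubSecurityAlerts).

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

func main() {
	// Illustrative values only.
	owner, repo, token := "future-architect", "vuls", "ghp_exampletoken"

	// A deliberately small query using the states:[OPEN] filter added by this change.
	body := fmt.Sprintf(`{"query": "query { repository(owner:\"%s\", name:\"%s\") { vulnerabilityAlerts(first: 1, states:[OPEN]) { totalCount } } }"}`, owner, repo)

	req, err := http.NewRequest(http.MethodPost, "https://api.github.com/graphql", strings.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "token "+token)
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(string(out))
}
```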
go.mod (124 changes)
@@ -4,19 +4,21 @@ go 1.18
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go v66.0.0+incompatible
|
||||
github.com/BurntSushi/toml v1.1.0
|
||||
github.com/BurntSushi/toml v1.2.0
|
||||
github.com/CycloneDX/cyclonedx-go v0.7.0
|
||||
github.com/Ullaakut/nmap/v2 v2.1.2-0.20210406060955-59a52fe80a4f
|
||||
github.com/aquasecurity/go-dep-parser v0.0.0-20220626060741-179d0b167e5f
|
||||
github.com/aquasecurity/trivy v0.30.4
|
||||
github.com/aquasecurity/go-dep-parser v0.0.0-20221114145626-35ef808901e8
|
||||
github.com/aquasecurity/trivy v0.35.0
|
||||
github.com/aquasecurity/trivy-db v0.0.0-20220627104749-930461748b63
|
||||
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d
|
||||
github.com/aws/aws-sdk-go v1.44.46
|
||||
github.com/aws/aws-sdk-go v1.44.136
|
||||
github.com/c-robinson/iplib v1.0.3
|
||||
github.com/cenkalti/backoff v2.2.1+incompatible
|
||||
github.com/d4l3k/messagediff v1.2.2-0.20190829033028-7e0a312ae40b
|
||||
github.com/emersion/go-sasl v0.0.0-20200509203442-7bfe0ed36a21
|
||||
github.com/emersion/go-smtp v0.14.0
|
||||
github.com/google/subcommands v1.2.0
|
||||
github.com/google/uuid v1.3.0
|
||||
github.com/gosuri/uitable v0.0.4
|
||||
github.com/hashicorp/go-uuid v1.0.3
|
||||
github.com/hashicorp/go-version v1.6.0
|
||||
@@ -31,41 +33,39 @@ require (
|
||||
github.com/mitchellh/go-homedir v1.1.0
|
||||
github.com/nlopes/slack v0.6.0
|
||||
github.com/olekukonko/tablewriter v0.0.5
|
||||
github.com/package-url/packageurl-go v0.1.1-0.20220203205134-d70459300c8a
|
||||
github.com/parnurzeal/gorequest v0.2.16
|
||||
github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5
|
||||
github.com/sirupsen/logrus v1.9.0
|
||||
github.com/spf13/cobra v1.5.0
|
||||
github.com/spf13/cobra v1.6.0
|
||||
github.com/vulsio/go-cti v0.0.2-0.20220613013115-8c7e57a6aa86
|
||||
github.com/vulsio/go-cve-dictionary v0.8.2-0.20211028094424-0a854f8e8f85
|
||||
github.com/vulsio/go-cve-dictionary v0.8.2
|
||||
github.com/vulsio/go-exploitdb v0.4.2
|
||||
github.com/vulsio/go-kev v0.1.1-0.20220118062020-5f69b364106f
|
||||
github.com/vulsio/go-msfdb v0.2.1-0.20211028071756-4a9759bd9f14
|
||||
github.com/vulsio/gost v0.4.2-0.20220630181607-2ed593791ec3
|
||||
github.com/vulsio/goval-dictionary v0.8.0
|
||||
go.etcd.io/bbolt v1.3.6
|
||||
golang.org/x/exp v0.0.0-20220613132600-b0d781184e0d
|
||||
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5
|
||||
golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f
|
||||
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f
|
||||
golang.org/x/exp v0.0.0-20220823124025-807a23277127
|
||||
golang.org/x/oauth2 v0.1.0
|
||||
golang.org/x/sync v0.1.0
|
||||
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.100.2 // indirect
|
||||
cloud.google.com/go/compute v1.6.1 // indirect
|
||||
cloud.google.com/go v0.103.0 // indirect
|
||||
cloud.google.com/go/compute v1.10.0 // indirect
|
||||
cloud.google.com/go/iam v0.3.0 // indirect
|
||||
cloud.google.com/go/storage v1.14.0 // indirect
|
||||
cloud.google.com/go/storage v1.23.0 // indirect
|
||||
github.com/Azure/go-autorest v14.2.0+incompatible // indirect
|
||||
github.com/Azure/go-autorest/autorest v0.11.27 // indirect
|
||||
github.com/Azure/go-autorest/autorest/adal v0.9.20 // indirect
|
||||
github.com/Azure/go-autorest/autorest v0.11.28 // indirect
|
||||
github.com/Azure/go-autorest/autorest/adal v0.9.21 // indirect
|
||||
github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
|
||||
github.com/Azure/go-autorest/autorest/to v0.3.0 // indirect
|
||||
github.com/Azure/go-autorest/logger v0.2.1 // indirect
|
||||
github.com/Azure/go-autorest/tracing v0.6.0 // indirect
|
||||
github.com/Microsoft/go-winio v0.5.2 // indirect
|
||||
github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7 // indirect
|
||||
github.com/PuerkitoBio/goquery v1.6.1 // indirect
|
||||
github.com/VividCortex/ewma v1.2.0 // indirect
|
||||
github.com/acomagu/bufpipe v1.0.3 // indirect
|
||||
github.com/andybalholm/cascadia v1.2.0 // indirect
|
||||
github.com/aquasecurity/go-gem-version v0.0.0-20201115065557-8eed6fe000ce // indirect
|
||||
github.com/aquasecurity/go-npm-version v0.0.0-20201110091526-0b796d180798 // indirect
|
||||
@@ -73,24 +73,18 @@ require (
|
||||
github.com/aquasecurity/go-version v0.0.0-20210121072130-637058cfe492 // indirect
|
||||
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect
|
||||
github.com/briandowns/spinner v1.18.1 // indirect
|
||||
github.com/caarlos0/env/v6 v6.9.3 // indirect
|
||||
github.com/caarlos0/env/v6 v6.10.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.1.2 // indirect
|
||||
github.com/cheggaaa/pb/v3 v3.1.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/dgryski/go-minhash v0.0.0-20170608043002-7fe510aff544 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/docker/cli v20.10.17+incompatible // indirect
|
||||
github.com/dnaeon/go-vcr v1.2.0 // indirect
|
||||
github.com/docker/cli v20.10.20+incompatible // indirect
|
||||
github.com/docker/distribution v2.8.1+incompatible // indirect
|
||||
github.com/docker/docker v20.10.17+incompatible // indirect
|
||||
github.com/docker/docker-credential-helpers v0.6.4 // indirect
|
||||
github.com/ekzhu/minhash-lsh v0.0.0-20171225071031-5c06ee8586a1 // indirect
|
||||
github.com/emirpasic/gods v1.12.0 // indirect
|
||||
github.com/docker/docker v20.10.20+incompatible // indirect
|
||||
github.com/docker/docker-credential-helpers v0.7.0 // indirect
|
||||
github.com/fatih/color v1.13.0 // indirect
|
||||
github.com/fsnotify/fsnotify v1.5.4 // indirect
|
||||
github.com/go-enry/go-license-detector/v4 v4.3.0 // indirect
|
||||
github.com/go-git/gcfg v1.5.0 // indirect
|
||||
github.com/go-git/go-billy/v5 v5.3.1 // indirect
|
||||
github.com/go-git/go-git/v5 v5.4.2 // indirect
|
||||
github.com/go-redis/redis/v8 v8.11.5 // indirect
|
||||
github.com/go-sql-driver/mysql v1.6.0 // indirect
|
||||
github.com/go-stack/stack v1.8.1 // indirect
|
||||
@@ -98,9 +92,12 @@ require (
|
||||
github.com/golang-jwt/jwt/v4 v4.2.0 // indirect
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
|
||||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/google/go-containerregistry v0.8.0 // indirect
|
||||
github.com/google/licenseclassifier/v2 v2.0.0-pre5 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.4.0 // indirect
|
||||
github.com/google/go-cmp v0.5.9 // indirect
|
||||
github.com/google/go-containerregistry v0.12.0 // indirect
|
||||
github.com/google/licenseclassifier/v2 v2.0.0-pre6 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.1.0 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.5.1 // indirect
|
||||
github.com/googleapis/go-type-adapters v1.0.0 // indirect
|
||||
github.com/gorilla/websocket v1.4.2 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||
@@ -109,10 +106,8 @@ require (
|
||||
github.com/hashicorp/go-retryablehttp v0.7.1 // indirect
|
||||
github.com/hashicorp/go-safetemp v1.0.0 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/hhatto/gorst v0.0.0-20181029133204-ca9f730cac5b // indirect
|
||||
github.com/imdario/mergo v0.3.13 // indirect
|
||||
github.com/inconshreveable/log15 v0.0.0-20201112154412-8562bdadbbac // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.0 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.1 // indirect
|
||||
github.com/jackc/chunkreader/v2 v2.0.1 // indirect
|
||||
github.com/jackc/pgconn v1.12.1 // indirect
|
||||
github.com/jackc/pgio v1.0.0 // indirect
|
||||
@@ -121,16 +116,14 @@ require (
|
||||
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
|
||||
github.com/jackc/pgtype v1.11.0 // indirect
|
||||
github.com/jackc/pgx/v4 v4.16.1 // indirect
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
|
||||
github.com/jdkato/prose v1.1.0 // indirect
|
||||
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||
github.com/jinzhu/now v1.1.5 // indirect
|
||||
github.com/jmespath/go-jmespath v0.4.0 // indirect
|
||||
github.com/kevinburke/ssh_config v1.1.0 // indirect
|
||||
github.com/klauspost/compress v1.15.6 // indirect
|
||||
github.com/lib/pq v1.10.5 // indirect
|
||||
github.com/klauspost/compress v1.15.11 // indirect
|
||||
github.com/liamg/jfather v0.0.7 // indirect
|
||||
github.com/magiconair/properties v1.8.6 // indirect
|
||||
github.com/masahiro331/go-mvn-version v0.0.0-20210429150710-d3157d602a08 // indirect
|
||||
github.com/masahiro331/go-xfs-filesystem v0.0.0-20221127135739-051c25f1becd // indirect
|
||||
github.com/mattn/go-colorable v0.1.12 // indirect
|
||||
github.com/mattn/go-isatty v0.0.14 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.13 // indirect
|
||||
@@ -138,57 +131,52 @@ require (
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
|
||||
github.com/mitchellh/go-testing-interface v1.0.0 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/montanaflynn/stats v0.0.0-20151014174947-eeaced052adb // indirect
|
||||
github.com/nsf/termbox-go v1.1.1 // indirect
|
||||
github.com/opencontainers/go-digest v1.0.0 // indirect
|
||||
github.com/opencontainers/image-spec v1.0.3-0.20220303224323-02efb9a75ee1 // indirect
|
||||
github.com/opencontainers/image-spec v1.1.0-rc2 // indirect
|
||||
github.com/pelletier/go-toml v1.9.5 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.0.2 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.0.5 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/rivo/uniseg v0.3.1 // indirect
|
||||
github.com/rogpeppe/go-internal v1.8.1 // indirect
|
||||
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
||||
github.com/samber/lo v1.33.0 // indirect
|
||||
github.com/sergi/go-diff v1.2.0 // indirect
|
||||
github.com/shogo82148/go-shuffle v0.0.0-20170808115208-59829097ff3b // indirect
|
||||
github.com/spdx/tools-golang v0.3.0 // indirect
|
||||
github.com/spf13/afero v1.9.2 // indirect
|
||||
github.com/spf13/cast v1.5.0 // indirect
|
||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/spf13/viper v1.12.0 // indirect
|
||||
github.com/stretchr/objx v0.4.0 // indirect
|
||||
github.com/stretchr/testify v1.8.0 // indirect
|
||||
github.com/subosito/gotenv v1.4.0 // indirect
|
||||
github.com/spf13/viper v1.13.0 // indirect
|
||||
github.com/stretchr/objx v0.5.0 // indirect
|
||||
github.com/stretchr/testify v1.8.1 // indirect
|
||||
github.com/subosito/gotenv v1.4.1 // indirect
|
||||
github.com/ulikunitz/xz v0.5.10 // indirect
|
||||
github.com/xanzy/ssh-agent v0.3.0 // indirect
|
||||
go.opencensus.io v0.23.0 // indirect
|
||||
go.uber.org/atomic v1.7.0 // indirect
|
||||
go.uber.org/atomic v1.9.0 // indirect
|
||||
go.uber.org/goleak v1.1.12 // indirect
|
||||
go.uber.org/multierr v1.7.0 // indirect
|
||||
go.uber.org/zap v1.21.0 // indirect
|
||||
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa // indirect
|
||||
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect
|
||||
golang.org/x/net v0.0.0-20220802222814-0bcc04d9c69b // indirect
|
||||
golang.org/x/sys v0.0.0-20220731174439-a90be440212d // indirect
|
||||
golang.org/x/term v0.0.0-20220526004731-065cf7ba2467 // indirect
|
||||
golang.org/x/text v0.3.7 // indirect
|
||||
go.uber.org/multierr v1.8.0 // indirect
|
||||
go.uber.org/zap v1.23.0 // indirect
|
||||
golang.org/x/crypto v0.1.0 // indirect
|
||||
golang.org/x/mod v0.6.0 // indirect
|
||||
golang.org/x/net v0.1.0 // indirect
|
||||
golang.org/x/sys v0.1.0 // indirect
|
||||
golang.org/x/term v0.1.0 // indirect
|
||||
golang.org/x/text v0.4.0 // indirect
|
||||
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 // indirect
|
||||
gonum.org/v1/gonum v0.7.0 // indirect
|
||||
google.golang.org/api v0.81.0 // indirect
|
||||
google.golang.org/api v0.98.0 // indirect
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
google.golang.org/genproto v0.0.0-20220624142145-8cd45d7dbd1f // indirect
|
||||
google.golang.org/grpc v1.47.0 // indirect
|
||||
google.golang.org/protobuf v1.28.0 // indirect
|
||||
gopkg.in/ini.v1 v1.66.6 // indirect
|
||||
gopkg.in/neurosnap/sentences.v1 v1.0.6 // indirect
|
||||
gopkg.in/warnings.v0 v0.1.2 // indirect
|
||||
google.golang.org/genproto v0.0.0-20221018160656-63c7b68cfc55 // indirect
|
||||
google.golang.org/grpc v1.50.1 // indirect
|
||||
google.golang.org/protobuf v1.28.1 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
gorm.io/driver/mysql v1.3.5 // indirect
|
||||
gorm.io/driver/postgres v1.3.8 // indirect
|
||||
gorm.io/driver/sqlite v1.3.6 // indirect
|
||||
gorm.io/gorm v1.23.8 // indirect
|
||||
k8s.io/utils v0.0.0-20220210201930-3a6ce19ff2f9 // indirect
|
||||
k8s.io/utils v0.0.0-20220728103510-ee6ede2d64ed // indirect
|
||||
moul.io/http2curl v1.0.0 // indirect
|
||||
)
|
||||
|
||||
|
||||
Submodule integration updated: b40375c4df...a36b4595ee
@@ -45,6 +45,13 @@ func NewNormalLogger() Logger {
 	return Logger{Entry: logrus.Entry{Logger: logrus.New()}}
 }
 
+// NewNormalLogger creates normal logger
+func NewIODiscardLogger() Logger {
+	l := logrus.New()
+	l.Out = io.Discard
+	return Logger{Entry: logrus.Entry{Logger: l}}
+}
+
 // NewCustomLogger creates logrus
 func NewCustomLogger(debug, quiet, logToFile bool, logDir, logMsgAnsiColor, serverName string) Logger {
 	log := logrus.New()
@@ -146,7 +146,9 @@ var FindLockFiles = []string{
 	// gomod
 	ftypes.GoMod, ftypes.GoSum,
 	// java
-	ftypes.MavenPom, "*.jar", "*.war", "*.ear", "*.par",
+	ftypes.MavenPom, "*.jar", "*.war", "*.ear", "*.par", "*gradle.lockfile",
+	// C / C++
+	ftypes.ConanLock,
 }
 
 // GetLibraryKey returns target library key
@@ -160,7 +162,7 @@ func (s LibraryScanner) GetLibraryKey() string {
 		return "php"
 	case ftypes.GoBinary, ftypes.GoModule:
 		return "gomod"
-	case ftypes.Jar, ftypes.Pom:
+	case ftypes.Jar, ftypes.Pom, ftypes.Gradle:
 		return "java"
 	case ftypes.Npm, ftypes.Yarn, ftypes.Pnpm, ftypes.NodePkg, ftypes.JavaScript:
 		return "node"
@@ -168,6 +170,8 @@ func (s LibraryScanner) GetLibraryKey() string {
 		return ".net"
 	case ftypes.Pipenv, ftypes.Poetry, ftypes.Pip, ftypes.PythonPkg:
 		return "python"
+	case ftypes.ConanLock:
+		return "c"
 	default:
 		return ""
 	}
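
The leading * in the new "*gradle.lockfile" pattern also catches Gradle's per-classpath lockfile names. A hedged illustration using Go's shell-style matching; whether vuls evaluates these patterns with filepath.Match or an external find is not shown in this hunk, and the file names below are examples.

```go
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// "*" matches zero or more characters, so the pattern covers both the default
	// gradle.lockfile and variants such as buildscript-gradle.lockfile.
	for _, name := range []string{"gradle.lockfile", "buildscript-gradle.lockfile", "build.gradle"} {
		ok, err := filepath.Match("*gradle.lockfile", name)
		if err != nil {
			panic(err)
		}
		fmt.Println(name, ok) // true, true, false respectively
	}
}
```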
@@ -31,7 +31,7 @@ func (w EMailWriter) Write(rs ...models.ScanResult) (err error) {
 	if w.FormatOneEMail {
 		message += formatFullPlainText(r) + "\r\n\r\n"
 	mm := r.ScannedCves.CountGroupBySeverity()
-	keys := []string{"High", "Medium", "Low", "Unknown"}
+	keys := []string{"Critical", "High", "Medium", "Low", "Unknown"}
 	for _, k := range keys {
 		m[k] += mm[k]
 	}
@@ -60,9 +60,9 @@ func (w EMailWriter) Write(rs ...models.ScanResult) (err error) {
 		}
 	}
 
-	summary := fmt.Sprintf("Total: %d (High:%d Medium:%d Low:%d ?:%d)",
-		m["High"]+m["Medium"]+m["Low"]+m["Unknown"],
-		m["High"], m["Medium"], m["Low"], m["Unknown"])
+	summary := fmt.Sprintf("Total: %d (Critical:%d High:%d Medium:%d Low:%d ?:%d)",
+		m["Critical"]+m["High"]+m["Medium"]+m["Low"]+m["Unknown"],
+		m["Critical"], m["High"], m["Medium"], m["Low"], m["Unknown"])
 
 	origmessage := message
 	if w.FormatOneEMail {
@@ -2,24 +2,30 @@ package reporter
 
 import (
 	"encoding/json"
 	"fmt"
 	"os"
 	"path/filepath"
 
-	"github.com/future-architect/vuls/models"
+	"github.com/CycloneDX/cyclonedx-go"
 	"golang.org/x/xerrors"
+
+	"github.com/future-architect/vuls/models"
+	"github.com/future-architect/vuls/reporter/sbom"
 )
 
 // LocalFileWriter writes results to a local file.
 type LocalFileWriter struct {
-	CurrentDir        string
-	DiffPlus          bool
-	DiffMinus         bool
-	FormatJSON        bool
-	FormatCsv         bool
-	FormatFullText    bool
-	FormatOneLineText bool
-	FormatList        bool
-	Gzip              bool
+	CurrentDir          string
+	DiffPlus            bool
+	DiffMinus           bool
+	FormatJSON          bool
+	FormatCsv           bool
+	FormatFullText      bool
+	FormatOneLineText   bool
+	FormatList          bool
+	FormatCycloneDXJSON bool
+	FormatCycloneDXXML  bool
+	Gzip                bool
 }
 
 func (w LocalFileWriter) Write(rs ...models.ScanResult) (err error) {
@@ -86,6 +92,28 @@ func (w LocalFileWriter) Write(rs ...models.ScanResult) (err error) {
 			}
 		}
 
+		if w.FormatCycloneDXJSON {
+			bs, err := sbom.GenerateCycloneDX(cyclonedx.BOMFileFormatJSON, r)
+			if err != nil {
+				return xerrors.Errorf("Failed to generate CycloneDX JSON. err: %w", err)
+			}
+			p := fmt.Sprintf("%s_cyclonedx.json", path)
+			if err := w.writeFile(p, bs, 0600); err != nil {
+				return xerrors.Errorf("Failed to write CycloneDX JSON. path: %s, err: %w", p, err)
+			}
+		}
+
+		if w.FormatCycloneDXXML {
+			bs, err := sbom.GenerateCycloneDX(cyclonedx.BOMFileFormatXML, r)
+			if err != nil {
+				return xerrors.Errorf("Failed to generate CycloneDX XML. err: %w", err)
+			}
+			p := fmt.Sprintf("%s_cyclonedx.xml", path)
+			if err := w.writeFile(p, bs, 0600); err != nil {
+				return xerrors.Errorf("Failed to write CycloneDX XML. path: %s, err: %w", p, err)
+			}
+		}
+
 	}
 	return nil
 }
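
A hedged sketch of calling the new sbom.GenerateCycloneDX helper directly, outside LocalFileWriter. The function signature and the cyclonedx-go format constant come from this compare; reading a ScanResult back from a previously written JSON report, and the report path itself, are illustrative assumptions.

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"

	"github.com/CycloneDX/cyclonedx-go"

	"github.com/future-architect/vuls/models"
	"github.com/future-architect/vuls/reporter/sbom"
)

func main() {
	// Illustrative input: a JSON scan result previously written by vuls.
	raw, err := os.ReadFile("results/2022-11-18T10:00:00+09:00/localhost.json")
	if err != nil {
		panic(err)
	}
	var r models.ScanResult
	if err := json.Unmarshal(raw, &r); err != nil {
		panic(err)
	}

	// The same call the LocalFileWriter hunk above makes when CycloneDX JSON output is enabled.
	bs, err := sbom.GenerateCycloneDX(cyclonedx.BOMFileFormatJSON, r)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(bs))
}
```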
reporter/sbom/cyclonedx.go (510 changes, new file)
@@ -0,0 +1,510 @@
|
||||
package sbom
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
cdx "github.com/CycloneDX/cyclonedx-go"
|
||||
"github.com/google/uuid"
|
||||
"github.com/package-url/packageurl-go"
|
||||
"golang.org/x/exp/maps"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
func GenerateCycloneDX(format cdx.BOMFileFormat, r models.ScanResult) ([]byte, error) {
|
||||
bom := cdx.NewBOM()
|
||||
bom.SerialNumber = uuid.New().URN()
|
||||
bom.Metadata = cdxMetadata(r)
|
||||
bom.Components, bom.Dependencies, bom.Vulnerabilities = cdxComponents(r, bom.Metadata.Component.BOMRef)
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
enc := cdx.NewBOMEncoder(buf, format)
|
||||
enc.SetPretty(true)
|
||||
if err := enc.Encode(bom); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to encode CycloneDX. err: %w", err)
|
||||
}
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
func cdxMetadata(result models.ScanResult) *cdx.Metadata {
|
||||
metadata := cdx.Metadata{
|
||||
Timestamp: result.ReportedAt.Format(time.RFC3339),
|
||||
Tools: &[]cdx.Tool{
|
||||
{
|
||||
Vendor: "future-architect",
|
||||
Name: "vuls",
|
||||
Version: fmt.Sprintf("%s-%s", result.ReportedVersion, result.ReportedRevision),
|
||||
},
|
||||
},
|
||||
Component: &cdx.Component{
|
||||
BOMRef: uuid.NewString(),
|
||||
Type: cdx.ComponentTypeOS,
|
||||
Name: result.ServerName,
|
||||
},
|
||||
}
|
||||
return &metadata
|
||||
}
|
||||
|
||||
func cdxComponents(result models.ScanResult, metaBomRef string) (*[]cdx.Component, *[]cdx.Dependency, *[]cdx.Vulnerability) {
|
||||
var components []cdx.Component
|
||||
bomRefs := map[string][]string{}
|
||||
|
||||
ospkgToPURL := map[string]string{}
|
||||
if ospkgComps := ospkgToCdxComponents(result.Family, result.Release, result.RunningKernel, result.Packages, result.SrcPackages, ospkgToPURL); ospkgComps != nil {
|
||||
bomRefs[metaBomRef] = append(bomRefs[metaBomRef], ospkgComps[0].BOMRef)
|
||||
for _, comp := range ospkgComps[1:] {
|
||||
bomRefs[ospkgComps[0].BOMRef] = append(bomRefs[ospkgComps[0].BOMRef], comp.BOMRef)
|
||||
}
|
||||
components = append(components, ospkgComps...)
|
||||
}
|
||||
|
||||
if cpeComps := cpeToCdxComponents(result.ScannedCves); cpeComps != nil {
|
||||
bomRefs[metaBomRef] = append(bomRefs[metaBomRef], cpeComps[0].BOMRef)
|
||||
for _, comp := range cpeComps[1:] {
|
||||
bomRefs[cpeComps[0].BOMRef] = append(bomRefs[cpeComps[0].BOMRef], comp.BOMRef)
|
||||
}
|
||||
components = append(components, cpeComps...)
|
||||
}
|
||||
|
||||
libpkgToPURL := map[string]map[string]string{}
|
||||
for _, libscanner := range result.LibraryScanners {
|
||||
libpkgToPURL[libscanner.LockfilePath] = map[string]string{}
|
||||
|
||||
libpkgComps := libpkgToCdxComponents(libscanner, libpkgToPURL)
|
||||
bomRefs[metaBomRef] = append(bomRefs[metaBomRef], libpkgComps[0].BOMRef)
|
||||
for _, comp := range libpkgComps[1:] {
|
||||
bomRefs[libpkgComps[0].BOMRef] = append(bomRefs[libpkgComps[0].BOMRef], comp.BOMRef)
|
||||
}
|
||||
components = append(components, libpkgComps...)
|
||||
}
|
||||
|
||||
wppkgToPURL := map[string]string{}
|
||||
if wppkgComps := wppkgToCdxComponents(result.WordPressPackages, wppkgToPURL); wppkgComps != nil {
|
||||
bomRefs[metaBomRef] = append(bomRefs[metaBomRef], wppkgComps[0].BOMRef)
|
||||
for _, comp := range wppkgComps[1:] {
|
||||
bomRefs[wppkgComps[0].BOMRef] = append(bomRefs[wppkgComps[0].BOMRef], comp.BOMRef)
|
||||
}
|
||||
components = append(components, wppkgComps...)
|
||||
}
|
||||
|
||||
return &components, cdxDependencies(bomRefs), cdxVulnerabilities(result, ospkgToPURL, libpkgToPURL, wppkgToPURL)
|
||||
}
|
||||
|
||||
func osToCdxComponent(family, release, runningKernelRelease, runningKernelVersion string) cdx.Component {
|
||||
props := []cdx.Property{
|
||||
{
|
||||
Name: "future-architect:vuls:Type",
|
||||
Value: "Package",
|
||||
},
|
||||
}
|
||||
if runningKernelRelease != "" {
|
||||
props = append(props, cdx.Property{
|
||||
Name: "RunningKernelRelease",
|
||||
Value: runningKernelRelease,
|
||||
})
|
||||
}
|
||||
if runningKernelVersion != "" {
|
||||
props = append(props, cdx.Property{
|
||||
Name: "RunningKernelVersion",
|
||||
Value: runningKernelVersion,
|
||||
})
|
||||
}
|
||||
return cdx.Component{
|
||||
BOMRef: uuid.NewString(),
|
||||
Type: cdx.ComponentTypeOS,
|
||||
Name: family,
|
||||
Version: release,
|
||||
Properties: &props,
|
||||
}
|
||||
}
|
||||
|
||||
func ospkgToCdxComponents(family, release string, runningKernel models.Kernel, binpkgs models.Packages, srcpkgs models.SrcPackages, ospkgToPURL map[string]string) []cdx.Component {
|
||||
if family == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
components := []cdx.Component{
|
||||
osToCdxComponent(family, release, runningKernel.Release, runningKernel.Version),
|
||||
}
|
||||
|
||||
if len(binpkgs) == 0 {
|
||||
return components
|
||||
}
|
||||
|
||||
type srcpkg struct {
|
||||
name string
|
||||
version string
|
||||
arch string
|
||||
}
|
||||
binToSrc := map[string]srcpkg{}
|
||||
for _, pack := range srcpkgs {
|
||||
for _, binpkg := range pack.BinaryNames {
|
||||
binToSrc[binpkg] = srcpkg{
|
||||
name: pack.Name,
|
||||
version: pack.Version,
|
||||
arch: pack.Arch,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, pack := range binpkgs {
|
||||
var props []cdx.Property
|
||||
if p, ok := binToSrc[pack.Name]; ok {
|
||||
if p.name != "" {
|
||||
props = append(props, cdx.Property{
|
||||
Name: "future-architect:vuls:SrcName",
|
||||
Value: p.name,
|
||||
})
|
||||
}
|
||||
if p.version != "" {
|
||||
props = append(props, cdx.Property{
|
||||
Name: "future-architect:vuls:SrcVersion",
|
||||
Value: p.version,
|
||||
})
|
||||
}
|
||||
if p.arch != "" {
|
||||
props = append(props, cdx.Property{
|
||||
Name: "future-architect:vuls:SrcArch",
|
||||
Value: p.arch,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
purl := toPkgPURL(family, release, pack.Name, pack.Version, pack.Release, pack.Arch, pack.Repository)
|
||||
components = append(components, cdx.Component{
|
||||
BOMRef: purl,
|
||||
Type: cdx.ComponentTypeLibrary,
|
||||
Name: pack.Name,
|
||||
Version: pack.Version,
|
||||
PackageURL: purl,
|
||||
Properties: &props,
|
||||
})
|
||||
|
||||
ospkgToPURL[pack.Name] = purl
|
||||
}
|
||||
return components
|
||||
}
|
||||
|
||||
func cpeToCdxComponents(scannedCves models.VulnInfos) []cdx.Component {
|
||||
cpes := map[string]struct{}{}
|
||||
for _, cve := range scannedCves {
|
||||
for _, cpe := range cve.CpeURIs {
|
||||
cpes[cpe] = struct{}{}
|
||||
}
|
||||
}
|
||||
if len(cpes) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
components := []cdx.Component{
|
||||
{
|
||||
BOMRef: uuid.NewString(),
|
||||
Type: cdx.ComponentTypeApplication,
|
||||
Name: "CPEs",
|
||||
Properties: &[]cdx.Property{
|
||||
{
|
||||
Name: "future-architect:vuls:Type",
|
||||
Value: "CPE",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for cpe := range cpes {
|
||||
components = append(components, cdx.Component{
|
||||
BOMRef: cpe,
|
||||
Type: cdx.ComponentTypeLibrary,
|
||||
Name: cpe,
|
||||
CPE: cpe,
|
||||
})
|
||||
}
|
||||
|
||||
return components
|
||||
}
|
||||
|
||||
func libpkgToCdxComponents(libscanner models.LibraryScanner, libpkgToPURL map[string]map[string]string) []cdx.Component {
|
||||
components := []cdx.Component{
|
||||
{
|
||||
BOMRef: uuid.NewString(),
|
||||
Type: cdx.ComponentTypeApplication,
|
||||
Name: libscanner.LockfilePath,
|
||||
Properties: &[]cdx.Property{
|
||||
{
|
||||
Name: "future-architect:vuls:Type",
|
||||
Value: libscanner.Type,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, lib := range libscanner.Libs {
|
||||
purl := packageurl.NewPackageURL(libscanner.Type, "", lib.Name, lib.Version, packageurl.Qualifiers{{Key: "file_path", Value: libscanner.LockfilePath}}, "").ToString()
|
||||
components = append(components, cdx.Component{
|
||||
BOMRef: purl,
|
||||
Type: cdx.ComponentTypeLibrary,
|
||||
Name: lib.Name,
|
||||
Version: lib.Version,
|
||||
PackageURL: purl,
|
||||
})
|
||||
|
||||
libpkgToPURL[libscanner.LockfilePath][lib.Name] = purl
|
||||
}
|
||||
|
||||
return components
|
||||
}
|
||||
|
||||
func wppkgToCdxComponents(wppkgs models.WordPressPackages, wppkgToPURL map[string]string) []cdx.Component {
|
||||
if len(wppkgs) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
components := []cdx.Component{
|
||||
{
|
||||
BOMRef: uuid.NewString(),
|
||||
Type: cdx.ComponentTypeApplication,
|
||||
Name: "wordpress",
|
||||
Properties: &[]cdx.Property{
|
||||
{
|
||||
Name: "future-architect:vuls:Type",
|
||||
Value: "WordPress",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, wppkg := range wppkgs {
|
||||
purl := packageurl.NewPackageURL("wordpress", wppkg.Type, wppkg.Name, wppkg.Version, packageurl.Qualifiers{{Key: "status", Value: wppkg.Status}}, "").ToString()
|
||||
components = append(components, cdx.Component{
|
||||
BOMRef: purl,
|
||||
Type: cdx.ComponentTypeLibrary,
|
||||
Name: wppkg.Name,
|
||||
Version: wppkg.Version,
|
||||
PackageURL: purl,
|
||||
})
|
||||
|
||||
wppkgToPURL[wppkg.Name] = purl
|
||||
}
|
||||
|
||||
return components
|
||||
}
|
||||
|
||||
func cdxDependencies(bomRefs map[string][]string) *[]cdx.Dependency {
|
||||
dependencies := make([]cdx.Dependency, 0, len(bomRefs))
|
||||
for ref, depRefs := range bomRefs {
|
||||
ds := depRefs
|
||||
dependencies = append(dependencies, cdx.Dependency{
|
||||
Ref: ref,
|
||||
Dependencies: &ds,
|
||||
})
|
||||
}
|
||||
return &dependencies
|
||||
}
|
||||
|
||||
func toPkgPURL(osFamily, osVersion, packName, packVersion, packRelease, packArch, packRepository string) string {
|
||||
var purlType string
|
||||
switch osFamily {
|
||||
case constant.Alma, constant.Amazon, constant.CentOS, constant.Fedora, constant.OpenSUSE, constant.OpenSUSELeap, constant.Oracle, constant.RedHat, constant.Rocky, constant.SUSEEnterpriseDesktop, constant.SUSEEnterpriseServer:
|
||||
purlType = "rpm"
|
||||
case constant.Alpine:
|
||||
purlType = "apk"
|
||||
case constant.Debian, constant.Raspbian, constant.Ubuntu:
|
||||
purlType = "deb"
|
||||
case constant.FreeBSD:
|
||||
purlType = "pkg"
|
||||
case constant.Windows:
|
||||
purlType = "win"
|
||||
case constant.ServerTypePseudo:
|
||||
purlType = "pseudo"
|
||||
default:
|
||||
purlType = "unknown"
|
||||
}
|
||||
|
||||
version := packVersion
|
||||
if packRelease != "" {
|
||||
version = fmt.Sprintf("%s-%s", packVersion, packRelease)
|
||||
}
|
||||
|
||||
var qualifiers packageurl.Qualifiers
|
||||
if osVersion != "" {
|
||||
qualifiers = append(qualifiers, packageurl.Qualifier{
|
||||
Key: "distro",
|
||||
Value: osVersion,
|
||||
})
|
||||
}
|
||||
if packArch != "" {
|
||||
qualifiers = append(qualifiers, packageurl.Qualifier{
|
||||
Key: "arch",
|
||||
Value: packArch,
|
||||
})
|
||||
}
|
||||
if packRepository != "" {
|
||||
qualifiers = append(qualifiers, packageurl.Qualifier{
|
||||
Key: "repo",
|
||||
Value: packRepository,
|
||||
})
|
||||
}
|
||||
|
||||
return packageurl.NewPackageURL(purlType, osFamily, packName, version, qualifiers, "").ToString()
|
||||
}
|
||||
|
||||
func cdxVulnerabilities(result models.ScanResult, ospkgToPURL map[string]string, libpkgToPURL map[string]map[string]string, wppkgToPURL map[string]string) *[]cdx.Vulnerability {
|
||||
vulnerabilities := make([]cdx.Vulnerability, 0, len(result.ScannedCves))
|
||||
for _, cve := range result.ScannedCves {
|
||||
vulnerabilities = append(vulnerabilities, cdx.Vulnerability{
|
||||
ID: cve.CveID,
|
||||
Ratings: cdxRatings(cve.CveContents),
|
||||
CWEs: cdxCWEs(cve.CveContents),
|
||||
Description: cdxDescription(cve.CveContents),
|
||||
Advisories: cdxAdvisories(cve.CveContents),
|
||||
Affects: cdxAffects(cve, ospkgToPURL, libpkgToPURL, wppkgToPURL),
|
||||
})
|
||||
}
|
||||
return &vulnerabilities
|
||||
}
|
||||
|
||||
func cdxRatings(cveContents models.CveContents) *[]cdx.VulnerabilityRating {
|
||||
var ratings []cdx.VulnerabilityRating
|
||||
for _, contents := range cveContents {
|
||||
for _, content := range contents {
|
||||
if content.Cvss2Score != 0 || content.Cvss2Vector != "" || content.Cvss2Severity != "" {
|
||||
ratings = append(ratings, cdxCVSS2Rating(string(content.Type), content.Cvss2Vector, content.Cvss2Score, content.Cvss2Severity))
|
||||
}
|
||||
if content.Cvss3Score != 0 || content.Cvss3Vector != "" || content.Cvss3Severity != "" {
|
||||
ratings = append(ratings, cdxCVSS3Rating(string(content.Type), content.Cvss3Vector, content.Cvss3Score, content.Cvss3Severity))
|
||||
}
|
||||
}
|
||||
}
|
||||
return &ratings
|
||||
}
|
||||
|
||||
func cdxCVSS2Rating(source, vector string, score float64, severity string) cdx.VulnerabilityRating {
|
||||
r := cdx.VulnerabilityRating{
|
||||
Source: &cdx.Source{Name: source},
|
||||
Method: cdx.ScoringMethodCVSSv2,
|
||||
Vector: vector,
|
||||
}
|
||||
if score != 0 {
|
||||
r.Score = &score
|
||||
}
|
||||
switch strings.ToLower(severity) {
|
||||
case "high":
|
||||
r.Severity = cdx.SeverityHigh
|
||||
case "medium":
|
||||
r.Severity = cdx.SeverityMedium
|
||||
case "low":
|
||||
r.Severity = cdx.SeverityLow
|
||||
default:
|
||||
r.Severity = cdx.SeverityUnknown
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func cdxCVSS3Rating(source, vector string, score float64, severity string) cdx.VulnerabilityRating {
|
||||
r := cdx.VulnerabilityRating{
|
||||
Source: &cdx.Source{Name: source},
|
||||
Method: cdx.ScoringMethodCVSSv3,
|
||||
Vector: vector,
|
||||
}
|
||||
if strings.HasPrefix(vector, "CVSS:3.1") {
|
||||
r.Method = cdx.ScoringMethodCVSSv31
|
||||
}
|
||||
if score != 0 {
|
||||
r.Score = &score
|
||||
}
|
||||
switch strings.ToLower(severity) {
|
||||
case "critical":
|
||||
r.Severity = cdx.SeverityCritical
|
||||
case "high":
|
||||
r.Severity = cdx.SeverityHigh
|
||||
case "medium":
|
||||
r.Severity = cdx.SeverityMedium
|
||||
case "low":
|
||||
r.Severity = cdx.SeverityLow
|
||||
case "none":
|
||||
r.Severity = cdx.SeverityNone
|
||||
default:
|
||||
r.Severity = cdx.SeverityUnknown
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func cdxAffects(cve models.VulnInfo, ospkgToPURL map[string]string, libpkgToPURL map[string]map[string]string, wppkgToPURL map[string]string) *[]cdx.Affects {
|
||||
affects := make([]cdx.Affects, 0, len(cve.AffectedPackages)+len(cve.CpeURIs)+len(cve.LibraryFixedIns)+len(cve.WpPackageFixStats))
|
||||
|
||||
for _, p := range cve.AffectedPackages {
|
||||
affects = append(affects, cdx.Affects{
|
||||
Ref: ospkgToPURL[p.Name],
|
||||
})
|
||||
}
|
||||
for _, cpe := range cve.CpeURIs {
|
||||
affects = append(affects, cdx.Affects{
|
||||
Ref: cpe,
|
||||
})
|
||||
}
|
||||
for _, lib := range cve.LibraryFixedIns {
|
||||
affects = append(affects, cdx.Affects{
|
||||
Ref: libpkgToPURL[lib.Path][lib.Name],
|
||||
})
|
||||
|
||||
}
|
||||
for _, wppack := range cve.WpPackageFixStats {
|
||||
affects = append(affects, cdx.Affects{
|
||||
Ref: wppkgToPURL[wppack.Name],
|
||||
})
|
||||
}
|
||||
|
||||
return &affects
|
||||
}
|
||||
|
||||
func cdxCWEs(cveContents models.CveContents) *[]int {
	m := map[int]struct{}{}
	for _, contents := range cveContents {
		for _, content := range contents {
			for _, cweID := range content.CweIDs {
				if !strings.HasPrefix(cweID, "CWE-") {
					continue
				}
				i, err := strconv.Atoi(strings.TrimPrefix(cweID, "CWE-"))
				if err != nil {
					continue
				}
				m[i] = struct{}{}
			}
		}
	}
	cweIDs := maps.Keys(m)
	return &cweIDs
}

func cdxDescription(cveContents models.CveContents) string {
	if contents, ok := cveContents[models.Nvd]; ok {
		return contents[0].Summary
	}
	return ""
}

func cdxAdvisories(cveContents models.CveContents) *[]cdx.Advisory {
	urls := map[string]struct{}{}
	for _, contents := range cveContents {
		for _, content := range contents {
			if content.SourceLink != "" {
				urls[content.SourceLink] = struct{}{}
			}
			for _, r := range content.References {
				urls[r.Link] = struct{}{}
			}
		}
	}
	advisories := make([]cdx.Advisory, 0, len(urls))
	for u := range urls {
		advisories = append(advisories, cdx.Advisory{
			URL: u,
		})
	}
	return &advisories
}
@@ -35,10 +35,10 @@ type message struct {

func (w SlackWriter) Write(rs ...models.ScanResult) (err error) {

	channel := w.Cnf.Channel
	for _, r := range rs {
		w.lang, w.osFamily = r.Lang, r.Family
		if channel == "${servername}" {
		channel := w.Cnf.Channel
		if w.Cnf.Channel == "${servername}" {
			channel = fmt.Sprintf("#%s", r.ServerName)
		}

@@ -103,6 +103,9 @@ func writeToFile(cnf config.Config, path string) error {
	if cnf.Default.WordPress != nil && cnf.Default.WordPress.IsZero() {
		cnf.Default.WordPress = nil
	}
	if cnf.Default.PortScan != nil && cnf.Default.PortScan.IsZero() {
		cnf.Default.PortScan = nil
	}

	c := struct {
		Saas *config.SaasConf `toml:"saas"`
@@ -198,5 +201,11 @@ func cleanForTOMLEncoding(server config.ServerInfo, def config.ServerInfo) confi
		}
	}

	if server.PortScan != nil {
		if server.PortScan.IsZero() || reflect.DeepEqual(server.PortScan, def.PortScan) {
			server.PortScan = nil
		}
	}

	return server
}

283
scanner/base.go
@@ -28,10 +28,12 @@ import (
	"golang.org/x/xerrors"

	// Import library scanner
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/c/conan"
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/dotnet/deps"
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/dotnet/nuget"
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/golang/binary"
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/golang/mod"
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/gradle"
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/jar"
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/pom"
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/nodejs/npm"
@@ -361,7 +363,6 @@ func (l *base) detectPlatform() {

	//TODO Azure, GCP...
	l.setPlatform(models.Platform{Name: "other"})
	return
}

var dsFingerPrintPrefix = "AgentStatus.agentCertHash: "
@@ -398,10 +399,24 @@ func (l *base) detectRunningOnAws() (ok bool, instanceID string, err error) {
	r := l.exec(cmd, noSudo)
	if r.isSuccess() {
		id := strings.TrimSpace(r.Stdout)
		if !l.isAwsInstanceID(id) {
			return false, "", nil
		if l.isAwsInstanceID(id) {
			return true, id, nil
		}
	}

	cmd = "curl -X PUT --max-time 1 --noproxy 169.254.169.254 -H \"X-aws-ec2-metadata-token-ttl-seconds: 300\" http://169.254.169.254/latest/api/token"
	r = l.exec(cmd, noSudo)
	if r.isSuccess() {
		token := strings.TrimSpace(r.Stdout)
		cmd = fmt.Sprintf("curl -H \"X-aws-ec2-metadata-token: %s\" --max-time 1 --noproxy 169.254.169.254 http://169.254.169.254/latest/meta-data/instance-id", token)
		r = l.exec(cmd, noSudo)
		if r.isSuccess() {
			id := strings.TrimSpace(r.Stdout)
			if !l.isAwsInstanceID(id) {
				return false, "", nil
			}
			return true, id, nil
		}
		return true, id, nil
	}

	switch r.ExitStatus {
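The curl commands added above follow the IMDSv2 handshake: a PUT mints a short-lived session token, then a GET fetches the instance ID with that token in a header. Below is a standalone sketch of the same exchange in Go; the endpoint, header names, and 1-second timeout mirror the commands above, while the HTTP client code and error handling are only an illustration, not part of the scanner.

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: 1 * time.Second}

	// Step 1: request a session token (IMDSv2).
	req, _ := http.NewRequest(http.MethodPut, "http://169.254.169.254/latest/api/token", nil)
	req.Header.Set("X-aws-ec2-metadata-token-ttl-seconds", "300")
	resp, err := client.Do(req)
	if err != nil {
		fmt.Println("not on EC2 (or IMDS unreachable):", err)
		return
	}
	token, _ := io.ReadAll(resp.Body)
	resp.Body.Close()

	// Step 2: use the token to read the instance ID.
	req, _ = http.NewRequest(http.MethodGet, "http://169.254.169.254/latest/meta-data/instance-id", nil)
	req.Header.Set("X-aws-ec2-metadata-token", string(token))
	resp, err = client.Do(req)
	if err != nil {
		fmt.Println("metadata request failed:", err)
		return
	}
	defer resp.Body.Close()
	id, _ := io.ReadAll(resp.Body)
	fmt.Println("instance-id:", string(id))
}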
@@ -568,12 +583,6 @@ func (l *base) parseSystemctlStatus(stdout string) string {
	return ss[1]
}

// LibFile : library file content
type LibFile struct {
	Contents []byte
	Filemode os.FileMode
}

func (l *base) scanLibraries() (err error) {
	if len(l.LibraryScanners) != 0 {
		return nil
@@ -584,11 +593,16 @@ func (l *base) scanLibraries() (err error) {
		return nil
	}

	l.log.Info("Scanning Lockfile...")
	l.log.Info("Scanning Language-specific Packages...")

	libFilemap := map[string]LibFile{}
	found := map[string]bool{}
	detectFiles := l.ServerInfo.Lockfiles

	priv := noSudo
	if l.getServerInfo().Mode.IsFastRoot() || l.getServerInfo().Mode.IsDeep() {
		priv = sudo
	}

	// auto detect lockfile
	if l.ServerInfo.FindLock {
		findopt := ""
@@ -596,10 +610,18 @@ func (l *base) scanLibraries() (err error) {
			findopt += fmt.Sprintf("-name %q -o ", filename)
		}

		dir := "/"
		if len(l.ServerInfo.FindLockDirs) != 0 {
			dir = strings.Join(l.ServerInfo.FindLockDirs, " ")
		} else {
			l.log.Infof("It's recommended to specify FindLockDirs in config.toml. If FindLockDirs is not specified, all directories under / will be searched, which may increase CPU load")
		}
		l.log.Infof("Finding files under %s", dir)

		// delete last "-o "
		// find / -type f -and \( -name "package-lock.json" -o -name "yarn.lock" ... \) 2>&1 | grep -v "find: "
		cmd := fmt.Sprintf(`find / -type f -and \( ` + findopt[:len(findopt)-3] + ` \) 2>&1 | grep -v "find: "`)
		r := exec(l.ServerInfo, cmd, noSudo)
		cmd := fmt.Sprintf(`find %s -type f -and \( `+findopt[:len(findopt)-3]+` \) 2>&1 | grep -v "find: "`, dir)
		r := exec(l.ServerInfo, cmd, priv)
		if r.ExitStatus != 0 && r.ExitStatus != 1 {
			return xerrors.Errorf("Failed to find lock files")
		}
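As a rough illustration of the command assembled above, with hypothetical FindLockDirs of /var/www and /opt/app and only two detectable filenames, the Sprintf produces a find command restricted to those trees instead of the whole filesystem. The directory names and filename list below are invented for the example; only the string-building logic mirrors scanLibraries.

package main

import (
	"fmt"
	"strings"
)

func main() {
	findLockDirs := []string{"/var/www", "/opt/app"}            // hypothetical config values
	detectFiles := []string{"package-lock.json", "yarn.lock"}   // a small subset of the detectable filenames

	findopt := ""
	for _, filename := range detectFiles {
		findopt += fmt.Sprintf("-name %q -o ", filename)
	}

	dir := "/"
	if len(findLockDirs) != 0 {
		dir = strings.Join(findLockDirs, " ")
	}

	// delete last "-o ", exactly as in scanLibraries
	cmd := fmt.Sprintf(`find %s -type f -and \( `+findopt[:len(findopt)-3]+` \) 2>&1 | grep -v "find: "`, dir)
	fmt.Println(cmd)
	// prints roughly:
	// find /var/www /opt/app -type f -and \( -name "package-lock.json" -o -name "yarn.lock"  \) 2>&1 | grep -v "find: "
}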
@@ -616,154 +638,167 @@ func (l *base) scanLibraries() (err error) {
		}

		// skip already exist
		if _, ok := libFilemap[path]; ok {
		if _, ok := found[path]; ok {
			continue
		}

		var f LibFile
		var contents []byte
		var filemode os.FileMode

		switch l.Distro.Family {
		case constant.ServerTypePseudo:
			fileinfo, err := os.Stat(path)
			if err != nil {
				return xerrors.Errorf("Failed to get target file info. err: %w, filepath: %s", err, path)
				l.log.Warnf("Failed to get target file info. err: %s, filepath: %s", err, path)
				continue
			}
			f.Filemode = fileinfo.Mode().Perm()
			f.Contents, err = os.ReadFile(path)
			filemode = fileinfo.Mode().Perm()
			contents, err = os.ReadFile(path)
			if err != nil {
				return xerrors.Errorf("Failed to read target file contents. err: %w, filepath: %s", err, path)
				l.log.Warnf("Failed to read target file contents. err: %s, filepath: %s", err, path)
				continue
			}
		default:
			l.log.Debugf("Analyzing file: %s", path)
			cmd := fmt.Sprintf(`stat -c "%%a" %s`, path)
			r := exec(l.ServerInfo, cmd, noSudo)
			r := exec(l.ServerInfo, cmd, priv, logging.NewIODiscardLogger())
			if !r.isSuccess() {
				return xerrors.Errorf("Failed to get target file permission: %s, filepath: %s", r, path)
				l.log.Warnf("Failed to get target file permission: %s, filepath: %s", r, path)
				continue
			}
			permStr := fmt.Sprintf("0%s", strings.ReplaceAll(r.Stdout, "\n", ""))
			perm, err := strconv.ParseUint(permStr, 8, 32)
			if err != nil {
				return xerrors.Errorf("Failed to parse permission string. err: %w, permission string: %s", err, permStr)
				l.log.Warnf("Failed to parse permission string. err: %s, permission string: %s", err, permStr)
				continue
			}
			f.Filemode = os.FileMode(perm)
			filemode = os.FileMode(perm)

			cmd = fmt.Sprintf("cat %s", path)
			r = exec(l.ServerInfo, cmd, noSudo)
			r = exec(l.ServerInfo, cmd, priv, logging.NewIODiscardLogger())
			if !r.isSuccess() {
				return xerrors.Errorf("Failed to get target file contents: %s, filepath: %s", r, path)
				l.log.Warnf("Failed to get target file contents: %s, filepath: %s", r, path)
				continue
			}
			f.Contents = []byte(r.Stdout)
			contents = []byte(r.Stdout)
		}
		libFilemap[path] = f
		found[path] = true
		var libraryScanners []models.LibraryScanner
		if libraryScanners, err = AnalyzeLibrary(context.Background(), path, contents, filemode, l.ServerInfo.Mode.IsOffline()); err != nil {
			return err
		}
		l.LibraryScanners = append(l.LibraryScanners, libraryScanners...)
	}

	var libraryScanners []models.LibraryScanner
	if libraryScanners, err = AnalyzeLibraries(context.Background(), libFilemap, l.ServerInfo.Mode.IsOffline()); err != nil {
		return err
	}
	l.LibraryScanners = append(l.LibraryScanners, libraryScanners...)
	return nil
}

// AnalyzeLibraries : detects libs defined in lockfile
func AnalyzeLibraries(ctx context.Context, libFilemap map[string]LibFile, isOffline bool) (libraryScanners []models.LibraryScanner, err error) {
	// https://github.com/aquasecurity/trivy/blob/84677903a6fa1b707a32d0e8b2bffc23dde52afa/pkg/fanal/analyzer/const.go
	disabledAnalyzers := []analyzer.Type{
		// ======
		// OS
		// ======
		analyzer.TypeOSRelease,
		analyzer.TypeAlpine,
		analyzer.TypeAmazon,
		analyzer.TypeCBLMariner,
		analyzer.TypeDebian,
		analyzer.TypePhoton,
		analyzer.TypeCentOS,
		analyzer.TypeRocky,
		analyzer.TypeAlma,
		analyzer.TypeFedora,
		analyzer.TypeOracle,
		analyzer.TypeRedHatBase,
		analyzer.TypeSUSE,
		analyzer.TypeUbuntu,

		// OS Package
		analyzer.TypeApk,
		analyzer.TypeDpkg,
		analyzer.TypeDpkgLicense,
		analyzer.TypeRpm,
		analyzer.TypeRpmqa,

		// OS Package Repository
		analyzer.TypeApkRepo,

		// ============
		// Image Config
		// ============
		analyzer.TypeApkCommand,

		// =================
		// Structured Config
		// =================
		analyzer.TypeYaml,
		analyzer.TypeJSON,
		analyzer.TypeDockerfile,
		analyzer.TypeTerraform,
		analyzer.TypeCloudFormation,
		analyzer.TypeHelm,

		// ========
		// License
		// ========
		analyzer.TypeLicenseFile,

		// ========
		// Secrets
		// ========
		analyzer.TypeSecret,

		// =======
		// Red Hat
		// =======
		analyzer.TypeRedHatContentManifestType,
		analyzer.TypeRedHatDockerfileType,
// AnalyzeLibrary : detects library defined in artifact such as lockfile or jar
func AnalyzeLibrary(ctx context.Context, path string, contents []byte, filemode os.FileMode, isOffline bool) (libraryScanners []models.LibraryScanner, err error) {
	anal, err := analyzer.NewAnalyzerGroup(analyzer.AnalyzerOptions{
		Group: analyzer.GroupBuiltin,
		DisabledAnalyzers: disabledAnalyzers,
	})
	if err != nil {
		return nil, xerrors.Errorf("Failed to new analyzer group. err: %w", err)
	}
	anal := analyzer.NewAnalyzerGroup(analyzer.GroupBuiltin, disabledAnalyzers)

	for path, f := range libFilemap {
		var wg sync.WaitGroup
		result := new(analyzer.AnalysisResult)
		if err := anal.AnalyzeFile(
			ctx,
			&wg,
			semaphore.NewWeighted(1),
			result,
			"",
			path,
			&DummyFileInfo{size: int64(len(f.Contents)), filemode: f.Filemode},
			func() (dio.ReadSeekCloserAt, error) { return dio.NopCloser(bytes.NewReader(f.Contents)), nil },
			nil,
			analyzer.AnalysisOptions{Offline: isOffline},
		); err != nil {
			return nil, xerrors.Errorf("Failed to get libs. err: %w", err)
		}
		wg.Wait()

		libscan, err := convertLibWithScanner(result.Applications)
		if err != nil {
			return nil, xerrors.Errorf("Failed to convert libs. err: %w", err)
		}
		libraryScanners = append(libraryScanners, libscan...)
	var wg sync.WaitGroup
	result := new(analyzer.AnalysisResult)
	if err := anal.AnalyzeFile(
		ctx,
		&wg,
		semaphore.NewWeighted(1),
		result,
		"",
		path,
		&DummyFileInfo{name: filepath.Base(path), size: int64(len(contents)), filemode: filemode},
		func() (dio.ReadSeekCloserAt, error) { return dio.NopCloser(bytes.NewReader(contents)), nil },
		nil,
		analyzer.AnalysisOptions{Offline: isOffline},
	); err != nil {
		return nil, xerrors.Errorf("Failed to get libs. err: %w", err)
	}
	wg.Wait()

	libscan, err := convertLibWithScanner(result.Applications)
	if err != nil {
		return nil, xerrors.Errorf("Failed to convert libs. err: %w", err)
	}
	libraryScanners = append(libraryScanners, libscan...)
	return libraryScanners, nil
}

// https://github.com/aquasecurity/trivy/blob/84677903a6fa1b707a32d0e8b2bffc23dde52afa/pkg/fanal/analyzer/const.go
var disabledAnalyzers = []analyzer.Type{
	// ======
	// OS
	// ======
	analyzer.TypeOSRelease,
	analyzer.TypeAlpine,
	analyzer.TypeAmazon,
	analyzer.TypeCBLMariner,
	analyzer.TypeDebian,
	analyzer.TypePhoton,
	analyzer.TypeCentOS,
	analyzer.TypeRocky,
	analyzer.TypeAlma,
	analyzer.TypeFedora,
	analyzer.TypeOracle,
	analyzer.TypeRedHatBase,
	analyzer.TypeSUSE,
	analyzer.TypeUbuntu,

	// OS Package
	analyzer.TypeApk,
	analyzer.TypeDpkg,
	analyzer.TypeDpkgLicense,
	analyzer.TypeRpm,
	analyzer.TypeRpmqa,

	// OS Package Repository
	analyzer.TypeApkRepo,

	// ============
	// Image Config
	// ============
	analyzer.TypeApkCommand,

	// =================
	// Structured Config
	// =================
	analyzer.TypeYaml,
	analyzer.TypeJSON,
	analyzer.TypeDockerfile,
	analyzer.TypeTerraform,
	analyzer.TypeCloudFormation,
	analyzer.TypeHelm,

	// ========
	// License
	// ========
	analyzer.TypeLicenseFile,

	// ========
	// Secrets
	// ========
	analyzer.TypeSecret,

	// =======
	// Red Hat
	// =======
	analyzer.TypeRedHatContentManifestType,
	analyzer.TypeRedHatDockerfileType,
}

// DummyFileInfo is a dummy struct for libscan
type DummyFileInfo struct {
	name     string
	size     int64
	filemode os.FileMode
}

// Name is
func (d *DummyFileInfo) Name() string { return "dummy" }
func (d *DummyFileInfo) Name() string { return d.name }

// Size is
func (d *DummyFileInfo) Size() int64 { return d.size }
@@ -771,13 +806,13 @@ func (d *DummyFileInfo) Size() int64 { return d.size }
// Mode is
func (d *DummyFileInfo) Mode() os.FileMode { return d.filemode }

//ModTime is
// ModTime is
func (d *DummyFileInfo) ModTime() time.Time { return time.Now() }

// IsDir is
func (d *DummyFileInfo) IsDir() bool { return false }

//Sys is
// Sys is
func (d *DummyFileInfo) Sys() interface{} { return nil }

func (l *base) scanWordPress() error {

@@ -1155,7 +1155,7 @@ func (o *debian) checkrestart() error {
			o.Packages[p.Name] = pack

			for j, proc := range p.NeedRestartProcs {
				if proc.HasInit == false {
				if !proc.HasInit {
					continue
				}
				packs[i].NeedRestartProcs[j].InitSystem = initName

@@ -128,7 +128,6 @@ func parallelExec(fn func(osTypeInterface) error, timeoutSec ...int) {
		}
	}
	servers = successes
	return
}

func exec(c config.ServerInfo, cmd string, sudo bool, log ...logging.Logger) (result execResult) {

@@ -200,68 +200,64 @@ func detectRedhat(c config.ServerInfo) (bool, osTypeInterface) {
	// Fedora release 35 (Thirty Five)
	if r := exec(c, "cat /etc/redhat-release", noSudo); r.isSuccess() {
		result := releasePattern.FindStringSubmatch(strings.TrimSpace(r.Stdout))
		if len(result) != 3 {
			rhel := newRHEL(c)
			rhel.setErrs([]error{xerrors.Errorf("Failed to parse /etc/redhat-release. r.Stdout: %s", r.Stdout)})
			return true, rhel
		}

		release := result[2]
		major, err := strconv.Atoi(util.Major(release))
		if err != nil {
			rhel := newRHEL(c)
			rhel.setErrs([]error{xerrors.Errorf("Failed to parse major version from release: %s", release)})
			return true, rhel
		}
		switch strings.ToLower(result[1]) {
		case "fedora":
			fed := newFedora(c)
			if major < 32 {
				fed.setErrs([]error{xerrors.Errorf("Failed to init Fedora. err: not supported major version. versions prior to Fedora 32 are not supported, detected version is %s", release)})
				return true, fed
			}
			fed.setDistro(constant.Fedora, release)
			return true, fed
		case "centos", "centos linux":
			cent := newCentOS(c)
			if major < 5 {
				cent.setErrs([]error{xerrors.Errorf("Failed to init CentOS. err: not supported major version. versions prior to CentOS 5 are not supported, detected version is %s", release)})
				return true, cent
			}
			cent.setDistro(constant.CentOS, release)
			return true, cent
		case "centos stream":
			cent := newCentOS(c)
			if major < 8 {
				cent.setErrs([]error{xerrors.Errorf("Failed to init CentOS Stream. err: not supported major version. versions prior to CentOS Stream 8 are not supported, detected version is %s", release)})
				return true, cent
			}
			cent.setDistro(constant.CentOS, fmt.Sprintf("stream%s", release))
			return true, cent
		case "alma", "almalinux":
			alma := newAlma(c)
			if major < 8 {
				alma.setErrs([]error{xerrors.Errorf("Failed to init AlmaLinux. err: not supported major version. versions prior to AlmaLinux 8 are not supported, detected version is %s", release)})
				return true, alma
			}
			alma.setDistro(constant.Alma, release)
			return true, alma
		case "rocky", "rocky linux":
			rocky := newRocky(c)
			if major < 8 {
				rocky.setErrs([]error{xerrors.Errorf("Failed to init Rocky Linux. err: not supported major version. versions prior to Rocky Linux 8 are not supported, detected version is %s", release)})
				return true, rocky
			}
			rocky.setDistro(constant.Rocky, release)
			return true, rocky
		default:
			rhel := newRHEL(c)
			if major < 5 {
				rhel.setErrs([]error{xerrors.Errorf("Failed to init RedHat Enterprise Linux. err: not supported major version. versions prior to RedHat Enterprise Linux 5 are not supported, detected version is %s", release)})
		if len(result) == 3 {
			release := result[2]
			major, err := strconv.Atoi(util.Major(release))
			if err != nil {
				rhel := newRHEL(c)
				rhel.setErrs([]error{xerrors.Errorf("Failed to parse major version from release: %s", release)})
				return true, rhel
			}
			switch strings.ToLower(result[1]) {
			case "fedora":
				fed := newFedora(c)
				if major < 32 {
					fed.setErrs([]error{xerrors.Errorf("Failed to init Fedora. err: not supported major version. versions prior to Fedora 32 are not supported, detected version is %s", release)})
					return true, fed
				}
				fed.setDistro(constant.Fedora, release)
				return true, fed
			case "centos", "centos linux":
				cent := newCentOS(c)
				if major < 5 {
					cent.setErrs([]error{xerrors.Errorf("Failed to init CentOS. err: not supported major version. versions prior to CentOS 5 are not supported, detected version is %s", release)})
					return true, cent
				}
				cent.setDistro(constant.CentOS, release)
				return true, cent
			case "centos stream":
				cent := newCentOS(c)
				if major < 8 {
					cent.setErrs([]error{xerrors.Errorf("Failed to init CentOS Stream. err: not supported major version. versions prior to CentOS Stream 8 are not supported, detected version is %s", release)})
					return true, cent
				}
				cent.setDistro(constant.CentOS, fmt.Sprintf("stream%s", release))
				return true, cent
			case "alma", "almalinux":
				alma := newAlma(c)
				if major < 8 {
					alma.setErrs([]error{xerrors.Errorf("Failed to init AlmaLinux. err: not supported major version. versions prior to AlmaLinux 8 are not supported, detected version is %s", release)})
					return true, alma
				}
				alma.setDistro(constant.Alma, release)
				return true, alma
			case "rocky", "rocky linux":
				rocky := newRocky(c)
				if major < 8 {
					rocky.setErrs([]error{xerrors.Errorf("Failed to init Rocky Linux. err: not supported major version. versions prior to Rocky Linux 8 are not supported, detected version is %s", release)})
					return true, rocky
				}
				rocky.setDistro(constant.Rocky, release)
				return true, rocky
			default:
				rhel := newRHEL(c)
				if major < 5 {
					rhel.setErrs([]error{xerrors.Errorf("Failed to init RedHat Enterprise Linux. err: not supported major version. versions prior to RedHat Enterprise Linux 5 are not supported, detected version is %s", release)})
					return true, rhel
				}
				rhel.setDistro(constant.RedHat, release)
				return true, rhel
			}
			rhel.setDistro(constant.RedHat, release)
			return true, rhel
		}
	}
}

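To see how the new len(result) == 3 guard plays out, here is a small standalone sketch of the submatch handling. The regular expression below is only an assumption standing in for releasePattern, which is defined elsewhere in this file; the sample release strings are illustrative.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// Assumed stand-in for releasePattern; the real pattern lives in the scanner code not shown here.
	releasePattern := regexp.MustCompile(`(.*) release ([\d.]+)`)

	for _, stdout := range []string{
		"CentOS Stream release 9",
		"AlmaLinux release 8.6 (Sky Tiger)",
		"Fedora release 35 (Thirty Five)",
		"garbage line", // would fall through to the "Failed to parse" branch
	} {
		result := releasePattern.FindStringSubmatch(strings.TrimSpace(stdout))
		if len(result) == 3 {
			// result[1] feeds the switch above after strings.ToLower; result[2] is the release.
			fmt.Printf("distro=%q release=%q\n", strings.ToLower(result[1]), result[2])
		} else {
			fmt.Printf("unparsable: %q\n", stdout)
		}
	}
}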
@@ -216,6 +216,7 @@ host = "{{$ip}}"
#type = "pseudo"
#memo = "DB Server"
#findLock = true
#findLockDirs = [ "/path/to/project/lib" ]
#lockfiles = ["/path/to/package-lock.json"]
#cpeNames = [ "cpe:/a:rubyonrails:ruby_on_rails:4.2.1" ]
#owaspDCXMLPath = "/path/to/dependency-check-report.xml"

@@ -10,26 +10,29 @@ import (
	"path/filepath"

	"github.com/aquasecurity/trivy/pkg/utils"
	"github.com/google/subcommands"
	"github.com/k0kubun/pp"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/detector"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
	"github.com/future-architect/vuls/reporter"
	"github.com/google/subcommands"
	"github.com/k0kubun/pp"
)

// ReportCmd is subcommand for reporting
type ReportCmd struct {
	configPath string

	formatJSON        bool
	formatOneEMail    bool
	formatCsv         bool
	formatFullText    bool
	formatOneLineText bool
	formatList        bool
	gzip              bool
	formatJSON          bool
	formatOneEMail      bool
	formatCsv           bool
	formatFullText      bool
	formatOneLineText   bool
	formatList          bool
	formatCycloneDXJSON bool
	formatCycloneDXXML  bool
	gzip                bool

	toSlack    bool
	toChatWork bool
@@ -80,6 +83,9 @@ func (*ReportCmd) Usage() string {
		[-format-one-line-text]
		[-format-list]
		[-format-full-text]
		[-format-csv]
		[-format-cyclonedx-json]
		[-format-cyclonedx-xml]
		[-gzip]
		[-http-proxy=http://192.168.0.1:8080]
		[-debug]
@@ -150,6 +156,8 @@ func (p *ReportCmd) SetFlags(f *flag.FlagSet) {
	f.BoolVar(&p.formatList, "format-list", false, "Display as list format")
	f.BoolVar(&p.formatFullText, "format-full-text", false,
		"Detail report in plain text")
	f.BoolVar(&p.formatCycloneDXJSON, "format-cyclonedx-json", false, "CycloneDX JSON format")
	f.BoolVar(&p.formatCycloneDXXML, "format-cyclonedx-xml", false, "CycloneDX XML format")

	f.BoolVar(&p.toSlack, "to-slack", false, "Send report via Slack")
	f.BoolVar(&p.toChatWork, "to-chatwork", false, "Send report via chatwork")
@@ -225,7 +233,8 @@ func (p *ReportCmd) Execute(_ context.Context, f *flag.FlagSet, _ ...interface{}
	}

	if !(p.formatJSON || p.formatOneLineText ||
		p.formatList || p.formatFullText || p.formatCsv) {
		p.formatList || p.formatFullText || p.formatCsv ||
		p.formatCycloneDXJSON || p.formatCycloneDXXML) {
		p.formatList = true
	}

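With these flags wired in, a CycloneDX report can be requested like any other output format, for example (other options omitted):

	vuls report -format-cyclonedx-json -gzip

and, as the condition above shows, -format-list remains the default when no format flag is given.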
@@ -310,15 +319,17 @@ func (p *ReportCmd) Execute(_ context.Context, f *flag.FlagSet, _ ...interface{}

	if p.toLocalFile {
		reports = append(reports, reporter.LocalFileWriter{
			CurrentDir:        dir,
			DiffPlus:          config.Conf.DiffPlus,
			DiffMinus:         config.Conf.DiffMinus,
			FormatJSON:        p.formatJSON,
			FormatCsv:         p.formatCsv,
			FormatFullText:    p.formatFullText,
			FormatOneLineText: p.formatOneLineText,
			FormatList:        p.formatList,
			Gzip:              p.gzip,
			CurrentDir:          dir,
			DiffPlus:            config.Conf.DiffPlus,
			DiffMinus:           config.Conf.DiffMinus,
			FormatJSON:          p.formatJSON,
			FormatCsv:           p.formatCsv,
			FormatFullText:      p.formatFullText,
			FormatOneLineText:   p.formatOneLineText,
			FormatList:          p.formatList,
			FormatCycloneDXJSON: p.formatCycloneDXJSON,
			FormatCycloneDXXML:  p.formatCycloneDXXML,
			Gzip:                p.gzip,
		})
	}
Block a user