Compare commits
1 Commits
v0.25.1-be
...
nightly
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3605645ff6 |
@@ -1,6 +0,0 @@
|
||||
.dockerignore
|
||||
Dockerfile
|
||||
vendor/
|
||||
*.sqlite3*
|
||||
setup/
|
||||
img/
|
||||
3
.github/FUNDING.yml
vendored
3
.github/FUNDING.yml
vendored
@@ -1,3 +0,0 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: kotakanbe
|
||||
43
.github/ISSUE_TEMPLATE/BUG_REPORT.md
vendored
43
.github/ISSUE_TEMPLATE/BUG_REPORT.md
vendored
@@ -1,43 +0,0 @@
|
||||
---
|
||||
name: Bug Report
|
||||
labels: bug
|
||||
about: If something isn't working as expected.
|
||||
---
|
||||
|
||||
# What did you do? (required. The issue will be **closed** when not provided.)
|
||||
|
||||
|
||||
# What did you expect to happen?
|
||||
|
||||
|
||||
# What happened instead?
|
||||
|
||||
* Current Output
|
||||
|
||||
Please re-run the command using ```-debug``` and provide the output below.
|
||||
|
||||
# Steps to reproduce the behaviour
|
||||
|
||||
|
||||
# Configuration (**MUST** fill this out):
|
||||
|
||||
* Go version (`go version`):
|
||||
|
||||
* Go environment (`go env`):
|
||||
|
||||
* Vuls environment:
|
||||
|
||||
Hash : ____
|
||||
|
||||
To check the commit hash of HEAD
|
||||
$ vuls -v
|
||||
|
||||
or
|
||||
|
||||
$ cd $GOPATH/src/github.com/future-architect/vuls
|
||||
$ git rev-parse --short HEAD
|
||||
|
||||
* config.toml:
|
||||
|
||||
* command:
|
||||
|
||||
9
.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
vendored
9
.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
vendored
@@ -1,9 +0,0 @@
|
||||
---
|
||||
name: Feature Request
|
||||
labels: enhancement
|
||||
about: I have a suggestion (and might want to implement myself)!
|
||||
---
|
||||
|
||||
<!--
|
||||
If this is a FEATURE REQUEST, request format does not matter!
|
||||
-->
|
||||
10
.github/ISSUE_TEMPLATE/SUPPORT_QUESTION.md
vendored
10
.github/ISSUE_TEMPLATE/SUPPORT_QUESTION.md
vendored
@@ -1,10 +0,0 @@
|
||||
---
|
||||
name: Support Question
|
||||
labels: question
|
||||
about: If you have a question about Vuls.
|
||||
---
|
||||
|
||||
<!--
|
||||
If you have a trouble, feel free to ask.
|
||||
Make sure you're not asking duplicate question by searching on the issues lists.
|
||||
-->
|
||||
7
.github/ISSUE_TEMPLATE/VULSREPO.md
vendored
7
.github/ISSUE_TEMPLATE/VULSREPO.md
vendored
@@ -1,7 +0,0 @@
|
||||
---
|
||||
name: Vuls Repo
|
||||
labels: vulsrepo
|
||||
about: If something isn't working as expected.
|
||||
---
|
||||
|
||||
|
||||
40
.github/PULL_REQUEST_TEMPLATE.md
vendored
40
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,40 +0,0 @@
|
||||
|
||||
If this Pull Request is work in progress, Add a prefix of “[WIP]” in the title.
|
||||
|
||||
# What did you implement:
|
||||
|
||||
Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context.
|
||||
|
||||
Fixes # (issue)
|
||||
|
||||
## Type of change
|
||||
|
||||
Please delete options that are not relevant.
|
||||
|
||||
- [ ] Bug fix (non-breaking change which fixes an issue)
|
||||
- [ ] New feature (non-breaking change which adds functionality)
|
||||
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
|
||||
- [ ] This change requires a documentation update
|
||||
|
||||
# How Has This Been Tested?
|
||||
|
||||
Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce.
|
||||
|
||||
# Checklist:
|
||||
You don't have to satisfy all of the following.
|
||||
|
||||
- [ ] Write tests
|
||||
- [ ] Write documentation
|
||||
- [ ] Check that there aren't other open pull requests for the same issue/feature
|
||||
- [ ] Format your source code by `make fmt`
|
||||
- [ ] Pass the test by `make test`
|
||||
- [ ] Provide verification config / commands
|
||||
- [ ] Enable "Allow edits from maintainers" for this PR
|
||||
- [ ] Update the messages below
|
||||
|
||||
***Is this ready for review?:*** NO
|
||||
|
||||
# Reference
|
||||
|
||||
* https://blog.github.com/2015-01-21-how-to-write-the-perfect-pull-request/
|
||||
|
||||
12
.github/dependabot.yml
vendored
12
.github/dependabot.yml
vendored
@@ -1,12 +0,0 @@
|
||||
# To get started with Dependabot version updates, you'll need to specify which
|
||||
# package ecosystems to update and where the package manifests are located.
|
||||
# Please see the documentation for all configuration options:
|
||||
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "gomod" # See documentation for possible values
|
||||
directory: "/" # Location of package manifests
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
target-branch: "master"
|
||||
67
.github/workflows/codeql-analysis.yml
vendored
67
.github/workflows/codeql-analysis.yml
vendored
@@ -1,67 +0,0 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ master ]
|
||||
schedule:
|
||||
- cron: '32 20 * * 0'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'go' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
|
||||
# Learn more:
|
||||
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
69
.github/workflows/docker-publish.yml
vendored
69
.github/workflows/docker-publish.yml
vendored
@@ -1,69 +0,0 @@
|
||||
name: Publish Docker image
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: vuls/vuls image meta
|
||||
id: oss-meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: vuls/vuls
|
||||
tags: |
|
||||
type=ref,event=tag
|
||||
|
||||
- name: vuls/fvuls image meta
|
||||
id: fvuls-meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: vuls/fvuls
|
||||
tags: |
|
||||
type=ref,event=tag
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: OSS image build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
push: true
|
||||
tags: |
|
||||
vuls/vuls:latest
|
||||
${{ steps.oss-meta.outputs.tags }}
|
||||
secrets: |
|
||||
"github_token=${{ secrets.GITHUB_TOKEN }}"
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
- name: FutureVuls image build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
file: ./contrib/Dockerfile
|
||||
push: true
|
||||
tags: |
|
||||
vuls/fvuls:latest
|
||||
${{ steps.fvuls-meta.outputs.tags }}
|
||||
secrets: |
|
||||
"github_token=${{ secrets.GITHUB_TOKEN }}"
|
||||
platforms: linux/amd64,linux/arm64
|
||||
32
.github/workflows/golangci.yml
vendored
32
.github/workflows/golangci.yml
vendored
@@ -1,32 +0,0 @@
|
||||
name: golangci-lint
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
jobs:
|
||||
golangci:
|
||||
name: lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.18
|
||||
- uses: actions/checkout@v3
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v3
|
||||
with:
|
||||
# Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
|
||||
version: v1.46
|
||||
args: --timeout=10m
|
||||
|
||||
# Optional: working directory, useful for monorepos
|
||||
# working-directory: somedir
|
||||
|
||||
# Optional: golangci-lint command line arguments.
|
||||
# args: --issues-exit-code=0
|
||||
|
||||
# Optional: show only new issues if it's a pull request. The default value is `false`.
|
||||
# only-new-issues: true
|
||||
34
.github/workflows/goreleaser.yml
vendored
34
.github/workflows/goreleaser.yml
vendored
@@ -1,34 +0,0 @@
|
||||
name: goreleaser
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
goreleaser:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
-
|
||||
name: install package for cross compile
|
||||
run: sudo apt update && sudo apt install -y gcc-aarch64-linux-gnu
|
||||
-
|
||||
name: Unshallow
|
||||
run: git fetch --prune --unshallow
|
||||
-
|
||||
name: Set up Go
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.18
|
||||
-
|
||||
name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v2
|
||||
with:
|
||||
version: latest
|
||||
args: release --rm-dist
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
25
.github/workflows/test.yml
vendored
25
.github/workflows/test.yml
vendored
@@ -1,21 +1,26 @@
|
||||
name: Test
|
||||
|
||||
on: [pull_request]
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build
|
||||
test:
|
||||
name: Test
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set up Go 1.x
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.18.x
|
||||
id: go
|
||||
go-version-file: 'go.mod'
|
||||
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Test
|
||||
run: make test
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v3
|
||||
with:
|
||||
version: latest
|
||||
args: --timeout=10m
|
||||
|
||||
- name: testing
|
||||
run: go test -race ./...
|
||||
40
.gitignore
vendored
40
.gitignore
vendored
@@ -1,22 +1,24 @@
|
||||
# Binaries for programs and plugins
|
||||
*.exe
|
||||
*.exe~
|
||||
*.dll
|
||||
*.so
|
||||
*.dylib
|
||||
|
||||
# Test binary, built with `go test -c`
|
||||
*.test
|
||||
|
||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||
*.out
|
||||
|
||||
# Dependency directories (remove the comment below to include it)
|
||||
# vendor/
|
||||
|
||||
.vscode
|
||||
*.txt
|
||||
*.swp
|
||||
*.sqlite3*
|
||||
*.db
|
||||
tags
|
||||
.gitmodules
|
||||
coverage.out
|
||||
issues/
|
||||
vendor/
|
||||
log/
|
||||
results
|
||||
config.toml
|
||||
!setup/docker/*
|
||||
.DS_Store
|
||||
dist/
|
||||
.idea
|
||||
vuls.*
|
||||
|
||||
# Vuls
|
||||
vuls
|
||||
!cmd/vuls
|
||||
future-vuls
|
||||
trivy-to-vuls
|
||||
vuls.db
|
||||
config.json
|
||||
results
|
||||
3
.gitmodules
vendored
3
.gitmodules
vendored
@@ -1,3 +0,0 @@
|
||||
[submodule "integration"]
|
||||
path = integration
|
||||
url = https://github.com/vulsio/integration
|
||||
@@ -1,54 +0,0 @@
|
||||
name: golang-ci
|
||||
|
||||
run:
|
||||
timeout: 10m
|
||||
go: '1.18'
|
||||
|
||||
linters-settings:
|
||||
revive:
|
||||
# see https://github.com/mgechev/revive#available-rules for details.
|
||||
ignore-generated-header: true
|
||||
severity: warning
|
||||
confidence: 0.8
|
||||
rules:
|
||||
- name: blank-imports
|
||||
- name: context-as-argument
|
||||
- name: context-keys-type
|
||||
- name: dot-imports
|
||||
- name: error-return
|
||||
- name: error-strings
|
||||
- name: error-naming
|
||||
- name: exported
|
||||
- name: if-return
|
||||
- name: increment-decrement
|
||||
- name: var-naming
|
||||
- name: var-declaration
|
||||
- name: package-comments
|
||||
- name: range
|
||||
- name: receiver-naming
|
||||
- name: time-naming
|
||||
- name: unexported-return
|
||||
- name: indent-error-flow
|
||||
- name: errorf
|
||||
- name: empty-block
|
||||
- name: superfluous-else
|
||||
- name: unused-parameter
|
||||
- name: unreachable-code
|
||||
- name: redefines-builtin-id
|
||||
staticcheck:
|
||||
# https://staticcheck.io/docs/options#checks
|
||||
checks: ["all", "-SA1019"]
|
||||
# errcheck:
|
||||
#exclude: /path/to/file.txt
|
||||
|
||||
linters:
|
||||
disable-all: true
|
||||
enable:
|
||||
- goimports
|
||||
- revive
|
||||
- govet
|
||||
- misspell
|
||||
- errcheck
|
||||
- staticcheck
|
||||
- prealloc
|
||||
- ineffassign
|
||||
133
.goreleaser.yml
133
.goreleaser.yml
@@ -1,133 +0,0 @@
|
||||
project_name: vuls
|
||||
env:
|
||||
- GO111MODULE=on
|
||||
release:
|
||||
github:
|
||||
owner: future-architect
|
||||
name: vuls
|
||||
builds:
|
||||
- id: vuls-amd64
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- amd64
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=x86_64-linux-gnu-gcc
|
||||
main: ./cmd/vuls/main.go
|
||||
flags:
|
||||
- -a
|
||||
ldflags:
|
||||
- -s -w -X github.com/future-architect/vuls/config.Version={{.Version}} -X github.com/future-architect/vuls/config.Revision={{.Commit}}-{{ .CommitDate }}
|
||||
binary: vuls
|
||||
|
||||
- id: vuls-arm64
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- arm64
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=aarch64-linux-gnu-gcc
|
||||
main: ./cmd/vuls/main.go
|
||||
flags:
|
||||
- -a
|
||||
ldflags:
|
||||
- -s -w -X github.com/future-architect/vuls/config.Version={{.Version}} -X github.com/future-architect/vuls/config.Revision={{.Commit}}-{{ .CommitDate }}
|
||||
binary: vuls
|
||||
|
||||
- id: vuls-scanner
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- 386
|
||||
- amd64
|
||||
- arm
|
||||
- arm64
|
||||
main: ./cmd/scanner/main.go
|
||||
flags:
|
||||
- -a
|
||||
tags:
|
||||
- scanner
|
||||
ldflags:
|
||||
- -s -w -X github.com/future-architect/vuls/config.Version={{.Version}} -X github.com/future-architect/vuls/config.Revision={{.Commit}}-{{ .CommitDate }}
|
||||
binary: vuls-scanner
|
||||
|
||||
- id: trivy-to-vuls
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- 386
|
||||
- amd64
|
||||
- arm
|
||||
- arm64
|
||||
tags:
|
||||
- scanner
|
||||
main: ./contrib/trivy/cmd/main.go
|
||||
binary: trivy-to-vuls
|
||||
|
||||
- id: future-vuls
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- 386
|
||||
- amd64
|
||||
- arm
|
||||
- arm64
|
||||
flags:
|
||||
- -a
|
||||
tags:
|
||||
- scanner
|
||||
main: ./contrib/future-vuls/cmd/main.go
|
||||
binary: future-vuls
|
||||
|
||||
archives:
|
||||
|
||||
- id: vuls
|
||||
name_template: '{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'
|
||||
builds:
|
||||
- vuls-amd64
|
||||
- vuls-arm64
|
||||
format: tar.gz
|
||||
files:
|
||||
- LICENSE
|
||||
- README*
|
||||
- CHANGELOG.md
|
||||
|
||||
- id: vuls-scanner
|
||||
name_template: '{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'
|
||||
builds:
|
||||
- vuls-scanner
|
||||
format: tar.gz
|
||||
files:
|
||||
- LICENSE
|
||||
- README*
|
||||
- CHANGELOG.md
|
||||
|
||||
- id: trivy-to-vuls
|
||||
name_template: '{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'
|
||||
builds:
|
||||
- trivy-to-vuls
|
||||
format: tar.gz
|
||||
files:
|
||||
- LICENSE
|
||||
- README*
|
||||
- CHANGELOG.md
|
||||
|
||||
- id: future-vuls
|
||||
name_template: '{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'
|
||||
builds:
|
||||
- future-vuls
|
||||
format: tar.gz
|
||||
files:
|
||||
- LICENSE
|
||||
- README*
|
||||
- CHANGELOG.md
|
||||
snapshot:
|
||||
name_template: SNAPSHOT-{{ .Commit }}
|
||||
30
.revive.toml
30
.revive.toml
@@ -1,30 +0,0 @@
|
||||
ignoreGeneratedHeader = false
|
||||
severity = "warning"
|
||||
confidence = 0.8
|
||||
errorCode = 0
|
||||
warningCode = 0
|
||||
|
||||
[rule.blank-imports]
|
||||
[rule.context-as-argument]
|
||||
[rule.context-keys-type]
|
||||
[rule.dot-imports]
|
||||
[rule.error-return]
|
||||
[rule.error-strings]
|
||||
[rule.error-naming]
|
||||
[rule.exported]
|
||||
[rule.if-return]
|
||||
[rule.increment-decrement]
|
||||
[rule.var-naming]
|
||||
[rule.var-declaration]
|
||||
[rule.package-comments]
|
||||
[rule.range]
|
||||
[rule.receiver-naming]
|
||||
[rule.time-naming]
|
||||
[rule.unexported-return]
|
||||
[rule.indent-error-flow]
|
||||
[rule.errorf]
|
||||
[rule.empty-block]
|
||||
[rule.superfluous-else]
|
||||
[rule.unused-parameter]
|
||||
[rule.unreachable-code]
|
||||
[rule.redefines-builtin-id]
|
||||
514
CHANGELOG.md
514
CHANGELOG.md
@@ -1,514 +0,0 @@
|
||||
# Change Log
|
||||
|
||||
## v0.4.1 and later, see [GitHub release](https://github.com/future-architect/vuls/releases)
|
||||
|
||||
## [v0.4.0](https://github.com/future-architect/vuls/tree/v0.4.0) (2017-08-25)
|
||||
[Full Changelog](https://github.com/future-architect/vuls/compare/v0.3.0...v0.4.0)
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- Output changelog in report, TUI and JSON for RHEL [\#367](https://github.com/future-architect/vuls/issues/367)
|
||||
- Output changelog in report, TUI and JSON for Amazon Linux [\#366](https://github.com/future-architect/vuls/issues/366)
|
||||
- Improve scanning accuracy by checking package versions [\#256](https://github.com/future-architect/vuls/issues/256)
|
||||
- Improve SSH [\#415](https://github.com/future-architect/vuls/issues/415)
|
||||
- Enable to scan even if target server can not connect to the Internet [\#258](https://github.com/future-architect/vuls/issues/258)
|
||||
- SSH Hostkey check [\#417](https://github.com/future-architect/vuls/pull/417) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- v0.4.0 [\#449](https://github.com/future-architect/vuls/pull/449) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Change default ssh method from go library to external command [\#416](https://github.com/future-architect/vuls/pull/416) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add containers-only option to configtest [\#411](https://github.com/future-architect/vuls/pull/411) ([knqyf263](https://github.com/knqyf263))
|
||||
|
||||
**Fixed bugs:**
|
||||
|
||||
- Running Vuls tui before vuls report does not show vulnerabilities checked by CPE [\#396](https://github.com/future-architect/vuls/issues/396)
|
||||
- With a long package name, Local shell mode \(stty dont' work\) [\#444](https://github.com/future-architect/vuls/issues/444)
|
||||
- Improve SSH [\#415](https://github.com/future-architect/vuls/issues/415)
|
||||
- Report that a vulnerability exists in the wrong package [\#408](https://github.com/future-architect/vuls/issues/408)
|
||||
- With a long package name, a parse error occurs. [\#391](https://github.com/future-architect/vuls/issues/391)
|
||||
- Ubuntu failed to scan vulnerable packages [\#205](https://github.com/future-architect/vuls/issues/205)
|
||||
- CVE-ID in changelog can't be picked up. [\#154](https://github.com/future-architect/vuls/issues/154)
|
||||
- v0.4.0 [\#449](https://github.com/future-architect/vuls/pull/449) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix SSH dial error [\#413](https://github.com/future-architect/vuls/pull/413) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Update deps, Change deps tool from glide to dep [\#412](https://github.com/future-architect/vuls/pull/412) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- fix report option Loaded error-info [\#406](https://github.com/future-architect/vuls/pull/406) ([hogehogehugahuga](https://github.com/hogehogehugahuga))
|
||||
- Add --user root to docker exec command [\#389](https://github.com/future-architect/vuls/pull/389) ([PaulFurtado](https://github.com/PaulFurtado))
|
||||
|
||||
**Closed issues:**
|
||||
|
||||
- README.md.ja not include "Oracle Linux, FreeBSD" [\#465](https://github.com/future-architect/vuls/issues/465)
|
||||
- Can't scan remote server - \(centos 7 - updated\) [\#451](https://github.com/future-architect/vuls/issues/451)
|
||||
- An abnormality in the result of vuls tui [\#439](https://github.com/future-architect/vuls/issues/439)
|
||||
- compile faild [\#436](https://github.com/future-architect/vuls/issues/436)
|
||||
- Can't install vuls on CentOS 7 [\#432](https://github.com/future-architect/vuls/issues/432)
|
||||
- Vuls scan doesn't show severity score in any of the vulnerable packages [\#430](https://github.com/future-architect/vuls/issues/430)
|
||||
- Load config failedtoml: cannot load TOML value of type string into a Go slice [\#429](https://github.com/future-architect/vuls/issues/429)
|
||||
- vuls scan not running check-update with sudo for Centos 7 [\#428](https://github.com/future-architect/vuls/issues/428)
|
||||
- options for configtest not being activated [\#422](https://github.com/future-architect/vuls/issues/422)
|
||||
- "could not find project Gopkg.toml, use dep init to initiate a manifest" when installing vuls [\#420](https://github.com/future-architect/vuls/issues/420)
|
||||
- go get not get [\#407](https://github.com/future-architect/vuls/issues/407)
|
||||
- Failed to scan via docker. err: Unknown format [\#404](https://github.com/future-architect/vuls/issues/404)
|
||||
- Failed to scan - kernel-xxx is an installed security update [\#403](https://github.com/future-architect/vuls/issues/403)
|
||||
- 169.254.169.254 port 80: Connection refused [\#402](https://github.com/future-architect/vuls/issues/402)
|
||||
- vuls scan --debug cause `invalid memory address` error [\#397](https://github.com/future-architect/vuls/issues/397)
|
||||
- Provide a command line flag that will automatically install aptitude on debian? [\#390](https://github.com/future-architect/vuls/issues/390)
|
||||
|
||||
**Merged pull requests:**
|
||||
|
||||
- export fill cve info [\#467](https://github.com/future-architect/vuls/pull/467) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- add oval docker [\#466](https://github.com/future-architect/vuls/pull/466) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- fix typos in commands. [\#464](https://github.com/future-architect/vuls/pull/464) ([ymomoi](https://github.com/ymomoi))
|
||||
- Update README [\#463](https://github.com/future-architect/vuls/pull/463) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- export FillWithOval [\#462](https://github.com/future-architect/vuls/pull/462) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- add serveruuid field [\#458](https://github.com/future-architect/vuls/pull/458) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- add s3 dirctory option [\#457](https://github.com/future-architect/vuls/pull/457) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Extract Advisory.Description on RHEL, Amazon, Oracle [\#450](https://github.com/future-architect/vuls/pull/450) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- nosudo on CentOS and Fetch Changelogs on Amazon, RHEL [\#448](https://github.com/future-architect/vuls/pull/448) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- change logrus package to lowercase and update other packages [\#446](https://github.com/future-architect/vuls/pull/446) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- add db backend redis [\#445](https://github.com/future-architect/vuls/pull/445) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- fast test [\#435](https://github.com/future-architect/vuls/pull/435) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- fix typo [\#433](https://github.com/future-architect/vuls/pull/433) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Add support for PostgreSQL as a DB storage back-end [\#431](https://github.com/future-architect/vuls/pull/431) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- typo README.js.md [\#426](https://github.com/future-architect/vuls/pull/426) ([ryurock](https://github.com/ryurock))
|
||||
- Add TOC to README [\#425](https://github.com/future-architect/vuls/pull/425) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fixing \#420 where lock and manifest have moved to TOML [\#421](https://github.com/future-architect/vuls/pull/421) ([elfgoh](https://github.com/elfgoh))
|
||||
- Define timeout for vulnerabilities scan and platform detection [\#414](https://github.com/future-architect/vuls/pull/414) ([s7anley](https://github.com/s7anley))
|
||||
- Enable -timeout option when detecting OS [\#410](https://github.com/future-architect/vuls/pull/410) ([knqyf263](https://github.com/knqyf263))
|
||||
- Remove duplicate command in README [\#401](https://github.com/future-architect/vuls/pull/401) ([knqyf263](https://github.com/knqyf263))
|
||||
- Fix to read config.toml at tui [\#441](https://github.com/future-architect/vuls/pull/441) ([usiusi360](https://github.com/usiusi360))
|
||||
- Change NVD URL to new one [\#419](https://github.com/future-architect/vuls/pull/419) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add some testcases [\#418](https://github.com/future-architect/vuls/pull/418) ([kotakanbe](https://github.com/kotakanbe))
|
||||
|
||||
## [v0.3.0](https://github.com/future-architect/vuls/tree/v0.3.0) (2017-03-24)
|
||||
[Full Changelog](https://github.com/future-architect/vuls/compare/v0.2.0...v0.3.0)
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- Changelog parsing fails when package maintainers aren't consistent regarding versions [\#327](https://github.com/future-architect/vuls/issues/327)
|
||||
- Docker scan doesn't report image name [\#325](https://github.com/future-architect/vuls/issues/325)
|
||||
- vuls report -to-email only one E-Mail [\#295](https://github.com/future-architect/vuls/issues/295)
|
||||
- Support RHEL5 [\#286](https://github.com/future-architect/vuls/issues/286)
|
||||
- Continue scanning even when some hosts have tech issues? [\#264](https://github.com/future-architect/vuls/issues/264)
|
||||
- Normalization of JSON output [\#259](https://github.com/future-architect/vuls/issues/259)
|
||||
- Add report subcommand, change scan subcommand options [\#239](https://github.com/future-architect/vuls/issues/239)
|
||||
- scan localhost? [\#210](https://github.com/future-architect/vuls/issues/210)
|
||||
- Can Vuls show details about updateable packages [\#341](https://github.com/future-architect/vuls/issues/341)
|
||||
- Scan all containers except [\#285](https://github.com/future-architect/vuls/issues/285)
|
||||
- Notify the difference from the previous scan result [\#255](https://github.com/future-architect/vuls/issues/255)
|
||||
- EC2RoleCreds support? [\#250](https://github.com/future-architect/vuls/issues/250)
|
||||
- Output confidence score of detection accuracy and detection method to JSON or Reporting [\#350](https://github.com/future-architect/vuls/pull/350) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Avoid null slice being null in JSON [\#345](https://github.com/future-architect/vuls/pull/345) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add -format-one-email option [\#331](https://github.com/future-architect/vuls/pull/331) ([knqyf263](https://github.com/knqyf263))
|
||||
- Support Raspbian [\#330](https://github.com/future-architect/vuls/pull/330) ([knqyf263](https://github.com/knqyf263))
|
||||
- Add leniancy to the version matching for debian to account for versio… [\#328](https://github.com/future-architect/vuls/pull/328) ([jsulinski](https://github.com/jsulinski))
|
||||
- Add image information for docker containers [\#326](https://github.com/future-architect/vuls/pull/326) ([jsulinski](https://github.com/jsulinski))
|
||||
- Continue scanning even when some hosts have tech issues [\#309](https://github.com/future-architect/vuls/pull/309) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add -log-dir option [\#301](https://github.com/future-architect/vuls/pull/301) ([knqyf263](https://github.com/knqyf263))
|
||||
- Use --assumeno option [\#300](https://github.com/future-architect/vuls/pull/300) ([knqyf263](https://github.com/knqyf263))
|
||||
- Add local scan mode\(Scan without SSH when target server is localhost\) [\#291](https://github.com/future-architect/vuls/pull/291) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Support RHEL5 [\#289](https://github.com/future-architect/vuls/pull/289) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add LXD support [\#288](https://github.com/future-architect/vuls/pull/288) ([jiazio](https://github.com/jiazio))
|
||||
- Add timeout option to configtest [\#400](https://github.com/future-architect/vuls/pull/400) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Notify the difference from the previous scan result [\#392](https://github.com/future-architect/vuls/pull/392) ([knqyf263](https://github.com/knqyf263))
|
||||
- Add Oracle Linux support [\#386](https://github.com/future-architect/vuls/pull/386) ([Djelibeybi](https://github.com/Djelibeybi))
|
||||
- Change container scan format in config.toml [\#381](https://github.com/future-architect/vuls/pull/381) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Obsolete CentOS5 support [\#378](https://github.com/future-architect/vuls/pull/378) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Deprecate prepare subcommand to minimize the root authority defined by /etc/sudoers [\#375](https://github.com/future-architect/vuls/pull/375) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Support IAM role for report to S3. [\#370](https://github.com/future-architect/vuls/pull/370) ([ohsawa0515](https://github.com/ohsawa0515))
|
||||
- Add .travis.yml [\#363](https://github.com/future-architect/vuls/pull/363) ([knqyf263](https://github.com/knqyf263))
|
||||
- Output changelog in report, TUI and JSON for Ubuntu/Debian/CentOS [\#356](https://github.com/future-architect/vuls/pull/356) ([kotakanbe](https://github.com/kotakanbe))
|
||||
|
||||
**Fixed bugs:**
|
||||
|
||||
- Debian scans failing in docker [\#323](https://github.com/future-architect/vuls/issues/323)
|
||||
- Local CVE DB is still checked, even if a CVE Dictionary URL is defined [\#316](https://github.com/future-architect/vuls/issues/316)
|
||||
- vuls needs gmake. [\#313](https://github.com/future-architect/vuls/issues/313)
|
||||
- patch request for FreeBSD [\#312](https://github.com/future-architect/vuls/issues/312)
|
||||
- Report: failed to read from json \(Docker\) [\#294](https://github.com/future-architect/vuls/issues/294)
|
||||
- -report-mail option does not output required mail header [\#282](https://github.com/future-architect/vuls/issues/282)
|
||||
- PackInfo not found error when vuls scan. [\#281](https://github.com/future-architect/vuls/issues/281)
|
||||
- Normalize character set [\#279](https://github.com/future-architect/vuls/issues/279)
|
||||
- The number of Updatable Packages is different from the number of yum check-update [\#373](https://github.com/future-architect/vuls/issues/373)
|
||||
- sudo is needed when exec yum check-update on RHEL7 [\#371](https://github.com/future-architect/vuls/issues/371)
|
||||
- `123-3ubuntu4` should be marked as ChangelogLenientMatch [\#362](https://github.com/future-architect/vuls/issues/362)
|
||||
- CentOS multi package invalid result [\#360](https://github.com/future-architect/vuls/issues/360)
|
||||
- Parse error after check-update. \(Unknown format\) [\#359](https://github.com/future-architect/vuls/issues/359)
|
||||
- Fix candidate to confidence. [\#354](https://github.com/future-architect/vuls/pull/354) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Bug fix: not send e-mail to cc address [\#346](https://github.com/future-architect/vuls/pull/346) ([knqyf263](https://github.com/knqyf263))
|
||||
- Change the command used for os detection from uname to freebsd-version [\#340](https://github.com/future-architect/vuls/pull/340) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix error handling of detectOS [\#337](https://github.com/future-architect/vuls/pull/337) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix infinite retry at size overrun error in Slack report [\#329](https://github.com/future-architect/vuls/pull/329) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- aptitude changelog defaults to using more, which is not interactive a… [\#324](https://github.com/future-architect/vuls/pull/324) ([jsulinski](https://github.com/jsulinski))
|
||||
- Do not use sudo when echo [\#322](https://github.com/future-architect/vuls/pull/322) ([knqyf263](https://github.com/knqyf263))
|
||||
- Reduce privilege requirements for commands that don't need sudo on Ubuntu/Debian [\#319](https://github.com/future-architect/vuls/pull/319) ([jsulinski](https://github.com/jsulinski))
|
||||
- Don't check for a CVE DB when CVE Dictionary URL is defined [\#317](https://github.com/future-architect/vuls/pull/317) ([jsulinski](https://github.com/jsulinski))
|
||||
- Fix typo contianer -\> container [\#314](https://github.com/future-architect/vuls/pull/314) ([justyns](https://github.com/justyns))
|
||||
- Fix the changelog cache logic for ubuntu/debian [\#305](https://github.com/future-architect/vuls/pull/305) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix yum updateinfo options [\#304](https://github.com/future-architect/vuls/pull/304) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Update glide.lock to fix create-log-dir error. [\#303](https://github.com/future-architect/vuls/pull/303) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix a bug in logging \(file output\) at scan command [\#302](https://github.com/future-architect/vuls/pull/302) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add -pipe flag \#294 [\#299](https://github.com/future-architect/vuls/pull/299) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix RHEL5 scan stopped halfway [\#293](https://github.com/future-architect/vuls/pull/293) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix amazon linux scan stopped halfway [\#292](https://github.com/future-architect/vuls/pull/292) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix nil-ponter in TUI [\#388](https://github.com/future-architect/vuls/pull/388) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix Bug of Mysql Backend [\#384](https://github.com/future-architect/vuls/pull/384) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix scan confidence on Ubuntu/Debian/Raspbian \#362 [\#379](https://github.com/future-architect/vuls/pull/379) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix updatalbe packages count \#373 [\#374](https://github.com/future-architect/vuls/pull/374) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- sudo yum check-update on RHEL [\#372](https://github.com/future-architect/vuls/pull/372) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Change ssh option from -t to -tt [\#369](https://github.com/future-architect/vuls/pull/369) ([knqyf263](https://github.com/knqyf263))
|
||||
- Increase the width of RequestPty [\#364](https://github.com/future-architect/vuls/pull/364) ([knqyf263](https://github.com/knqyf263))
|
||||
|
||||
**Closed issues:**
|
||||
|
||||
- vuls configtest --debugがsudoのチェックで止まってしまう [\#395](https://github.com/future-architect/vuls/issues/395)
|
||||
- Add support for Oracle Linux [\#385](https://github.com/future-architect/vuls/issues/385)
|
||||
- error on install - Ubuntu 16.04 [\#376](https://github.com/future-architect/vuls/issues/376)
|
||||
- Unknown OS Type [\#335](https://github.com/future-architect/vuls/issues/335)
|
||||
- mac os 10.12.3 make install error [\#334](https://github.com/future-architect/vuls/issues/334)
|
||||
- assumeYes doesn't work because there is no else condition [\#320](https://github.com/future-architect/vuls/issues/320)
|
||||
- Debian scan uses sudo where unnecessary [\#318](https://github.com/future-architect/vuls/issues/318)
|
||||
- Add FreeBSD 11 to supported OS on documents. [\#311](https://github.com/future-architect/vuls/issues/311)
|
||||
- docker fetchnvd failing [\#274](https://github.com/future-architect/vuls/issues/274)
|
||||
- Latest version of labstack echo breaks installation [\#268](https://github.com/future-architect/vuls/issues/268)
|
||||
- fetchnvd Fails using example loop [\#267](https://github.com/future-architect/vuls/issues/267)
|
||||
|
||||
**Merged pull requests:**
|
||||
|
||||
- fix typo in README.ja.md [\#394](https://github.com/future-architect/vuls/pull/394) ([lv7777](https://github.com/lv7777))
|
||||
- Update Tutorial in README [\#387](https://github.com/future-architect/vuls/pull/387) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix README [\#383](https://github.com/future-architect/vuls/pull/383) ([usiusi360](https://github.com/usiusi360))
|
||||
- s/dictinary/dictionary typo [\#382](https://github.com/future-architect/vuls/pull/382) ([beuno](https://github.com/beuno))
|
||||
- Fix Japanese typo [\#377](https://github.com/future-architect/vuls/pull/377) ([IMAI-Yuji](https://github.com/IMAI-Yuji))
|
||||
- Improve kanji character [\#351](https://github.com/future-architect/vuls/pull/351) ([hasegawa-tomoki](https://github.com/hasegawa-tomoki))
|
||||
- Add PULL\_REQUEST\_TEMPLATE.md [\#348](https://github.com/future-architect/vuls/pull/348) ([knqyf263](https://github.com/knqyf263))
|
||||
- Update README [\#347](https://github.com/future-architect/vuls/pull/347) ([knqyf263](https://github.com/knqyf263))
|
||||
- Fix test case [\#344](https://github.com/future-architect/vuls/pull/344) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix typo [\#343](https://github.com/future-architect/vuls/pull/343) ([knqyf263](https://github.com/knqyf263))
|
||||
- Rename Makefile to GNUmakefile \#313 [\#339](https://github.com/future-architect/vuls/pull/339) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Update README [\#338](https://github.com/future-architect/vuls/pull/338) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- add error handling [\#332](https://github.com/future-architect/vuls/pull/332) ([kazuminn](https://github.com/kazuminn))
|
||||
- Update readme [\#308](https://github.com/future-architect/vuls/pull/308) ([lapthorn](https://github.com/lapthorn))
|
||||
- Update glide.lock to fix import error [\#306](https://github.com/future-architect/vuls/pull/306) ([knqyf263](https://github.com/knqyf263))
|
||||
- Check whether echo is executable with nopasswd [\#298](https://github.com/future-architect/vuls/pull/298) ([knqyf263](https://github.com/knqyf263))
|
||||
- Update docker README [\#297](https://github.com/future-architect/vuls/pull/297) ([knqyf263](https://github.com/knqyf263))
|
||||
- update readme [\#296](https://github.com/future-architect/vuls/pull/296) ([galigalikun](https://github.com/galigalikun))
|
||||
- remove unused import line. [\#358](https://github.com/future-architect/vuls/pull/358) ([ymomoi](https://github.com/ymomoi))
|
||||
|
||||
## [v0.2.0](https://github.com/future-architect/vuls/tree/v0.2.0) (2017-01-10)
|
||||
[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.7...v0.2.0)
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- Add report subcommand, change scan options. \#239 [\#270](https://github.com/future-architect/vuls/pull/270) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add --assume-yes to prepare \#260 [\#266](https://github.com/future-architect/vuls/pull/266) ([Code0x58](https://github.com/Code0x58))
|
||||
- Use RFC3339 timestamps in the results [\#265](https://github.com/future-architect/vuls/pull/265) ([Code0x58](https://github.com/Code0x58))
|
||||
|
||||
**Fixed bugs:**
|
||||
|
||||
- vuls prepare failed to centos7 [\#275](https://github.com/future-architect/vuls/issues/275)
|
||||
- Failed to scan on RHEL5 [\#94](https://github.com/future-architect/vuls/issues/94)
|
||||
- Fix container os detection [\#287](https://github.com/future-architect/vuls/pull/287) ([jiazio](https://github.com/jiazio))
|
||||
- Add date header to report mail. [\#283](https://github.com/future-architect/vuls/pull/283) ([ymomoi](https://github.com/ymomoi))
|
||||
- Add Content-Type header to report/mail.go . [\#280](https://github.com/future-architect/vuls/pull/280) ([hogehogehugahuga](https://github.com/hogehogehugahuga))
|
||||
- Keep output of "vuls scan -report-\*" to be same every times [\#272](https://github.com/future-architect/vuls/pull/272) ([yoheimuta](https://github.com/yoheimuta))
|
||||
- Fix JSON-dir regex pattern \#265 [\#271](https://github.com/future-architect/vuls/pull/271) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Stop quietly ignoring `--ssh-external` on Windows [\#263](https://github.com/future-architect/vuls/pull/263) ([Code0x58](https://github.com/Code0x58))
|
||||
- Fix non-interactive `apt-get install` \#251 [\#253](https://github.com/future-architect/vuls/pull/253) ([Code0x58](https://github.com/Code0x58))
|
||||
|
||||
**Closed issues:**
|
||||
|
||||
- gocui.NewGui now takes a parameter [\#261](https://github.com/future-architect/vuls/issues/261)
|
||||
- Add a `--yes` flag to bypass interactive prompt for `vuls prepare` [\#260](https://github.com/future-architect/vuls/issues/260)
|
||||
- `vuls prepare` doesn't work on Debian host due to apt-get confirmation prompt [\#251](https://github.com/future-architect/vuls/issues/251)
|
||||
|
||||
**Merged pull requests:**
|
||||
|
||||
- Fix gocui.NewGui after signature change \#261 [\#262](https://github.com/future-architect/vuls/pull/262) ([Code0x58](https://github.com/Code0x58))
|
||||
- Replace inconsistent tabs with spaces [\#254](https://github.com/future-architect/vuls/pull/254) ([Code0x58](https://github.com/Code0x58))
|
||||
- Fix README [\#249](https://github.com/future-architect/vuls/pull/249) ([usiusi360](https://github.com/usiusi360))
|
||||
|
||||
## [v0.1.7](https://github.com/future-architect/vuls/tree/v0.1.7) (2016-11-08)
|
||||
[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.6...v0.1.7)
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- Enable to scan only docker container, without docker host [\#122](https://github.com/future-architect/vuls/issues/122)
|
||||
- Add -skip-broken option \[CentOS only\] \#245 [\#248](https://github.com/future-architect/vuls/pull/248) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Display unknown CVEs to TUI [\#244](https://github.com/future-architect/vuls/pull/244) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add the XML output [\#240](https://github.com/future-architect/vuls/pull/240) ([gleentea](https://github.com/gleentea))
|
||||
- add '-ssh-external' option to prepare subcommand [\#234](https://github.com/future-architect/vuls/pull/234) ([mykstmhr](https://github.com/mykstmhr))
|
||||
- Integrate OWASP Dependency Check [\#232](https://github.com/future-architect/vuls/pull/232) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add support for reading CVE data from MySQL. [\#225](https://github.com/future-architect/vuls/pull/225) ([oswell](https://github.com/oswell))
|
||||
- Remove base docker image, -v shows commit hash [\#223](https://github.com/future-architect/vuls/pull/223) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Support ignore CveIDs in config [\#222](https://github.com/future-architect/vuls/pull/222) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Confirm before installing dependencies on prepare [\#219](https://github.com/future-architect/vuls/pull/219) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Remove all.json [\#218](https://github.com/future-architect/vuls/pull/218) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add GitHub issue template [\#217](https://github.com/future-architect/vuls/pull/217) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Improve makefile, -version shows git hash, fix README [\#216](https://github.com/future-architect/vuls/pull/216) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- change e-mail package from gomail to net/smtp [\#211](https://github.com/future-architect/vuls/pull/211) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Add only-containers option to scan subcommand \#122 [\#190](https://github.com/future-architect/vuls/pull/190) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix -results-dir option of scan subcommand [\#185](https://github.com/future-architect/vuls/pull/185) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Show error when no scannable servers are detected. [\#177](https://github.com/future-architect/vuls/pull/177) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add sudo check to prepare subcommand [\#176](https://github.com/future-architect/vuls/pull/176) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Supports yum --enablerepo option \(supports only base,updates for now\) [\#147](https://github.com/future-architect/vuls/pull/147) ([kotakanbe](https://github.com/kotakanbe))
|
||||
|
||||
**Fixed bugs:**
|
||||
|
||||
- Debian 8.6 \(jessie\) scan does not show vulnerable packages [\#235](https://github.com/future-architect/vuls/issues/235)
|
||||
- panic: runtime error: index out of range - ubuntu 16.04 + vuls history [\#180](https://github.com/future-architect/vuls/issues/180)
|
||||
- Moved golang.org/x/net/context to context [\#243](https://github.com/future-architect/vuls/pull/243) ([yoheimuta](https://github.com/yoheimuta))
|
||||
- Fix changelog cache bug on Ubuntu and Debian \#235 [\#238](https://github.com/future-architect/vuls/pull/238) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- add '-ssh-external' option to prepare subcommand [\#234](https://github.com/future-architect/vuls/pull/234) ([mykstmhr](https://github.com/mykstmhr))
|
||||
- Fixed error for the latest version of gocui [\#231](https://github.com/future-architect/vuls/pull/231) ([ymd38](https://github.com/ymd38))
|
||||
- Handle the refactored gocui SetCurrentView method. [\#229](https://github.com/future-architect/vuls/pull/229) ([oswell](https://github.com/oswell))
|
||||
- Fix locale env var LANG to LANGUAGE [\#215](https://github.com/future-architect/vuls/pull/215) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fixed bug with parsing update line on CentOS/RHEL [\#206](https://github.com/future-architect/vuls/pull/206) ([andyone](https://github.com/andyone))
|
||||
- Fix defer cache.DB.close [\#201](https://github.com/future-architect/vuls/pull/201) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix a help message of -report-azure-blob option [\#195](https://github.com/future-architect/vuls/pull/195) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix error handling in tui [\#193](https://github.com/future-architect/vuls/pull/193) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix not working changelog cache on Container [\#189](https://github.com/future-architect/vuls/pull/189) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix release version detection on FreeBSD [\#184](https://github.com/future-architect/vuls/pull/184) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix defer cahce.DB.close\(\) [\#183](https://github.com/future-architect/vuls/pull/183) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix a mode of files/dir \(report, log\) [\#182](https://github.com/future-architect/vuls/pull/182) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix a error when no json dirs are found under results \#180 [\#181](https://github.com/future-architect/vuls/pull/181) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- ssh-external option of configtest is not working \#178 [\#179](https://github.com/future-architect/vuls/pull/179) ([kotakanbe](https://github.com/kotakanbe))
|
||||
|
||||
**Closed issues:**
|
||||
|
||||
- --enable-repos of yum option [\#246](https://github.com/future-architect/vuls/issues/246)
|
||||
- --skip-broken at yum option [\#245](https://github.com/future-architect/vuls/issues/245)
|
||||
- Recent changes to gobui cause build failures [\#228](https://github.com/future-architect/vuls/issues/228)
|
||||
- https://hub.docker.com/r/vuls/go-cve-dictionary/ is empty [\#208](https://github.com/future-architect/vuls/issues/208)
|
||||
- Not able to install gomail fails [\#202](https://github.com/future-architect/vuls/issues/202)
|
||||
- No results file created - vuls tui failed [\#199](https://github.com/future-architect/vuls/issues/199)
|
||||
- Wrong file permissions for results/\*.json in official Docker container [\#197](https://github.com/future-architect/vuls/issues/197)
|
||||
- Failed: Unknown OS Type [\#196](https://github.com/future-architect/vuls/issues/196)
|
||||
- Segmentation fault with configtest [\#192](https://github.com/future-architect/vuls/issues/192)
|
||||
- Failed to scan. err: No server defined. Check the configuration [\#187](https://github.com/future-architect/vuls/issues/187)
|
||||
- vuls configtest -ssh-external doesnt work [\#178](https://github.com/future-architect/vuls/issues/178)
|
||||
- apt-get update: time out [\#175](https://github.com/future-architect/vuls/issues/175)
|
||||
- scanning on Centos6, but vuls recognizes debian. [\#174](https://github.com/future-architect/vuls/issues/174)
|
||||
- Fix READMEja \#164 [\#173](https://github.com/future-architect/vuls/issues/173)
|
||||
|
||||
**Merged pull requests:**
|
||||
|
||||
- Update README \#225 [\#242](https://github.com/future-architect/vuls/pull/242) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- fix readme [\#241](https://github.com/future-architect/vuls/pull/241) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Fix README \#234 [\#237](https://github.com/future-architect/vuls/pull/237) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Update glide files [\#236](https://github.com/future-architect/vuls/pull/236) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- fix README [\#226](https://github.com/future-architect/vuls/pull/226) ([usiusi360](https://github.com/usiusi360))
|
||||
- fix some misspelling. [\#221](https://github.com/future-architect/vuls/pull/221) ([ymomoi](https://github.com/ymomoi))
|
||||
- fix docker readme [\#214](https://github.com/future-architect/vuls/pull/214) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Fix ja document about typo [\#213](https://github.com/future-architect/vuls/pull/213) ([shokohara](https://github.com/shokohara))
|
||||
- fix readme [\#212](https://github.com/future-architect/vuls/pull/212) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- fix README [\#207](https://github.com/future-architect/vuls/pull/207) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- fix typo [\#204](https://github.com/future-architect/vuls/pull/204) ([usiusi360](https://github.com/usiusi360))
|
||||
- fix gitignore [\#191](https://github.com/future-architect/vuls/pull/191) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Update glide.lock [\#188](https://github.com/future-architect/vuls/pull/188) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix path in setup/docker/README [\#186](https://github.com/future-architect/vuls/pull/186) ([dladuke](https://github.com/dladuke))
|
||||
- Vuls and vulsrepo are now separated [\#163](https://github.com/future-architect/vuls/pull/163) ([hikachan](https://github.com/hikachan))
|
||||
|
||||
## [v0.1.6](https://github.com/future-architect/vuls/tree/v0.1.6) (2016-09-12)

[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.5...v0.1.6)

**Implemented enhancements:**

- High speed scan on Ubuntu/Debian [\#172](https://github.com/future-architect/vuls/pull/172) ([kotakanbe](https://github.com/kotakanbe))
- Support CWE\(Common Weakness Enumeration\) [\#169](https://github.com/future-architect/vuls/pull/169) ([kotakanbe](https://github.com/kotakanbe))
- Enable to scan without sudo on amazon linux [\#167](https://github.com/future-architect/vuls/pull/167) ([kotakanbe](https://github.com/kotakanbe))
- Remove deprecated options -use-unattended-upgrades,-use-yum-plugin-security [\#161](https://github.com/future-architect/vuls/pull/161) ([kotakanbe](https://github.com/kotakanbe))
- delete sqlite3 [\#152](https://github.com/future-architect/vuls/pull/152) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))

**Fixed bugs:**

- Failed to setup vuls docker [\#170](https://github.com/future-architect/vuls/issues/170)
- yum check-update error occurred when no reboot after kernel updating [\#165](https://github.com/future-architect/vuls/issues/165)
- error thrown from 'docker build .' [\#157](https://github.com/future-architect/vuls/issues/157)
- CVE-ID is truncated to 4 digits [\#153](https://github.com/future-architect/vuls/issues/153)
- 'yum update --changelog' stalled in 'vuls scan'. if ssh user is not 'root'. [\#150](https://github.com/future-architect/vuls/issues/150)
- Panic on packet scan [\#131](https://github.com/future-architect/vuls/issues/131)
- Update glide.lock \#170 [\#171](https://github.com/future-architect/vuls/pull/171) ([kotakanbe](https://github.com/kotakanbe))
- Fix detecting a platform on Azure [\#168](https://github.com/future-architect/vuls/pull/168) ([kotakanbe](https://github.com/kotakanbe))
- Fix parse error for yum check-update \#165 [\#166](https://github.com/future-architect/vuls/pull/166) ([kotakanbe](https://github.com/kotakanbe))
- Fix bug: Vuls on Docker [\#159](https://github.com/future-architect/vuls/pull/159) ([tjinjin](https://github.com/tjinjin))
- Fix CVE-ID is truncated to 4 digits [\#155](https://github.com/future-architect/vuls/pull/155) ([usiusi360](https://github.com/usiusi360))
- Fix yum update --changelog stalled when non-root ssh user on CentOS \#150 [\#151](https://github.com/future-architect/vuls/pull/151) ([kotakanbe](https://github.com/kotakanbe))

**Closed issues:**

- Support su for root privilege escalation [\#44](https://github.com/future-architect/vuls/issues/44)
- Support FreeBSD [\#34](https://github.com/future-architect/vuls/issues/34)

**Merged pull requests:**

- Change scripts for data fetching from jvn [\#164](https://github.com/future-architect/vuls/pull/164) ([kotakanbe](https://github.com/kotakanbe))
- Fix: setup vulsrepo [\#162](https://github.com/future-architect/vuls/pull/162) ([tjinjin](https://github.com/tjinjin))
- Fix-docker-vulsrepo-install [\#160](https://github.com/future-architect/vuls/pull/160) ([usiusi360](https://github.com/usiusi360))
- Reduce regular expression compilation [\#158](https://github.com/future-architect/vuls/pull/158) ([itchyny](https://github.com/itchyny))
- Add testcases for \#153 [\#156](https://github.com/future-architect/vuls/pull/156) ([kotakanbe](https://github.com/kotakanbe))

## [v0.1.5](https://github.com/future-architect/vuls/tree/v0.1.5) (2016-08-16)

[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.4...v0.1.5)

**Implemented enhancements:**

- Enable to scan without running go-cve-dictionary as server mode [\#84](https://github.com/future-architect/vuls/issues/84)
- Support high-speed scanning for CentOS [\#138](https://github.com/future-architect/vuls/pull/138) ([tai-ga](https://github.com/tai-ga))
- Add configtest subcommand. skip un-ssh-able servers. [\#134](https://github.com/future-architect/vuls/pull/134) ([kotakanbe](https://github.com/kotakanbe))
- Support -report-azure-blob option [\#130](https://github.com/future-architect/vuls/pull/130) ([kotakanbe](https://github.com/kotakanbe))
- Add optional key-values that will be outputted to JSON in config [\#117](https://github.com/future-architect/vuls/pull/117) ([kotakanbe](https://github.com/kotakanbe))
- Change dir structure [\#115](https://github.com/future-architect/vuls/pull/115) ([kotakanbe](https://github.com/kotakanbe))
- Add some validation of loading config. user, host and port [\#113](https://github.com/future-architect/vuls/pull/113) ([kotakanbe](https://github.com/kotakanbe))
- Support scanning with external ssh command [\#101](https://github.com/future-architect/vuls/pull/101) ([kotakanbe](https://github.com/kotakanbe))
- Detect Platform and get instance-id of amazon ec2 [\#95](https://github.com/future-architect/vuls/pull/95) ([kotakanbe](https://github.com/kotakanbe))
- Add -report-s3 option [\#92](https://github.com/future-architect/vuls/pull/92) ([kotakanbe](https://github.com/kotakanbe))
- Added FreeBSD support. [\#90](https://github.com/future-architect/vuls/pull/90) ([justyntemme](https://github.com/justyntemme))
- Add glide files for vendoring [\#89](https://github.com/future-architect/vuls/pull/89) ([kotakanbe](https://github.com/kotakanbe))
- Fix README, change -cvedbpath to -cve-dictionary-dbpath \#84 [\#85](https://github.com/future-architect/vuls/pull/85) ([kotakanbe](https://github.com/kotakanbe))
- Add option for it get cve detail from cve.sqlite3. [\#81](https://github.com/future-architect/vuls/pull/81) ([ymd38](https://github.com/ymd38))
- Add -report-text option, Fix small bug of report in japanese [\#78](https://github.com/future-architect/vuls/pull/78) ([kotakanbe](https://github.com/kotakanbe))
- Add JSONWriter, Fix CVE sort order of report [\#77](https://github.com/future-architect/vuls/pull/77) ([kotakanbe](https://github.com/kotakanbe))

**Fixed bugs:**

- Docker: Panic [\#76](https://github.com/future-architect/vuls/issues/76)
- Fix apt command to scan correctly when system locale is not english [\#149](https://github.com/future-architect/vuls/pull/149) ([kit494way](https://github.com/kit494way))
- Disable -ask-sudo-password for security reasons [\#148](https://github.com/future-architect/vuls/pull/148) ([kotakanbe](https://github.com/kotakanbe))
- Fix no tty error while executing with -external-ssh option [\#143](https://github.com/future-architect/vuls/pull/143) ([kotakanbe](https://github.com/kotakanbe))
- wrong log packages [\#141](https://github.com/future-architect/vuls/pull/141) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
- Fix platform detection. [\#137](https://github.com/future-architect/vuls/pull/137) ([Rompei](https://github.com/Rompei))
- Fix nil pointer when scan with -cve-dictionary-dbpath and cpeNames [\#111](https://github.com/future-architect/vuls/pull/111) ([kotakanbe](https://github.com/kotakanbe))
- Remove vulndb file before pkg audit [\#110](https://github.com/future-architect/vuls/pull/110) ([kotakanbe](https://github.com/kotakanbe))
- Add error handling when unable to connect via ssh. status code: 255 [\#108](https://github.com/future-architect/vuls/pull/108) ([kotakanbe](https://github.com/kotakanbe))
- Enable to detect vulnerabilities on FreeBSD [\#98](https://github.com/future-architect/vuls/pull/98) ([kotakanbe](https://github.com/kotakanbe))
- Fix unknown format err while check-update on RHEL6.5 [\#93](https://github.com/future-architect/vuls/pull/93) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
- Fix type of SMTP Port of discovery command's output [\#88](https://github.com/future-architect/vuls/pull/88) ([kotakanbe](https://github.com/kotakanbe))
- Fix error msg when go-cve-dictionary is unavailable \#84 [\#86](https://github.com/future-architect/vuls/pull/86) ([kotakanbe](https://github.com/kotakanbe))
- Fix error handling to avoid nil pointer err on debian [\#83](https://github.com/future-architect/vuls/pull/83) ([kotakanbe](https://github.com/kotakanbe))
- Fix nil pointer while doing apt-cache policy on ubuntu \#76 [\#82](https://github.com/future-architect/vuls/pull/82) ([kotakanbe](https://github.com/kotakanbe))
- fix log import url [\#79](https://github.com/future-architect/vuls/pull/79) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
- Fix error handling of gorequest [\#75](https://github.com/future-architect/vuls/pull/75) ([kotakanbe](https://github.com/kotakanbe))
- Fix freezing forever when no args specified in TUI mode [\#73](https://github.com/future-architect/vuls/pull/73) ([kotakanbe](https://github.com/kotakanbe))
- mv version.go version/version.go to run main.go without compile [\#71](https://github.com/future-architect/vuls/pull/71) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))

**Closed issues:**

- SSh password authentication failed on FreeBSD [\#99](https://github.com/future-architect/vuls/issues/99)
- BUG: -o pipefail is not work on FreeBSD's /bin/sh. because it isn't bash [\#91](https://github.com/future-architect/vuls/issues/91)
- Use ~/.ssh/config [\#62](https://github.com/future-architect/vuls/issues/62)
- SSH ciphers [\#37](https://github.com/future-architect/vuls/issues/37)

**Merged pull requests:**

- Update README \#138 [\#144](https://github.com/future-architect/vuls/pull/144) ([kotakanbe](https://github.com/kotakanbe))
- Fix a typo [\#142](https://github.com/future-architect/vuls/pull/142) ([dtan4](https://github.com/dtan4))
- Remove unnecessary step in readme of docker setup [\#140](https://github.com/future-architect/vuls/pull/140) ([mikkame](https://github.com/mikkame))
- Update logo [\#139](https://github.com/future-architect/vuls/pull/139) ([chanomaru](https://github.com/chanomaru))
- Update README.ja.md to fix wrong tips. [\#135](https://github.com/future-architect/vuls/pull/135) ([a2atsu](https://github.com/a2atsu))
- add tips about NVD JVN issue [\#133](https://github.com/future-architect/vuls/pull/133) ([a2atsu](https://github.com/a2atsu))
- Fix README wrong links [\#129](https://github.com/future-architect/vuls/pull/129) ([aomoriringo](https://github.com/aomoriringo))
- Add logo [\#126](https://github.com/future-architect/vuls/pull/126) ([chanomaru](https://github.com/chanomaru))
- Improve setup/docker [\#125](https://github.com/future-architect/vuls/pull/125) ([kotakanbe](https://github.com/kotakanbe))
- Fix scan command help [\#124](https://github.com/future-architect/vuls/pull/124) ([aomoriringo](https://github.com/aomoriringo))
- added dockernized-vuls with vulsrepo [\#121](https://github.com/future-architect/vuls/pull/121) ([hikachan](https://github.com/hikachan))
- Fix detect platform on azure and degital ocean [\#119](https://github.com/future-architect/vuls/pull/119) ([kotakanbe](https://github.com/kotakanbe))
- Remove json marshall-indent [\#118](https://github.com/future-architect/vuls/pull/118) ([kotakanbe](https://github.com/kotakanbe))
- Improve Readme.ja [\#116](https://github.com/future-architect/vuls/pull/116) ([kotakanbe](https://github.com/kotakanbe))
- Add architecture diag to README.md [\#114](https://github.com/future-architect/vuls/pull/114) ([kotakanbe](https://github.com/kotakanbe))
- Rename linux.go to base.go [\#100](https://github.com/future-architect/vuls/pull/100) ([kotakanbe](https://github.com/kotakanbe))
- Update README.md [\#74](https://github.com/future-architect/vuls/pull/74) ([yoshi-taka](https://github.com/yoshi-taka))
- Refactoring debian.go [\#72](https://github.com/future-architect/vuls/pull/72) ([kotakanbe](https://github.com/kotakanbe))

## [v0.1.4](https://github.com/future-architect/vuls/tree/v0.1.4) (2016-05-24)

[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.3...v0.1.4)

**Implemented enhancements:**

- Initial fetch from NVD is too heavy \(2.3 GB of memory consumed\) [\#27](https://github.com/future-architect/vuls/issues/27)
- Enable to show previous scan result [\#69](https://github.com/future-architect/vuls/pull/69) ([kotakanbe](https://github.com/kotakanbe))
- Add ignore-unscored-cves option [\#68](https://github.com/future-architect/vuls/pull/68) ([kotakanbe](https://github.com/kotakanbe))
- Support dynamic scanning docker container [\#67](https://github.com/future-architect/vuls/pull/67) ([kotakanbe](https://github.com/kotakanbe))
- Add version flag [\#65](https://github.com/future-architect/vuls/pull/65) ([kotakanbe](https://github.com/kotakanbe))
- Update Dockerfile [\#57](https://github.com/future-architect/vuls/pull/57) ([theonlydoo](https://github.com/theonlydoo))
- Update run.sh [\#56](https://github.com/future-architect/vuls/pull/56) ([theonlydoo](https://github.com/theonlydoo))
- Support Windows [\#33](https://github.com/future-architect/vuls/pull/33) ([mattn](https://github.com/mattn))

**Fixed bugs:**

- vuls scan -cvss-over does not work. [\#59](https://github.com/future-architect/vuls/issues/59)
- `panic: runtime error: invalid memory address or nil pointer dereference` when scan CentOS5.5 [\#58](https://github.com/future-architect/vuls/issues/58)
- It rans out of memory. [\#47](https://github.com/future-architect/vuls/issues/47)
- BUG: vuls scan on CentOS with Japanese environment. [\#43](https://github.com/future-architect/vuls/issues/43)
- yum --color=never [\#36](https://github.com/future-architect/vuls/issues/36)
- Failed to parse yum check-update [\#32](https://github.com/future-architect/vuls/issues/32)
- Pointless sudo [\#29](https://github.com/future-architect/vuls/issues/29)
- Can't init database in a path having blanks [\#26](https://github.com/future-architect/vuls/issues/26)
- Fix pointless sudo in debian.go \#29 [\#66](https://github.com/future-architect/vuls/pull/66) ([kotakanbe](https://github.com/kotakanbe))
- Fix error handling of httpGet in cve-client \#58 [\#64](https://github.com/future-architect/vuls/pull/64) ([kotakanbe](https://github.com/kotakanbe))
- Fix nil pointer at error handling of cve\_client \#58 [\#63](https://github.com/future-architect/vuls/pull/63) ([kotakanbe](https://github.com/kotakanbe))
- Set language en\_US. [\#61](https://github.com/future-architect/vuls/pull/61) ([pabroff](https://github.com/pabroff))
- Fix -cvss-over flag \#59 [\#60](https://github.com/future-architect/vuls/pull/60) ([kotakanbe](https://github.com/kotakanbe))
- Fix scan on Japanese environment. [\#55](https://github.com/future-architect/vuls/pull/55) ([pabroff](https://github.com/pabroff))
- Fix a typo: replace Depricated by Deprecated. [\#54](https://github.com/future-architect/vuls/pull/54) ([jody-frankowski](https://github.com/jody-frankowski))
- Fix yes no infinite loop while doing yum update --changelog on root@CentOS \#47 [\#50](https://github.com/future-architect/vuls/pull/50) ([pabroff](https://github.com/pabroff))
- Fix $servername in output of discover command [\#45](https://github.com/future-architect/vuls/pull/45) ([kotakanbe](https://github.com/kotakanbe))

## [v0.1.3](https://github.com/future-architect/vuls/tree/v0.1.3) (2016-04-21)

[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.2...v0.1.3)

**Implemented enhancements:**

- Add sudo support for prepare [\#11](https://github.com/future-architect/vuls/issues/11)
- Dockerfile? [\#10](https://github.com/future-architect/vuls/issues/10)
- Update README [\#41](https://github.com/future-architect/vuls/pull/41) ([theonlydoo](https://github.com/theonlydoo))
- Sparse dockerization [\#38](https://github.com/future-architect/vuls/pull/38) ([theonlydoo](https://github.com/theonlydoo))
- No password in config [\#35](https://github.com/future-architect/vuls/pull/35) ([kotakanbe](https://github.com/kotakanbe))
- Fr readme translation [\#23](https://github.com/future-architect/vuls/pull/23) ([novakin](https://github.com/novakin))

**Fixed bugs:**

- Issues updating CVE database behind https proxy [\#39](https://github.com/future-architect/vuls/issues/39)
- Vuls failed to parse yum check-update [\#24](https://github.com/future-architect/vuls/issues/24)
- Fix yum to yum --color=never \#36 [\#42](https://github.com/future-architect/vuls/pull/42) ([kotakanbe](https://github.com/kotakanbe))
- Fix parse yum check update [\#40](https://github.com/future-architect/vuls/pull/40) ([kotakanbe](https://github.com/kotakanbe))
- fix typo [\#31](https://github.com/future-architect/vuls/pull/31) ([blue119](https://github.com/blue119))
- Fix error while parsing yum check-update \#24 [\#30](https://github.com/future-architect/vuls/pull/30) ([kotakanbe](https://github.com/kotakanbe))

**Closed issues:**

- Unable to scan on ubuntu because changelog.ubuntu.com is down... [\#21](https://github.com/future-architect/vuls/issues/21)
- err: Not initialize\(d\) yet.. [\#16](https://github.com/future-architect/vuls/issues/16)
- Errors when using fish shell [\#8](https://github.com/future-architect/vuls/issues/8)

## [v0.1.2](https://github.com/future-architect/vuls/tree/v0.1.2) (2016-04-12)

[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.1...v0.1.2)

**Fixed bugs:**

- Maximum 6 nodes available to scan [\#12](https://github.com/future-architect/vuls/issues/12)
- panic: runtime error: index out of range [\#5](https://github.com/future-architect/vuls/issues/5)
- Fix sudo option on RedHat like Linux and change some messages. [\#20](https://github.com/future-architect/vuls/pull/20) ([kotakanbe](https://github.com/kotakanbe))
- Typo fix and updated readme [\#19](https://github.com/future-architect/vuls/pull/19) ([EuanKerr](https://github.com/EuanKerr))
- remove a period at the end of error messages. [\#18](https://github.com/future-architect/vuls/pull/18) ([kotakanbe](https://github.com/kotakanbe))
- fix error while yum updateinfo --security update on rhel@aws [\#17](https://github.com/future-architect/vuls/pull/17) ([kotakanbe](https://github.com/kotakanbe))
- Fixed typos [\#15](https://github.com/future-architect/vuls/pull/15) ([radarhere](https://github.com/radarhere))
- Typo fix in error messages [\#14](https://github.com/future-architect/vuls/pull/14) ([Bregor](https://github.com/Bregor))
- Fix index out of range error when the number of servers is over 6. \#12 [\#13](https://github.com/future-architect/vuls/pull/13) ([kotakanbe](https://github.com/kotakanbe))
- Revise small grammar mistakes in serverapi.go [\#9](https://github.com/future-architect/vuls/pull/9) ([cpobrien](https://github.com/cpobrien))
- Fix error handling in HTTP backoff function [\#7](https://github.com/future-architect/vuls/pull/7) ([kotakanbe](https://github.com/kotakanbe))

## [v0.1.1](https://github.com/future-architect/vuls/tree/v0.1.1) (2016-04-06)

[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.0...v0.1.1)

**Fixed bugs:**

- Typo in Exapmle [\#6](https://github.com/future-architect/vuls/pull/6) ([toli](https://github.com/toli))

## [v0.1.0](https://github.com/future-architect/vuls/tree/v0.1.0) (2016-04-04)

**Merged pull requests:**

- English translation [\#4](https://github.com/future-architect/vuls/pull/4) ([hikachan](https://github.com/hikachan))
- English translation [\#3](https://github.com/future-architect/vuls/pull/3) ([chewyinping](https://github.com/chewyinping))
- Add a Bitdeli Badge to README [\#2](https://github.com/future-architect/vuls/pull/2) ([bitdeli-chef](https://github.com/bitdeli-chef))

\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*

32  Dockerfile
@@ -1,32 +0,0 @@
FROM golang:alpine as builder

RUN apk add --no-cache \
        git \
        make \
        gcc \
        musl-dev

ENV REPOSITORY github.com/future-architect/vuls
COPY . $GOPATH/src/$REPOSITORY
RUN cd $GOPATH/src/$REPOSITORY && make install

FROM alpine:3.16

ENV LOGDIR /var/log/vuls
ENV WORKDIR /vuls

RUN apk add --no-cache \
        openssh-client \
        ca-certificates \
        git \
        nmap \
    && mkdir -p $WORKDIR $LOGDIR

COPY --from=builder /go/bin/vuls /usr/local/bin/

VOLUME ["$WORKDIR", "$LOGDIR"]
WORKDIR $WORKDIR
ENV PWD $WORKDIR

ENTRYPOINT ["vuls"]
CMD ["--help"]
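The Dockerfile removed by this diff builds vuls in a `golang:alpine` stage and ships only the binary on `alpine:3.16`, with `/vuls` as the working directory and `/var/log/vuls` for logs. As a rough usage sketch only (the image tag, mounted paths, and config file name are illustrative assumptions, not taken from this diff), an image built from it was driven along these lines:

```bash
# Build the image from the repository root (tag name is an example)
docker build -t vuls-local .

# Run a scan, mounting a working dir (config.toml, results) and a log dir
# onto the WORKDIR and LOGDIR declared in the Dockerfile above
docker run --rm -it \
  -v "$PWD":/vuls \
  -v "$PWD/logs":/var/log/vuls \
  vuls-local scan -config=./config.toml
```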
263  GNUmakefile
@@ -1,245 +1,42 @@
.PHONY: \
	build \
	install \
	all \
	vendor \
	lint \
	vet \
	fmt \
	fmtcheck \
	pretest \
	test \
	cov \
	clean

SRCS = $(shell git ls-files '*.go')
PKGS = $(shell go list ./...)
VERSION := $(shell git describe --tags --abbrev=0)
ifeq ($(VERSION), )
VERSION := $(shell git rev-parse --abbrev-ref HEAD)
endif
ifeq ($(shell git rev-parse --abbrev-ref HEAD), nightly)
VERSION := nightly
endif
REVISION := $(shell git rev-parse --short HEAD)
BUILDTIME := $(shell date "+%Y%m%d_%H%M%S")
LDFLAGS := -X 'github.com/future-architect/vuls/config.Version=$(VERSION)' -X 'github.com/future-architect/vuls/config.Revision=build-$(BUILDTIME)_$(REVISION)'
GO := GO111MODULE=on go
CGO_UNABLED := CGO_ENABLED=0 go
GO_OFF := GO111MODULE=off go
LDFLAGS := -ldflags "-s -w -X=github.com/future-architect/vuls/pkg/cmd/version.Version=$(VERSION) -X=github.com/future-architect/vuls/pkg/cmd/version.Revision=$(REVISION)"

GOPATH := $(shell go env GOPATH)
GOBIN := $(GOPATH)/bin
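# Note (not part of the diff): this hunk appears to interleave the removed and
# added variable blocks, which is why LDFLAGS is assigned twice; when make reads
# the text as shown, the later assignment (the -s -w / pkg/cmd/version form) is
# the effective one. As an illustrative sketch only (the version and revision
# values below are examples, not taken from this diff), `make build` then
# roughly expands to:
#
#   go build -ldflags "-s -w \
#     -X=github.com/future-architect/vuls/pkg/cmd/version.Version=nightly \
#     -X=github.com/future-architect/vuls/pkg/cmd/version.Revision=abc1234" \
#     ./cmd/vuls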
all: build test

$(GOBIN)/golangci-lint:
	go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest

build: ./cmd/vuls/main.go
	$(GO) build -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/vuls

.PHONY: build
build:
	go build $(LDFLAGS) ./cmd/vuls

install: ./cmd/vuls/main.go
	$(GO) install -ldflags "$(LDFLAGS)" ./cmd/vuls

.PHONY: install
install:
	go install $(LDFLAGS) ./cmd/vuls

build-scanner: ./cmd/scanner/main.go
	$(CGO_UNABLED) build -tags=scanner -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/scanner

install-scanner: ./cmd/scanner/main.go
	$(CGO_UNABLED) install -tags=scanner -ldflags "$(LDFLAGS)" ./cmd/scanner

lint:
	$(GO) install github.com/mgechev/revive@latest
	revive -config ./.revive.toml -formatter plain $(PKGS)

vet:
	echo $(PKGS) | xargs env $(GO) vet || exit;

golangci:
	$(GO) install github.com/golangci/golangci-lint/cmd/golangci-lint@latest
	golangci-lint run

fmt:
	gofmt -s -w $(SRCS)

mlint:
	$(foreach file,$(SRCS),gometalinter $(file) || exit;)

fmtcheck:
	$(foreach file,$(SRCS),gofmt -s -d $(file);)

.PHONY: test
test: pretest
	go test -race ./...

.PHONY: pretest
pretest: lint vet fmtcheck

test: pretest
	$(GO) test -cover -v ./... || exit;

.PHONY: lint
lint: $(GOBIN)/golangci-lint
	golangci-lint run

unused:
	$(foreach pkg,$(PKGS),unused $(pkg);)

.PHONY: vet
vet:
	go vet ./...

cov:
	@ go get -v github.com/axw/gocov/gocov
	@ go get golang.org/x/tools/cmd/cover
	gocov test -v ./... | gocov report

clean:
	echo $(PKGS) | xargs go clean || exit;

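# A typical local development loop with the targets above (illustrative only,
# not part of this Makefile):
#   make pretest        # lint + vet + fmtcheck
#   make test           # runs pretest, then the Go test suite
#   make build && ./vuls --help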
# trivy-to-vuls
build-trivy-to-vuls: ./contrib/trivy/cmd/main.go
	$(GO) build -a -ldflags "$(LDFLAGS)" -o trivy-to-vuls ./contrib/trivy/cmd

# future-vuls
build-future-vuls: ./contrib/future-vuls/cmd/main.go
	$(GO) build -a -ldflags "$(LDFLAGS)" -o future-vuls ./contrib/future-vuls/cmd

# integration-test
BASE_DIR := '${PWD}/integration/results'
# $(shell mkdir -p ${BASE_DIR})
NOW=$(shell date --iso-8601=seconds)
NOW_JSON_DIR := '${BASE_DIR}/$(NOW)'
ONE_SEC_AFTER=$(shell date -d '+1 second' --iso-8601=seconds)
ONE_SEC_AFTER_JSON_DIR := '${BASE_DIR}/$(ONE_SEC_AFTER)'
LIBS := 'bundler' 'pip' 'pipenv' 'poetry' 'composer' 'npm' 'yarn' 'pnpm' 'cargo' 'gomod' 'gosum' 'gobinary' 'jar' 'pom' 'nuget-lock' 'nuget-config' 'dotnet-deps' 'nvd_exact' 'nvd_rough' 'nvd_vendor_product' 'nvd_match_no_jvn' 'jvn_vendor_product' 'jvn_vendor_product_nover'

diff:
	# git clone git@github.com:vulsio/vulsctl.git
	# cd vulsctl/docker
	# ./update-all.sh
	# cd /path/to/vuls
	# vim integration/int-config.toml
	# ln -s vuls vuls.new
	# ln -s oldvuls vuls.old
	# make int
	# (ex. test 10 times: for i in `seq 10`; do make int ARGS=-quiet ; done)
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
	mv ${BASE_DIR} /tmp/${NOW}
endif
	mkdir -p ${NOW_JSON_DIR}
	sleep 1
	./vuls.old scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp ${BASE_DIR}/current/*.json ${NOW_JSON_DIR}
	- cp integration/data/results/*.json ${NOW_JSON_DIR}
	./vuls.old report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${NOW}

	mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
	sleep 1
	./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp ${BASE_DIR}/current/*.json ${ONE_SEC_AFTER_JSON_DIR}
	- cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
	./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${ONE_SEC_AFTER}

	$(call sed-d)
	- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
	echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
	$(call count-cve)

diff-redis:
	# docker network create redis-nw
	# docker run --name redis -d --network redis-nw -p 127.0.0.1:6379:6379 redis
	# git clone git@github.com:vulsio/vulsctl.git
	# cd vulsctl/docker
	# ./update-all-redis.sh
	# (or export DOCKER_NETWORK=redis-nw; cd /home/ubuntu/vulsctl/docker; ./update-all.sh --dbtype redis --dbpath "redis://redis/0")
	# vim integration/int-redis-config.toml
	# ln -s vuls vuls.new
	# ln -s oldvuls vuls.old
	# make int-redis
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
	mv ${BASE_DIR} /tmp/${NOW}
endif
	mkdir -p ${NOW_JSON_DIR}
	sleep 1
	./vuls.old scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp -f ${BASE_DIR}/current/*.json ${NOW_JSON_DIR}
	- cp integration/data/results/*.json ${NOW_JSON_DIR}
	./vuls.old report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${NOW}

	mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
	sleep 1
	./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp -f ${BASE_DIR}/current/*.json ${ONE_SEC_AFTER_JSON_DIR}
	- cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
	./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${ONE_SEC_AFTER}

	$(call sed-d)
	- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
	echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
	$(call count-cve)

diff-rdb-redis:
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
	mv ${BASE_DIR} /tmp/${NOW}
endif
	mkdir -p ${NOW_JSON_DIR}
	sleep 1
	# new vs new
	./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp -f ${BASE_DIR}/current/*.json ${NOW_JSON_DIR}
	cp integration/data/results/*.json ${NOW_JSON_DIR}
	./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${NOW}

	mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
	sleep 1
	./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp -f ${BASE_DIR}/current/*.json ${ONE_SEC_AFTER_JSON_DIR}
	cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
	./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${ONE_SEC_AFTER}

	$(call sed-d)
	- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
	echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
	$(call count-cve)

head= $(shell git rev-parse HEAD)
prev= $(shell git rev-parse HEAD^)
branch=$(shell git rev-parse --abbrev-ref HEAD)
build-integration:
	git stash

	# build HEAD
	git checkout ${head}
	make build
	mv -f ./vuls ./vuls.${head}

	# HEAD^
	git checkout ${prev}
	make build
	mv -f ./vuls ./vuls.${prev}

	# master
	git checkout master
	make build
	mv -f ./vuls ./vuls.master

	# working tree
	git checkout ${branch}
	git stash apply stash@\{0\}
	make build

	# update integration data
	git submodule update --remote

	# for integration testing, vuls.new and vuls.old needed.
	# ex)
	# $ ln -s ./vuls ./vuls.new
	# $ ln -s ./vuls.${head} ./vuls.old
	# or
	# $ ln -s ./vuls.${prev} ./vuls.old
	# then
	# $ make diff
	# $ make diff-redis
	# $ make diff-rdb-redis

define sed-d
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/scannedAt/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/scannedAt/d' {} \;
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/"Type":/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/"Type":/d' {} \;
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/"SQLite3Path":/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/"SQLite3Path":/d' {} \;
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/reportedRevision/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/reportedRevision/d' {} \;
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/scannedRevision/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/scannedRevision/d' {} \;
endef

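# Note: sed-d deletes the lines that legitimately differ between the two runs
# (scannedAt/reportedAt timestamps, scanned/reported revisions, and the DB
# "Type"/"SQLite3Path" settings) so that the `diff -c` calls in the targets
# above only flag real differences in scan and report output.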
define count-cve
	for jsonfile in ${NOW_JSON_DIR}/*.json ; do \
		echo $$jsonfile; cat $$jsonfile | jq ".scannedCves | length" ; \
	done
	for jsonfile in ${ONE_SEC_AFTER_JSON_DIR}/*.json ; do \
		echo $$jsonfile; cat $$jsonfile | jq ".scannedCves | length" ; \
	done
endef

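# count-cve prints the number of detected CVEs per result JSON. An equivalent
# one-off check by hand (the path below is an illustrative placeholder):
#   jq ".scannedCves | length" <results-dir>/<server>.json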
.PHONY: fmtcheck
fmtcheck:
	gofmt -s -d .

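Putting the integration pieces together: the flow below is a sketch that follows the comments in `build-integration` above; the choice of symlink target and the loop count are illustrative, not prescribed by this Makefile.

```bash
make build-integration                    # builds ./vuls plus ./vuls.<sha> copies for HEAD, HEAD^ and master
ln -s ./vuls ./vuls.new                   # the working-tree build under test
ln -s ./vuls.$(git rev-parse HEAD) ./vuls.old   # or the HEAD^ copy, per the comments above
make diff                                 # scan + report with both binaries, then diff the result JSON
for i in $(seq 10); do make diff; done    # repeat to shake out flaky differences
```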
674  LICENSE
@@ -1,674 +0,0 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

[The remainder of this hunk is the standard, unmodified text of the GNU General Public License version 3; the entire 674-line LICENSE file is removed by this commit.]
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
Vuls - Vulnerability Scanner
|
||||
Copyright (C) 2016 Future Corporation , Japan.
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
Vuls Copyright (C) 2016 Future Corporation , Japan.
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
||||
716 README.md
@@ -1,207 +1,513 @@
|
||||
|
||||
# Vuls: VULnerability Scanner
|
||||
|
||||
[](http://goo.gl/forms/xm5KFo35tu)
|
||||
[](https://github.com/future-architect/vuls/blob/master/LICENSE)
|
||||
[](https://travis-ci.org/future-architect/vuls)
|
||||
[](https://goreportcard.com/report/github.com/future-architect/vuls)
|
||||
[](https://github.com/future-architect/vuls/graphs/contributors)
|
||||
|
||||

|
||||
|
||||
Vulnerability scanner for Linux/FreeBSD, agent-less, written in Go.
|
||||
We have a slack team. [Join slack team](https://join.slack.com/t/vuls-github/shared_invite/zt-1fculjwj4-6nex2JNE7DpOSiKZ1ztDFw)
|
||||
Twitter: [@vuls_en](https://twitter.com/vuls_en)
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
[](https://asciinema.org/a/3y9zrf950agiko7klg8abvyck)
|
||||
|
||||

|
||||
|
||||
----
|
||||
|
||||
## Abstract
|
||||
|
||||
For a system administrator, having to perform security vulnerability analysis and software updates on a daily basis can be a burden.
To avoid downtime in a production environment, it is common for a system administrator to choose not to use the automatic update option provided by the package manager and to perform updates manually.
This leads to the following problems.
|
||||
|
||||
- The system administrator will have to constantly watch out for any new vulnerabilities in the NVD (National Vulnerability Database) or similar databases.
- It might be impossible for the system administrator to monitor all the software if a large number of software packages are installed on the server.
- It is expensive to perform the analysis needed to determine which servers are affected by new vulnerabilities, and there is a risk of overlooking a server or two during that analysis.
|
||||
|
||||
Vuls is a tool created to solve the problems listed above. It has the following characteristics.
|
||||
|
||||
- Informs users of the vulnerabilities that are related to the system.
|
||||
- Informs users of the servers that are affected.
|
||||
- Vulnerability detection is done automatically to prevent any oversight.
|
||||
- A report can be generated on a regular basis (using cron or other schedulers) to manage vulnerabilities.
|
||||
|
||||

|
||||
|
||||
----
|
||||
|
||||
## Main Features
|
||||
|
||||
### Scan for any vulnerabilities in Linux/FreeBSD Server
|
||||
|
||||
[Supports major Linux/FreeBSD](https://vuls.io/docs/en/supported-os.html)
|
||||
|
||||
- Alpine, Amazon Linux, CentOS, AlmaLinux, Rocky Linux, Debian, Oracle Linux, Raspbian, RHEL, openSUSE, openSUSE Leap, SUSE Enterprise Linux, Fedora, and Ubuntu
|
||||
- FreeBSD
|
||||
- Cloud, on-premises, and running Docker containers
|
||||
|
||||
### High-quality scan
|
||||
|
||||
- Vulnerability Database
|
||||
- [NVD](https://nvd.nist.gov/)
|
||||
- [JVN(Japanese)](http://jvndb.jvn.jp/apis/myjvn/)
|
||||
|
||||
- OVAL
|
||||
- [Red Hat](https://www.redhat.com/security/data/oval/)
|
||||
- [Debian](https://www.debian.org/security/oval/)
|
||||
- [Ubuntu](https://people.canonical.com/~ubuntu-security/oval/)
|
||||
- [SUSE](http://ftp.suse.com/pub/projects/security/oval/)
|
||||
- [Oracle Linux](https://linux.oracle.com/security/oval/)
|
||||
|
||||
- Security Advisory
|
||||
- [Alpine-secdb](https://git.alpinelinux.org/cgit/alpine-secdb/)
|
||||
- [Red Hat Security Advisories](https://access.redhat.com/security/security-updates/)
|
||||
- [Debian Security Bug Tracker](https://security-tracker.debian.org/tracker/)
|
||||
- [Ubuntu CVE Tracker](https://people.canonical.com/~ubuntu-security/cve/)
|
||||
|
||||
- Commands(yum, zypper, pkg-audit)
|
||||
- RHSA / ALAS / ELSA / FreeBSD-SA
|
||||
- Changelog
|
||||
|
||||
- PoC, Exploit
|
||||
- [Exploit Database](https://www.exploit-db.com/)
|
||||
- [Metasploit-Framework modules](https://www.rapid7.com/db/?q=&type=metasploit)
|
||||
- [qazbnm456/awesome-cve-poc](https://github.com/qazbnm456/awesome-cve-poc)
|
||||
- [nomi-sec/PoC-in-GitHub](https://github.com/nomi-sec/PoC-in-GitHub)
|
||||
- [gmatuz/inthewilddb](https://github.com/gmatuz/inthewilddb)
|
||||
|
||||
- CERT
|
||||
- [US-CERT](https://www.us-cert.gov/ncas/alerts)
|
||||
- [JPCERT](http://www.jpcert.or.jp/at/2019.html)
|
||||
|
||||
- CISA(Cybersecurity & Infrastructure Security Agency)
|
||||
- [Known Exploited Vulnerabilities Catalog](https://www.cisa.gov/known-exploited-vulnerabilities-catalog)
|
||||
|
||||
- Cyber Threat Intelligence(MITRE ATT&CK and CAPEC)
|
||||
- [mitre/cti](https://github.com/mitre/cti)
|
||||
|
||||
- Libraries
|
||||
- [Node.js Security Working Group](https://github.com/nodejs/security-wg)
|
||||
- [Ruby Advisory Database](https://github.com/rubysec/ruby-advisory-db)
|
||||
- [Safety DB(Python)](https://github.com/pyupio/safety-db)
|
||||
- [PHP Security Advisories Database](https://github.com/FriendsOfPHP/security-advisories)
|
||||
- [RustSec Advisory Database](https://github.com/RustSec/advisory-db)
|
||||
|
||||
- WordPress
|
||||
- [wpscan](https://wpscan.com/api)
|
||||
|
||||
### Scan mode
|
||||
|
||||
[Fast Scan](https://vuls.io/docs/en/architecture-fast-scan.html)
|
||||
|
||||
- Scan without root privilege, no dependencies
|
||||
- Almost no load on the scan target server
|
||||
- Offline mode scan with no internet access. (CentOS, Alma Linux, Rocky Linux, Debian, Oracle Linux, Red Hat, Fedora, and Ubuntu)
|
||||
|
||||
[Fast Root Scan](https://vuls.io/docs/en/architecture-fast-root-scan.html)
|
||||
|
||||
- Scan with root privilege
|
||||
- Almost no load on the scan target server
|
||||
- Detect processes affected by updates using yum-ps (Amazon Linux, CentOS, Alma Linux, Rocky Linux, Oracle Linux, Fedora, and RedHat)
- Detect processes that have been updated but not yet restarted, using checkrestart from debian-goodies (Debian and Ubuntu)
|
||||
- Offline mode scan with no internet access. (CentOS, Alma Linux, Rocky Linux, Debian, Oracle Linux, Red Hat, Fedora, and Ubuntu)
|
||||
|
||||
### [Remote, Local scan mode, Server mode](https://vuls.io/docs/en/architecture-remote-local.html)
|
||||
|
||||
[Remote scan mode](https://vuls.io/docs/en/architecture-remote-scan.html)
|
||||
|
||||
- You only need to set up one machine that can connect to the target servers via SSH
|
||||
|
||||
[Local scan mode](https://vuls.io/docs/en/architecture-local-scan.html)
|
||||
|
||||
- If you don't want the central Vuls server to connect to each server by SSH, you can use Vuls in the Local Scan mode.
|
||||
|
||||
[Server mode](https://vuls.io/docs/en/usage-server.html)
|
||||
|
||||
- First, start Vuls in server mode so that it listens as an HTTP server.
- Next, issue a command on the scan target server to collect software information, then send the result to the Vuls server via HTTP. You receive the scan results in JSON format (see the sketch after this list).
- No SSH needed, no scanner needed; only Linux commands are issued directly on the scan target server.
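A minimal sketch of that flow, reusing the endpoint, port, and payload shown in the Usage section later in this README (`machine.json` is the collected package information from step 4 there):

```console
# On the central host: start Vuls as an HTTP server
$ vuls server

# On the scan target: POST the collected package information to the /scan endpoint
$ curl -s -X POST -H "Content-Type: application/json" -d @machine.json 127.0.0.1:5515/scan | jq
```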
|
||||
|
||||
### **Dynamic** Analysis
|
||||
|
||||
- It is possible to acquire the state of the server by connecting via SSH and executing commands.
- Vuls warns when the scan target server has updated the kernel (or other packages) but has not yet been restarted.
|
||||
|
||||
### Scan vulnerabilities of non-OS-packages
|
||||
|
||||
- Libraries of programming language
|
||||
- Self-compiled software
|
||||
- Network Devices
|
||||
|
||||
Vuls has several options to detect these vulnerabilities:
|
||||
|
||||
- [Lockfile based Scan](https://vuls.io/docs/en/usage-scan-non-os-packages.html#library-vulns-scan)
|
||||
- [GitHub Integration](https://vuls.io/docs/en/usage-scan-non-os-packages.html#usage-integrate-with-github-security-alerts)
|
||||
- [Common Platform Enumeration (CPE) based Scan](https://vuls.io/docs/en/usage-scan-non-os-packages.html#cpe-scan)
|
||||
- [OWASP Dependency Check Integration](https://vuls.io/docs/en/usage-scan-non-os-packages.html#usage-integrate-with-owasp-dependency-check-to-automatic-update-when-the-libraries-are-updated-experimental)
|
||||
|
||||
## Scan WordPress core, themes, plugins
|
||||
|
||||
- [Scan WordPress](https://vuls.io/docs/en/usage-scan-wordpress.html)
|
||||
|
||||
## MISC
|
||||
|
||||
- Nondestructive testing
|
||||
- Pre-authorization is *NOT* necessary before scanning on AWS
|
||||
- Vuls works well with Continuous Integration since tests can be run every day. This allows you to find vulnerabilities very quickly.
|
||||
- Auto-generation of configuration file template
|
||||
- Auto-detection of servers within a CIDR range, with generation of a configuration file template
- Email and Slack notifications are possible (Japanese language supported)
- Scan results are viewable with accessory software: a TUI viewer in the terminal or a Web UI ([VulsRepo](https://github.com/ishiDACo/vulsrepo)).
|
||||
|
||||
----
|
||||
|
||||
## What Vuls Doesn't Do
|
||||
|
||||
- Vuls doesn't update the vulnerable packages.
|
||||
|
||||
----
|
||||
|
||||
## Document
|
||||
|
||||
For more information, such as installation, tutorials, and usage, visit [vuls.io](https://vuls.io/)
|
||||
[日本語翻訳ドキュメント](https://vuls.io/ja/)
|
||||
|
||||
----
|
||||
|
||||
## Authors
|
||||
|
||||
kotakanbe ([@kotakanbe](https://twitter.com/kotakanbe)) created vuls and [these fine people](https://github.com/future-architect/vuls/graphs/contributors) have contributed.
|
||||
|
||||
## Contribute
|
||||
|
||||
see [vulsdoc](https://vuls.io/docs/en/how-to-contribute.html)
|
||||
|
||||
----
|
||||
|
||||
## Sponsors
|
||||
|
||||
| | |
|
||||
| ------------- | ------------- |
|
||||
| <a href="https://www.tines.com/?utm_source=oss&utm_medium=sponsorship&utm_campaign=vuls"><img src="img/sponsor/tines.png" align="left" width="600px" ></a> | Tines is no-code automation for security teams. Build powerful, reliable workflows without a development team. |
|
||||
| <a href="https://www.sakura.ad.jp/"><img src="https://vuls.io/img/icons/sakura.svg" align="left" width="600px" ></a> | SAKURA internet Inc. is an Internet company founded in 1996. We provide cloud computing services such as "Sakura's Shared Server", "Sakura's VPS", and "Sakura's Cloud" to meet the needs of a wide range of customers, from individuals and corporations to the education and public sectors, using its own data centers in Japan. Based on the philosophy of "changing what you want to do into what you can do," we offer DX solutions for all fields. |
|
||||
|
||||
----
|
||||
|
||||
## License
|
||||
|
||||
Please see [LICENSE](https://github.com/future-architect/vuls/blob/master/LICENSE).
|
||||
## NOTE
|
||||
This is the nightly branch and provides the latest functionality.
Please use the master branch if you need stability; breaking changes may land on this branch.
|
||||
|
||||
## Usage
|
||||
|
||||
### 1. Nightly Vuls installation
|
||||
|
||||
```console
|
||||
$ go install github.com/future-architect/vuls/cmd/vuls@nightly
|
||||
$ vuls version
|
||||
vuls nightly 2ef5390
|
||||
```
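If the `vuls` binary is not found after `go install`, the Go bin directory is probably not on your `PATH` (a sketch assuming the default Go installation layout):

```console
$ export PATH=$PATH:$(go env GOPATH)/bin
$ vuls version
```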
|
||||
|
||||
### 2. DB Fetch
|
||||
|
||||
```console
|
||||
$ vuls db fetch
|
||||
```
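`vuls db fetch` downloads the vulnerability database used by the detect step. In the examples below it is assumed to end up as a single `vuls.db` file in the working directory, which is the path you point the `detect.path` setting at in the config (see the `config` block of the detect output in step 5):

```console
$ ls -lh vuls.db
```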
|
||||
|
||||
### 3. init config & configuration change
|
||||
|
||||
Execute the following command to output a config template to stdout.
Save it to a file and change the necessary parts (the host entries, the path to the DB, etc.) to suit your environment.
|
||||
|
||||
```console
|
||||
$ vuls config init
|
||||
```
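For example (the file name `config.toml` is arbitrary, and any editor will do):

```console
$ vuls config init > config.toml
$ vi config.toml    # adjust host entries, DB path, etc.
```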
|
||||
|
||||
### Optional: add the host to known_hosts

When using `remote` as the host type, Vuls connects to the remote host over SSH.
Register the host in known_hosts before scanning.
|
||||
|
||||
```console
|
||||
$ ssh -i /home/mainek00n/.ssh/id_rsa -p 2222 root@127.0.0.1
|
||||
The authenticity of host '[127.0.0.1]:2222 ([127.0.0.1]:2222)' can't be established.
|
||||
ED25519 key fingerprint is SHA256:dK+aO73n6hymIC3+3yFFHpvyNu/txTYi/LXvMl/TzOk.
|
||||
This key is not known by any other names
|
||||
Are you sure you want to continue connecting (yes/no/[fingerprint])? yes
|
||||
Warning: Permanently added '[127.0.0.1]:2222' (ED25519) to the list of known hosts.
|
||||
```
|
||||
|
||||
### 4. Scan
|
||||
|
||||
```console
|
||||
$ vuls scan
|
||||
Scan Summary
|
||||
============
|
||||
local (ubuntu 22.04): success ospkg: 2663, cpe: 0 installed
|
||||
remote (debian 11): success ospkg: 319, cpe: 0 installed
|
||||
cpe: success ospkg: 0, cpe: 1 installed
|
||||
|
||||
or
|
||||
|
||||
$ vuls server
|
||||
|
||||
$ cat machine.json
|
||||
{
|
||||
"contents": [
|
||||
{
|
||||
"content_type": "os-release",
|
||||
"content": "PRETTY_NAME=\"Ubuntu 22.04.1 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.1 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https:\/\/www.ubuntu.com\/\"\nSUPPORT_URL=\"https:\/\/help.ubuntu.com\/\"\nBUG_REPORT_URL=\"https:\/\/bugs.launchpad.net\/ubuntu\/\"\nPRIVACY_POLICY_URL=\"https:\/\/www.ubuntu.com\/legal\/terms-and-policies\/privacy-policy\"\nUBUNTU_CODENAME=jammy"
|
||||
},
|
||||
{
|
||||
"content_type": "dpkg",
|
||||
"content": "accountsservice,ii ,22.07.5-2ubuntu1.3,amd64,accountsservice,22.07.5-2ubuntu1.3\nacl,ii ,2.3.1-1,amd64,acl,2.3.1-1\nnvim-common,ii ,2:8.2.3995-1ubuntu2.1,all,vim,2:8.2.3995-1ubuntu2.1\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
$ curl -s -X POST -H "Content-Type: application/json" -d @machine.json 127.0.0.1:5515/scan | jq
|
||||
{
|
||||
"name": "8002e134-dc2d-4786-96b3-e751103fe5c3",
|
||||
"family": "ubuntu",
|
||||
"release": "22.04",
|
||||
"scanned_at": "2022-11-14T10:41:07.558379311+09:00",
|
||||
"packages": {
|
||||
"kernel": {},
|
||||
"os_pkg": {
|
||||
"accountsservice": {
|
||||
"name": "accountsservice",
|
||||
"version": "22.07.5-2ubuntu1.3",
|
||||
"arch": "amd64",
|
||||
"src_name": "accountsservice",
|
||||
"src_version": "22.07.5-2ubuntu1.3"
|
||||
},
|
||||
"acl": {
|
||||
"name": "acl",
|
||||
"version": "2.3.1-1",
|
||||
"arch": "amd64",
|
||||
"src_name": "acl",
|
||||
"src_version": "2.3.1-1"
|
||||
},
|
||||
"nvim-common": {
|
||||
"name": "nvim-common",
|
||||
"version": "2:8.2.3995-1ubuntu2.1",
|
||||
"arch": "all",
|
||||
"src_name": "vim",
|
||||
"src_version": "2:8.2.3995-1ubuntu2.1"
|
||||
}
|
||||
}
|
||||
},
|
||||
"config": {}
|
||||
}
|
||||
```
|
||||
|
||||
### 5. Detect
|
||||
|
||||
```console
|
||||
$ vuls detect
|
||||
Detect Summary
|
||||
==============
|
||||
local (ubuntu 22.04) : success 143 CVEs detected
|
||||
remote (debian 11) : success 101 CVEs detected
|
||||
cpe : success 7 CVEs detected
|
||||
|
||||
or
|
||||
|
||||
$ vuls server
|
||||
|
||||
$ cat scan.json
|
||||
{
|
||||
"name": "8002e134-dc2d-4786-96b3-e751103fe5c3",
|
||||
"family": "ubuntu",
|
||||
"release": "22.04",
|
||||
"scanned_at": "2022-11-14T10:41:07.558379311+09:00",
|
||||
"packages": {
|
||||
"kernel": {},
|
||||
"os_pkg": {
|
||||
"accountsservice": {
|
||||
"name": "accountsservice",
|
||||
"version": "22.07.5-2ubuntu1.3",
|
||||
"arch": "amd64",
|
||||
"src_name": "accountsservice",
|
||||
"src_version": "22.07.5-2ubuntu1.3"
|
||||
},
|
||||
"acl": {
|
||||
"name": "acl",
|
||||
"version": "2.3.1-1",
|
||||
"arch": "amd64",
|
||||
"src_name": "acl",
|
||||
"src_version": "2.3.1-1"
|
||||
},
|
||||
"nvim-common": {
|
||||
"name": "nvim-common",
|
||||
"version": "2:8.2.3995-1ubuntu2.1",
|
||||
"arch": "all",
|
||||
"src_name": "vim",
|
||||
"src_version": "2:8.2.3995-1ubuntu2.1"
|
||||
}
|
||||
}
|
||||
},
|
||||
"config": {}
|
||||
}
|
||||
|
||||
$ curl -s -X POST -H "Content-Type: application/json" -d @scan.json 127.0.0.1:5515/detect | jq
|
||||
{
|
||||
"name": "0c33d5fc-add4-465b-9ffa-90e74036259d",
|
||||
"family": "ubuntu",
|
||||
"release": "22.04",
|
||||
"scanned_at": "2022-11-14T10:42:31.863598309+09:00",
|
||||
"detectedd_at": "2022-11-14T10:42:50.677085953+09:00",
|
||||
"packages": {
|
||||
"kernel": {},
|
||||
"os_pkg": {
|
||||
"accountsservice": {
|
||||
"name": "accountsservice",
|
||||
"version": "22.07.5-2ubuntu1.3",
|
||||
"arch": "amd64",
|
||||
"src_name": "accountsservice",
|
||||
"src_version": "22.07.5-2ubuntu1.3"
|
||||
},
|
||||
"acl": {
|
||||
"name": "acl",
|
||||
"version": "2.3.1-1",
|
||||
"arch": "amd64",
|
||||
"src_name": "acl",
|
||||
"src_version": "2.3.1-1"
|
||||
},
|
||||
"nvim-common": {
|
||||
"name": "nvim-common",
|
||||
"version": "2:8.2.3995-1ubuntu2.1",
|
||||
"arch": "all",
|
||||
"src_name": "vim",
|
||||
"src_version": "2:8.2.3995-1ubuntu2.1"
|
||||
}
|
||||
}
|
||||
},
|
||||
"scanned_cves": {
|
||||
"CVE-2022-0128": {
|
||||
"content": {
|
||||
"official": {
|
||||
"id": "CVE-2022-0128",
|
||||
"advisory": [
|
||||
"mitre",
|
||||
"nvd",
|
||||
"alpine:3.12:CVE-2022-0128",
|
||||
"alpine:3.13:CVE-2022-0128",
|
||||
"alpine:3.14:CVE-2022-0128",
|
||||
"alpine:3.15:CVE-2022-0128",
|
||||
"alpine:3.16:CVE-2022-0128",
|
||||
"amazon:2022:ALAS2022-2022-014",
|
||||
"debian_security_tracker:11:CVE-2022-0128",
|
||||
"debian_security_tracker:12:CVE-2022-0128",
|
||||
"debian_security_tracker:sid:CVE-2022-0128",
|
||||
"debian_security_tracker:10:CVE-2022-0128",
|
||||
"redhat_oval:6-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:7-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:8-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:9-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"suse_oval:opensuse.leap.15.3:oval:org.opensuse.security:def:20220128",
|
||||
"suse_oval:opensuse.leap.15.4:oval:org.opensuse.security:def:20220128",
|
||||
"suse_oval:suse.linux.enterprise.desktop.15:oval:org.opensuse.security:def:20220128",
|
||||
"suse_oval:suse.linux.enterprise.server.12:oval:org.opensuse.security:def:20220128",
|
||||
"suse_oval:suse.linux.enterprise.server.15:oval:org.opensuse.security:def:20220128",
|
||||
"suse_cvrf",
|
||||
"ubuntu_oval:14.04:oval:com.ubuntu.trusty:def:202201280000000",
|
||||
"ubuntu_oval:16.04:oval:com.ubuntu.xenial:def:202201280000000",
|
||||
"ubuntu_oval:21.04:oval:com.ubuntu.hirsute:def:202201280000000",
|
||||
"ubuntu_oval:22.04:oval:com.ubuntu.jammy:def:202201280000000",
|
||||
"ubuntu_oval:22.10:oval:com.ubuntu.kinetic:def:202201280000000",
|
||||
"ubuntu_security_tracker"
|
||||
],
|
||||
"title": "CVE-2022-0128",
|
||||
"description": "vim is vulnerable to Out-of-bounds Read",
|
||||
"cvss": [
|
||||
{
|
||||
"source": "nvd",
|
||||
"version": "2.0",
|
||||
"vector": "AV:N/AC:M/Au:N/C:P/I:P/A:P",
|
||||
"score": 6.8,
|
||||
"severity": "MEDIUM"
|
||||
},
|
||||
{
|
||||
"source": "nvd",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H",
|
||||
"score": 7.8,
|
||||
"severity": "HIGH"
|
||||
},
|
||||
{
|
||||
"source": "amazon:2022:ALAS2022-2022-014",
|
||||
"severity": "Important"
|
||||
},
|
||||
{
|
||||
"source": "redhat_oval:7-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:H",
|
||||
"score": 6.1,
|
||||
"severity": "moderate"
|
||||
},
|
||||
{
|
||||
"source": "redhat_oval:8-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:H",
|
||||
"score": 6.1,
|
||||
"severity": "moderate"
|
||||
},
|
||||
{
|
||||
"source": "redhat_oval:9-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:H",
|
||||
"score": 6.1,
|
||||
"severity": "moderate"
|
||||
},
|
||||
{
|
||||
"source": "redhat_oval:6-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:H",
|
||||
"score": 6.1,
|
||||
"severity": "moderate"
|
||||
},
|
||||
{
|
||||
"source": "suse_oval:opensuse.leap.15.4:oval:org.opensuse.security:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3,
|
||||
"severity": "low"
|
||||
},
|
||||
{
|
||||
"source": "suse_oval:suse.linux.enterprise.desktop.15:oval:org.opensuse.security:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3,
|
||||
"severity": "low"
|
||||
},
|
||||
{
|
||||
"source": "suse_oval:suse.linux.enterprise.server.12:oval:org.opensuse.security:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3,
|
||||
"severity": "low"
|
||||
},
|
||||
{
|
||||
"source": "suse_oval:suse.linux.enterprise.server.15:oval:org.opensuse.security:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3,
|
||||
"severity": "low"
|
||||
},
|
||||
{
|
||||
"source": "suse_oval:opensuse.leap.15.3:oval:org.opensuse.security:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3,
|
||||
"severity": "low"
|
||||
},
|
||||
{
|
||||
"source": "suse_cvrf",
|
||||
"version": "2.0",
|
||||
"vector": "AV:N/AC:M/Au:N/C:P/I:P/A:P",
|
||||
"score": 6.8
|
||||
},
|
||||
{
|
||||
"source": "suse_cvrf",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_oval:14.04:oval:com.ubuntu.trusty:def:202201280000000",
|
||||
"severity": "Medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_oval:16.04:oval:com.ubuntu.xenial:def:202201280000000",
|
||||
"severity": "Medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_oval:21.04:oval:com.ubuntu.hirsute:def:202201280000000",
|
||||
"severity": "Medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_oval:22.04:oval:com.ubuntu.jammy:def:202201280000000",
|
||||
"severity": "Medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_oval:22.10:oval:com.ubuntu.kinetic:def:202201280000000",
|
||||
"severity": "Medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_security_tracker",
|
||||
"severity": "medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_security_tracker",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H",
|
||||
"score": 7.8,
|
||||
"severity": "HIGH"
|
||||
}
|
||||
],
|
||||
"epss": {
|
||||
"epss": 0.01537,
|
||||
"percentile": 0.73989
|
||||
},
|
||||
"cwe": [
|
||||
{
|
||||
"source": [
|
||||
"nvd",
|
||||
"redhat_oval:6-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:7-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:8-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:9-including-unpatched:oval:com.redhat.unaffected:def:20220128"
|
||||
],
|
||||
"id": "125"
|
||||
}
|
||||
],
|
||||
"exploit": [
|
||||
{
|
||||
"source": [
|
||||
"nvd",
|
||||
"inthewild",
|
||||
"trickest"
|
||||
],
|
||||
"url": "https://huntr.dev/bounties/63f51299-008a-4112-b85b-1e904aadd4ba"
|
||||
}
|
||||
],
|
||||
"published": "2022-01-06T17:15:00Z",
|
||||
"modified": "2022-11-02T13:18:00Z",
|
||||
"reference": [
|
||||
"http://www.openwall.com/lists/oss-security/2022/01/15/1",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4166",
|
||||
"https://access.redhat.com/security/cve/CVE-2022-0128",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011493.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011575.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011592.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-June/011301.html",
|
||||
"https://bugs.launchpad.net/ubuntu/+bug/https://huntr.dev/bounties/63f51299-008a-4112-b85b-1e904aadd4ba",
|
||||
"https://support.apple.com/kb/HT213343",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4019",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4187",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4193",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-0156",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-August/011821.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-September/012143.html",
|
||||
"https://bugzilla.suse.com/1194388",
|
||||
"https://www.suse.com/support/security/rating/",
|
||||
"http://people.canonical.com/~ubuntu-security/cve/2022/CVE-2022-0128.html",
|
||||
"http://seclists.org/fulldisclosure/2022/May/35",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4069",
|
||||
"https://www.suse.com/security/cve/CVE-2022-0128",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011573.html",
|
||||
"http://seclists.org/fulldisclosure/2022/Mar/29",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-0128",
|
||||
"https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-0128",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-August/011795.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011495.html",
|
||||
"https://ubuntu.com/security/CVE-2022-0128",
|
||||
"https://huntr.dev/bounties/63f51299-008a-4112-b85b-1e904aadd4ba",
|
||||
"http://seclists.org/fulldisclosure/2022/Jul/14",
|
||||
"https://security.gentoo.org/glsa/202208-32",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-0158",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011591.html",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4136",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4173",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011494.html",
|
||||
"https://support.apple.com/kb/HT213183",
|
||||
"https://support.apple.com/kb/HT213256",
|
||||
"https://github.com/vim/vim/commit/d3a117814d6acbf0dca3eff1a7626843b9b3734a",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4192",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011574.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011593.html"
|
||||
]
|
||||
}
|
||||
},
|
||||
"affected_packages": [
|
||||
{
|
||||
"name": "vim",
|
||||
"source": "official:ubuntu_security_tracker:CVE-2022-0128",
|
||||
"status": "needed"
|
||||
}
|
||||
]
|
||||
},
|
||||
...
|
||||
},
|
||||
"config": {
|
||||
"detect": {
|
||||
"path": "/home/mainek00n/github/github.com/future-architect/vuls/vuls.db",
|
||||
"result_dir": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 6. Report
|
||||
|
||||
```console
|
||||
$ vuls report --format list
|
||||
cpe
|
||||
===
|
||||
+----------------+----------------------------+------+----------+-------+----------------------------------------+--------+----------+
|
||||
| CVEID | VECTOR | CVSS | EPSS | KEV | PACKAGE | STATUS | SOURCE |
|
||||
+----------------+----------------------------+------+----------+-------+----------------------------------------+--------+----------+
|
||||
| CVE-2021-44228 | AV:N/AC:M/Au:N/C:C/I:C/A:C | 9.3 | 0.911590 | true | cpe:2.3:a:apache:log4j:*:*:*:*:*:*:*:* | | official |
|
||||
+----------------+----------------------------+------+----------+-------+ +--------+ +
|
||||
| CVE-2022-23307 | AV:N/AC:L/Au:S/C:C/I:C/A:C | 9.0 | 0.011640 | false | | | |
|
||||
+----------------+----------------------------+------+----------+ + +--------+ +
|
||||
| CVE-2021-44832 | AV:N/AC:M/Au:S/C:C/I:C/A:C | 8.5 | 0.686370 | | | | |
|
||||
+----------------+----------------------------+------+----------+ + +--------+ +
|
||||
| CVE-2022-23305 | AV:N/AC:M/Au:N/C:P/I:P/A:P | 6.8 | 0.017420 | | | | |
|
||||
+----------------+----------------------------+------+----------+ + +--------+ +
|
||||
| CVE-2022-23302 | AV:N/AC:M/Au:S/C:P/I:P/A:P | 6.0 | 0.091480 | | | | |
|
||||
+----------------+----------------------------+------+----------+ + +--------+ +
|
||||
| CVE-2021-45046 | AV:N/AC:H/Au:N/C:P/I:P/A:P | 5.1 | 0.719510 | | | | |
|
||||
+----------------+----------------------------+------+----------+ + +--------+ +
|
||||
| CVE-2021-45105 | AV:N/AC:M/Au:N/C:N/I:N/A:P | 4.3 | 0.442620 | | | | |
|
||||
+----------------+----------------------------+------+----------+-------+----------------------------------------+--------+----------+
|
||||
|
||||
local (ubuntu 22.04)
|
||||
====================
|
||||
+----------------+----------------------------+------+----------+-------+-----------------------+----------+----------+
|
||||
| CVEID | VECTOR | CVSS | EPSS | KEV | PACKAGE | STATUS | SOURCE |
|
||||
+----------------+----------------------------+------+----------+-------+-----------------------+----------+----------+
|
||||
| CVE-2022-0318 | AV:N/AC:L/Au:N/C:P/I:P/A:P | 7.5 | 0.011830 | false | vim | needed | official |
|
||||
+----------------+----------------------------+------+----------+ +-----------------------+ + +
|
||||
| CVE-2022-25255 | AV:L/AC:L/Au:N/C:C/I:C/A:C | 7.2 | 0.009500 | | qtbase-opensource-src | | |
|
||||
+----------------+ + +----------+ +-----------------------+ + +
|
||||
| CVE-2022-0995 | | | 0.024480 | | linux | | |
|
||||
+----------------+----------------------------+------+----------+ +-----------------------+ + +
|
||||
...
|
||||
+----------------+----------------------------+------+----------+ +-----------------------+----------+ +
|
||||
| CVE-2022-42799 | | | 0.011080 | | webkit2gtk | needed | |
|
||||
+----------------+----------------------------+------+----------+ +-----------------------+ + +
|
||||
| CVE-2022-3061 | | | 0.008900 | | linux | | |
|
||||
+----------------+----------------------------+------+----------+-------+-----------------------+----------+----------+
|
||||
|
||||
remote (debian 11)
|
||||
==================
|
||||
+----------------+----------------------------+------+----------+-------+------------+--------+----------+
|
||||
| CVEID | VECTOR | CVSS | EPSS | KEV | PACKAGE | STATUS | SOURCE |
|
||||
+----------------+----------------------------+------+----------+-------+------------+--------+----------+
|
||||
| CVE-2022-31782 | AV:N/AC:M/Au:N/C:P/I:P/A:P | 6.8 | 0.008850 | false | freetype | open | official |
|
||||
+----------------+ + +----------+ +------------+ + +
|
||||
| CVE-2022-1304 | | | 0.010360 | | e2fsprogs | | |
|
||||
+----------------+----------------------------+------+----------+ +------------+ + +
|
||||
| CVE-2022-1587 | AV:N/AC:L/Au:N/C:P/I:N/A:P | 6.4 | 0.011080 | | pcre2 | | |
|
||||
+----------------+ + +----------+ + + + +
|
||||
| CVE-2022-1586 | | | 0.011830 | | | | |
|
||||
+----------------+----------------------------+------+ + +------------+ + +
|
||||
| CVE-2022-2097 | AV:N/AC:L/Au:N/C:P/I:N/A:N | 5.0 | | | openssl | | |
|
||||
+----------------+----------------------------+------+----------+ +------------+ + +
|
||||
...
|
||||
+----------------+----------------------------+------+----------+ +------------+ + +
|
||||
| CVE-2022-38126 | | | 0.008850 | | binutils | | |
|
||||
+----------------+----------------------------+------+ + +------------+ + +
|
||||
| CVE-2022-41848 | | | | | linux | | |
|
||||
+----------------+----------------------------+------+----------+-------+------------+--------+----------+
|
||||
|
||||
|
||||
```
|
||||
@@ -1,9 +0,0 @@
|
||||
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
Only the latest version is supported.
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
Email kotakanbe@gmail.com
|
||||
172 cache/bolt.go vendored
@@ -1,172 +0,0 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
bolt "go.etcd.io/bbolt"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/util"
|
||||
)
|
||||
|
||||
// Bolt holds a pointer of bolt.DB
|
||||
// boltdb is used to store a cache of Changelogs of Ubuntu/Debian
|
||||
type Bolt struct {
|
||||
Path string
|
||||
Log logging.Logger
|
||||
db *bolt.DB
|
||||
}
|
||||
|
||||
// SetupBolt opens a boltdb and creates a meta bucket if not exists.
|
||||
func SetupBolt(path string, l logging.Logger) error {
|
||||
l.Infof("Open boltDB: %s", path)
|
||||
db, err := bolt.Open(path, 0600, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
b := Bolt{
|
||||
Path: path,
|
||||
Log: l,
|
||||
db: db,
|
||||
}
|
||||
if err = b.createBucketIfNotExists(metabucket); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
DB = b
|
||||
return nil
|
||||
}
|
||||
|
||||
// Close a db.
|
||||
func (b Bolt) Close() error {
|
||||
if b.db == nil {
|
||||
return nil
|
||||
}
|
||||
return b.db.Close()
|
||||
}
|
||||
|
||||
// CreateBucketIfNotExists creates a bucket that is specified by arg.
|
||||
func (b *Bolt) createBucketIfNotExists(name string) error {
|
||||
return b.db.Update(func(tx *bolt.Tx) error {
|
||||
_, err := tx.CreateBucketIfNotExists([]byte(name))
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to create bucket: %w", err)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// GetMeta gets the Meta information of the given servername from boltdb.
|
||||
func (b Bolt) GetMeta(serverName string) (meta Meta, found bool, err error) {
|
||||
err = b.db.View(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(metabucket))
|
||||
v := bkt.Get([]byte(serverName))
|
||||
if len(v) == 0 {
|
||||
found = false
|
||||
return nil
|
||||
}
|
||||
if e := json.Unmarshal(v, &meta); e != nil {
|
||||
return e
|
||||
}
|
||||
found = true
|
||||
return nil
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// RefreshMeta puts the Meta information of the given servername into boltdb, refreshing CreatedAt.
|
||||
func (b Bolt) RefreshMeta(meta Meta) error {
|
||||
meta.CreatedAt = time.Now()
|
||||
jsonBytes, err := json.Marshal(meta)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to marshal to JSON: %w", err)
|
||||
}
|
||||
return b.db.Update(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(metabucket))
|
||||
if err := bkt.Put([]byte(meta.Name), jsonBytes); err != nil {
|
||||
return err
|
||||
}
|
||||
b.Log.Debugf("Refreshed Meta: %s", meta.Name)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// EnsureBuckets puts the Meta information and creates a bucket that holds changelogs.
|
||||
func (b Bolt) EnsureBuckets(meta Meta) error {
|
||||
jsonBytes, err := json.Marshal(meta)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to marshal to JSON: %w", err)
|
||||
}
|
||||
return b.db.Update(func(tx *bolt.Tx) error {
|
||||
b.Log.Debugf("Put to meta: %s", meta.Name)
|
||||
bkt := tx.Bucket([]byte(metabucket))
|
||||
if err := bkt.Put([]byte(meta.Name), jsonBytes); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// re-create a bucket (bucket name: servername)
|
||||
bkt = tx.Bucket([]byte(meta.Name))
|
||||
if bkt != nil {
|
||||
b.Log.Debugf("Delete bucket: %s", meta.Name)
|
||||
if err := tx.DeleteBucket([]byte(meta.Name)); err != nil {
|
||||
return err
|
||||
}
|
||||
b.Log.Debugf("Bucket deleted: %s", meta.Name)
|
||||
}
|
||||
b.Log.Debugf("Create bucket: %s", meta.Name)
|
||||
if _, err := tx.CreateBucket([]byte(meta.Name)); err != nil {
|
||||
return err
|
||||
}
|
||||
b.Log.Debugf("Bucket created: %s", meta.Name)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// PrettyPrint is for debug
|
||||
func (b Bolt) PrettyPrint(meta Meta) error {
|
||||
return b.db.View(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(metabucket))
|
||||
v := bkt.Get([]byte(meta.Name))
|
||||
b.Log.Debugf("Meta: key:%s, value:%s", meta.Name, v)
|
||||
|
||||
bkt = tx.Bucket([]byte(meta.Name))
|
||||
c := bkt.Cursor()
|
||||
for k, v := c.First(); k != nil; k, v = c.Next() {
|
||||
b.Log.Debugf("key:%s, len: %d, %s...",
|
||||
k, len(v), util.Truncate(string(v), 30))
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// GetChangelog gets the changelog of the specified packName from the bucket
|
||||
func (b Bolt) GetChangelog(servername, packName string) (changelog string, err error) {
|
||||
err = b.db.View(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(servername))
|
||||
if bkt == nil {
|
||||
return xerrors.Errorf("Failed to get Bucket: %s", servername)
|
||||
}
|
||||
v := bkt.Get([]byte(packName))
|
||||
if v == nil {
|
||||
changelog = ""
|
||||
return nil
|
||||
}
|
||||
changelog = string(v)
|
||||
return nil
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// PutChangelog puts the changelog of the specified packName into the bucket
|
||||
func (b Bolt) PutChangelog(servername, packName, changelog string) error {
|
||||
return b.db.Update(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(servername))
|
||||
if bkt == nil {
|
||||
return xerrors.Errorf("Failed to get Bucket: %s", servername)
|
||||
}
|
||||
return bkt.Put([]byte(packName), []byte(changelog))
|
||||
})
|
||||
}
|
||||
121 cache/bolt_test.go vendored
@@ -1,121 +0,0 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"os"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
bolt "go.etcd.io/bbolt"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
const path = "/tmp/vuls-test-cache-11111111.db"
|
||||
const servername = "server1"
|
||||
|
||||
var meta = Meta{
|
||||
Name: servername,
|
||||
Distro: config.Distro{
|
||||
Family: "ubuntu",
|
||||
Release: "16.04",
|
||||
},
|
||||
Packs: models.Packages{
|
||||
"apt": {
|
||||
Name: "apt",
|
||||
Version: "1",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func TestSetupBolt(t *testing.T) {
|
||||
log := logging.NewNormalLogger()
|
||||
err := SetupBolt(path, log)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to setup bolt: %s", err)
|
||||
}
|
||||
defer os.Remove(path)
|
||||
|
||||
if err := DB.Close(); err != nil {
|
||||
t.Errorf("Failed to close bolt: %s", err)
|
||||
}
|
||||
|
||||
// check if meta bucket exists
|
||||
db, err := bolt.Open(path, 0600, nil)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to open bolt: %s", err)
|
||||
}
|
||||
|
||||
_ = db.View(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(metabucket))
|
||||
if bkt == nil {
|
||||
t.Errorf("Meta bucket nof found")
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
func TestEnsureBuckets(t *testing.T) {
|
||||
log := logging.NewNormalLogger()
|
||||
if err := SetupBolt(path, log); err != nil {
|
||||
t.Errorf("Failed to setup bolt: %s", err)
|
||||
}
|
||||
if err := DB.EnsureBuckets(meta); err != nil {
|
||||
t.Errorf("Failed to ensure buckets: %s", err)
|
||||
}
|
||||
defer os.Remove(path)
|
||||
|
||||
m, found, err := DB.GetMeta(servername)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to get meta: %s", err)
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Not Found in meta")
|
||||
}
|
||||
if meta.Name != m.Name || meta.Distro != m.Distro {
|
||||
t.Errorf("expected %v, actual %v", meta, m)
|
||||
}
|
||||
if !reflect.DeepEqual(meta.Packs, m.Packs) {
|
||||
t.Errorf("expected %v, actual %v", meta.Packs, m.Packs)
|
||||
}
|
||||
if err := DB.Close(); err != nil {
|
||||
t.Errorf("Failed to close bolt: %s", err)
|
||||
}
|
||||
|
||||
db, err := bolt.Open(path, 0600, nil)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to open bolt: %s", err)
|
||||
}
|
||||
_ = db.View(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(servername))
|
||||
if bkt == nil {
|
||||
t.Errorf("Meta bucket nof found")
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func TestPutGetChangelog(t *testing.T) {
|
||||
clog := "changelog-text"
|
||||
log := logging.NewNormalLogger()
|
||||
if err := SetupBolt(path, log); err != nil {
|
||||
t.Errorf("Failed to setup bolt: %s", err)
|
||||
}
|
||||
defer os.Remove(path)
|
||||
|
||||
if err := DB.EnsureBuckets(meta); err != nil {
|
||||
t.Errorf("Failed to ensure buckets: %s", err)
|
||||
}
|
||||
if err := DB.PutChangelog(servername, "apt", clog); err != nil {
|
||||
t.Errorf("Failed to put changelog: %s", err)
|
||||
}
|
||||
if actual, err := DB.GetChangelog(servername, "apt"); err != nil {
|
||||
t.Errorf("Failed to get changelog: %s", err)
|
||||
} else {
|
||||
if actual != clog {
|
||||
t.Errorf("changelog is not same. e: %s, a: %s", clog, actual)
|
||||
}
|
||||
}
|
||||
}
|
||||
33 cache/db.go vendored
@@ -1,33 +0,0 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
// DB has a cache instance
|
||||
var DB Cache
|
||||
|
||||
const metabucket = "changelog-meta"
|
||||
|
||||
// Cache is an interface of the cache
|
||||
type Cache interface {
|
||||
Close() error
|
||||
GetMeta(string) (Meta, bool, error)
|
||||
RefreshMeta(Meta) error
|
||||
EnsureBuckets(Meta) error
|
||||
PrettyPrint(Meta) error
|
||||
GetChangelog(string, string) (string, error)
|
||||
PutChangelog(string, string, string) error
|
||||
}
|
||||
|
||||
// Meta holds a server name, distro information of the scanned server and
|
||||
// package information that was collected at the last scan.
|
||||
type Meta struct {
|
||||
Name string
|
||||
Distro config.Distro
|
||||
Packs models.Packages
|
||||
CreatedAt time.Time
|
||||
}
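Since this cache package (and its bolt implementation above) is removed in this diff, here is a minimal, hypothetical sketch of how the interface was driven by a caller, using only functions that appear in cache/bolt.go and cache/bolt_test.go above; the db path and server name are illustrative:

```go
package main

import (
	"fmt"
	"log"

	"github.com/future-architect/vuls/cache"
	"github.com/future-architect/vuls/logging"
)

func main() {
	// Open (or create) the boltdb-backed cache; on success the package-level cache.DB is set.
	if err := cache.SetupBolt("/tmp/vuls-cache.db", logging.NewNormalLogger()); err != nil {
		log.Fatal(err)
	}
	defer cache.DB.Close()

	// Read the metadata recorded for a server at the previous scan, if any.
	meta, found, err := cache.DB.GetMeta("server1")
	if err != nil {
		log.Fatal(err)
	}
	if found {
		fmt.Printf("cached packages for %s: %d\n", meta.Name, len(meta.Packs))
	}
}
```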
|
||||
@@ -1,36 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"context"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
commands "github.com/future-architect/vuls/subcmds"
|
||||
"github.com/google/subcommands"
|
||||
)
|
||||
|
||||
func main() {
|
||||
subcommands.Register(subcommands.HelpCommand(), "")
|
||||
subcommands.Register(subcommands.FlagsCommand(), "")
|
||||
subcommands.Register(subcommands.CommandsCommand(), "")
|
||||
subcommands.Register(&commands.DiscoverCmd{}, "discover")
|
||||
subcommands.Register(&commands.ScanCmd{}, "scan")
|
||||
subcommands.Register(&commands.HistoryCmd{}, "history")
|
||||
subcommands.Register(&commands.ConfigtestCmd{}, "configtest")
|
||||
subcommands.Register(&commands.SaaSCmd{}, "saas")
|
||||
|
||||
var v = flag.Bool("v", false, "Show version")
|
||||
|
||||
flag.Parse()
|
||||
|
||||
if *v {
|
||||
fmt.Printf("vuls %s %s\n", config.Version, config.Revision)
|
||||
os.Exit(int(subcommands.ExitSuccess))
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
os.Exit(int(subcommands.Execute(ctx)))
|
||||
}
|
||||
@@ -1,38 +1,15 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"context"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
commands "github.com/future-architect/vuls/subcmds"
|
||||
"github.com/google/subcommands"
|
||||
"github.com/future-architect/vuls/pkg/cmd/root"
|
||||
)
|
||||
|
||||
func main() {
|
||||
subcommands.Register(subcommands.HelpCommand(), "")
|
||||
subcommands.Register(subcommands.FlagsCommand(), "")
|
||||
subcommands.Register(subcommands.CommandsCommand(), "")
|
||||
subcommands.Register(&commands.DiscoverCmd{}, "discover")
|
||||
subcommands.Register(&commands.TuiCmd{}, "tui")
|
||||
subcommands.Register(&commands.ScanCmd{}, "scan")
|
||||
subcommands.Register(&commands.HistoryCmd{}, "history")
|
||||
subcommands.Register(&commands.ReportCmd{}, "report")
|
||||
subcommands.Register(&commands.ConfigtestCmd{}, "configtest")
|
||||
subcommands.Register(&commands.ServerCmd{}, "server")
|
||||
|
||||
var v = flag.Bool("v", false, "Show version")
|
||||
|
||||
flag.Parse()
|
||||
|
||||
if *v {
|
||||
fmt.Printf("vuls-%s-%s\n", config.Version, config.Revision)
|
||||
os.Exit(int(subcommands.ExitSuccess))
|
||||
if err := root.NewCmdRoot().Execute(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "failed to exec vuls: %s\n", fmt.Sprintf("%+v", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
os.Exit(int(subcommands.Execute(ctx)))
|
||||
}
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
package config
|
||||
|
||||
// AWSConf is aws config
|
||||
type AWSConf struct {
|
||||
// AWS profile to use
|
||||
Profile string `json:"profile"`
|
||||
|
||||
// AWS region to use
|
||||
Region string `json:"region"`
|
||||
|
||||
// S3 bucket name
|
||||
S3Bucket string `json:"s3Bucket"`
|
||||
|
||||
// /bucket/path/to/results
|
||||
S3ResultsDir string `json:"s3ResultsDir"`
|
||||
|
||||
// The Server-side encryption algorithm used when storing the reports in S3 (e.g., AES256, aws:kms).
|
||||
S3ServerSideEncryption string `json:"s3ServerSideEncryption"`
|
||||
|
||||
Enabled bool `toml:"-" json:"-"`
|
||||
}
|
||||
|
||||
// Validate configuration
|
||||
func (c *AWSConf) Validate() (errs []error) {
|
||||
// TODO
|
||||
if !c.Enabled {
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1,46 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// AzureConf is azure config
|
||||
type AzureConf struct {
|
||||
// Azure account name to use. AZURE_STORAGE_ACCOUNT environment variable is used if not specified
|
||||
AccountName string `json:"accountName"`
|
||||
|
||||
// Azure account key to use. AZURE_STORAGE_ACCESS_KEY environment variable is used if not specified
|
||||
AccountKey string `json:"-"`
|
||||
|
||||
// Azure storage container name
|
||||
ContainerName string `json:"containerName"`
|
||||
|
||||
Enabled bool `toml:"-" json:"-"`
|
||||
}
|
||||
|
||||
const (
|
||||
azureAccount = "AZURE_STORAGE_ACCOUNT"
|
||||
azureKey = "AZURE_STORAGE_ACCESS_KEY"
|
||||
)
|
||||
|
||||
// Validate configuration
|
||||
func (c *AzureConf) Validate() (errs []error) {
|
||||
if !c.Enabled {
|
||||
return
|
||||
}
|
||||
|
||||
// overwrite if env var is not empty
|
||||
if os.Getenv(azureAccount) != "" {
|
||||
c.AccountName = os.Getenv(azureAccount)
|
||||
}
|
||||
if os.Getenv(azureKey) != "" {
|
||||
c.AccountKey = os.Getenv(azureKey)
|
||||
}
|
||||
|
||||
if c.ContainerName == "" {
|
||||
errs = append(errs, xerrors.Errorf("Azure storage container name is required"))
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// ChatWorkConf is ChatWork config
|
||||
type ChatWorkConf struct {
|
||||
APIToken string `json:"-"`
|
||||
Room string `json:"-"`
|
||||
Enabled bool `toml:"-" json:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *ChatWorkConf) Validate() (errs []error) {
|
||||
if !c.Enabled {
|
||||
return
|
||||
}
|
||||
if len(c.Room) == 0 {
|
||||
errs = append(errs, xerrors.New("chatWorkConf.room must not be empty"))
|
||||
}
|
||||
|
||||
if len(c.APIToken) == 0 {
|
||||
errs = append(errs, xerrors.New("chatWorkConf.ApiToken must not be empty"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
package config
|
||||
|
||||
var (
|
||||
// Colors has ansi color list
|
||||
Colors = []string{
|
||||
"\033[32m", // green
|
||||
"\033[33m", // yellow
|
||||
"\033[36m", // cyan
|
||||
"\033[35m", // magenta
|
||||
"\033[31m", // red
|
||||
"\033[34m", // blue
|
||||
}
|
||||
// ResetColor is reset color
|
||||
ResetColor = "\033[0m"
|
||||
)
|
||||
345
config/config.go
345
config/config.go
@@ -1,345 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// Version of Vuls
|
||||
var Version = "`make build` or `make install` will show the version"
|
||||
|
||||
// Revision of Git
|
||||
var Revision string
|
||||
|
||||
// Conf has Configuration
|
||||
var Conf Config
|
||||
|
||||
//Config is struct of Configuration
|
||||
type Config struct {
|
||||
logging.LogOpts
|
||||
|
||||
// scan, report
|
||||
HTTPProxy string `valid:"url" json:"httpProxy,omitempty"`
|
||||
ResultsDir string `json:"resultsDir,omitempty"`
|
||||
Pipe bool `json:"pipe,omitempty"`
|
||||
|
||||
Default ServerInfo `json:"default,omitempty"`
|
||||
Servers map[string]ServerInfo `json:"servers,omitempty"`
|
||||
|
||||
ScanOpts
|
||||
|
||||
// report
|
||||
CveDict GoCveDictConf `json:"cveDict,omitempty"`
|
||||
OvalDict GovalDictConf `json:"ovalDict,omitempty"`
|
||||
Gost GostConf `json:"gost,omitempty"`
|
||||
Exploit ExploitConf `json:"exploit,omitempty"`
|
||||
Metasploit MetasploitConf `json:"metasploit,omitempty"`
|
||||
KEVuln KEVulnConf `json:"kevuln,omitempty"`
|
||||
Cti CtiConf `json:"cti,omitempty"`
|
||||
|
||||
Slack SlackConf `json:"-"`
|
||||
EMail SMTPConf `json:"-"`
|
||||
HTTP HTTPConf `json:"-"`
|
||||
Syslog SyslogConf `json:"-"`
|
||||
AWS AWSConf `json:"-"`
|
||||
Azure AzureConf `json:"-"`
|
||||
ChatWork ChatWorkConf `json:"-"`
|
||||
GoogleChat GoogleChatConf `json:"-"`
|
||||
Telegram TelegramConf `json:"-"`
|
||||
WpScan WpScanConf `json:"-"`
|
||||
Saas SaasConf `json:"-"`
|
||||
|
||||
ReportOpts
|
||||
}
|
||||
|
||||
// ReportConf is an interface to Validate Report Config
|
||||
type ReportConf interface {
|
||||
Validate() []error
|
||||
}
|
||||
|
||||
// ScanOpts is options for scan
|
||||
type ScanOpts struct {
|
||||
Vvv bool `json:"vvv,omitempty"`
|
||||
}
|
||||
|
||||
// ReportOpts is options for report
|
||||
type ReportOpts struct {
|
||||
CvssScoreOver float64 `json:"cvssScoreOver,omitempty"`
|
||||
ConfidenceScoreOver int `json:"confidenceScoreOver,omitempty"`
|
||||
TrivyCacheDBDir string `json:"trivyCacheDBDir,omitempty"`
|
||||
NoProgress bool `json:"noProgress,omitempty"`
|
||||
RefreshCve bool `json:"refreshCve,omitempty"`
|
||||
IgnoreUnfixed bool `json:"ignoreUnfixed,omitempty"`
|
||||
IgnoreUnscoredCves bool `json:"ignoreUnscoredCves,omitempty"`
|
||||
DiffPlus bool `json:"diffPlus,omitempty"`
|
||||
DiffMinus bool `json:"diffMinus,omitempty"`
|
||||
Diff bool `json:"diff,omitempty"`
|
||||
Lang string `json:"lang,omitempty"`
|
||||
}
|
||||
|
||||
// ValidateOnConfigtest validates
|
||||
func (c Config) ValidateOnConfigtest() bool {
|
||||
errs := c.checkSSHKeyExist()
|
||||
if _, err := govalidator.ValidateStruct(c); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
for _, err := range errs {
|
||||
logging.Log.Error(err)
|
||||
}
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
// ValidateOnScan validates configuration
|
||||
func (c Config) ValidateOnScan() bool {
|
||||
errs := c.checkSSHKeyExist()
|
||||
if len(c.ResultsDir) != 0 {
|
||||
if ok, _ := govalidator.IsFilePath(c.ResultsDir); !ok {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"JSON base directory must be a *Absolute* file path. -results-dir: %s", c.ResultsDir))
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := govalidator.ValidateStruct(c); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
for _, server := range c.Servers {
|
||||
if !server.Module.IsScanPort() {
|
||||
continue
|
||||
}
|
||||
if es := server.PortScan.Validate(); 0 < len(es) {
|
||||
errs = append(errs, es...)
|
||||
}
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
logging.Log.Error(err)
|
||||
}
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
func (c Config) checkSSHKeyExist() (errs []error) {
|
||||
for serverName, v := range c.Servers {
|
||||
if v.Type == constant.ServerTypePseudo {
|
||||
continue
|
||||
}
|
||||
if v.KeyPath != "" {
|
||||
if _, err := os.Stat(v.KeyPath); err != nil {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"%s is invalid. keypath: %s not exists", serverName, v.KeyPath))
|
||||
}
|
||||
}
|
||||
}
|
||||
return errs
|
||||
}
|
||||
|
||||
// ValidateOnReport validates configuration
|
||||
func (c *Config) ValidateOnReport() bool {
|
||||
errs := []error{}
|
||||
|
||||
if len(c.ResultsDir) != 0 {
|
||||
if ok, _ := govalidator.IsFilePath(c.ResultsDir); !ok {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"JSON base directory must be a *Absolute* file path. -results-dir: %s", c.ResultsDir))
|
||||
}
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
for _, rc := range []ReportConf{
|
||||
&c.EMail,
|
||||
&c.Slack,
|
||||
&c.ChatWork,
|
||||
&c.GoogleChat,
|
||||
&c.Telegram,
|
||||
&c.Syslog,
|
||||
&c.HTTP,
|
||||
&c.AWS,
|
||||
&c.Azure,
|
||||
} {
|
||||
if es := rc.Validate(); 0 < len(es) {
|
||||
errs = append(errs, es...)
|
||||
}
|
||||
}
|
||||
|
||||
for _, cnf := range []VulnDictInterface{
|
||||
&Conf.CveDict,
|
||||
&Conf.OvalDict,
|
||||
&Conf.Gost,
|
||||
&Conf.Exploit,
|
||||
&Conf.Metasploit,
|
||||
&Conf.KEVuln,
|
||||
&Conf.Cti,
|
||||
} {
|
||||
if err := cnf.Validate(); err != nil {
|
||||
errs = append(errs, xerrors.Errorf("Failed to validate %s: %+v", cnf.GetName(), err))
|
||||
}
|
||||
if err := cnf.CheckHTTPHealth(); err != nil {
|
||||
errs = append(errs, xerrors.Errorf("Run %s as server mode before reporting: %+v", cnf.GetName(), err))
|
||||
}
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
logging.Log.Error(err)
|
||||
}
|
||||
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
// ValidateOnSaaS validates configuration
|
||||
func (c Config) ValidateOnSaaS() bool {
|
||||
saaserrs := c.Saas.Validate()
|
||||
for _, err := range saaserrs {
|
||||
logging.Log.Error("Failed to validate SaaS conf: %+w", err)
|
||||
}
|
||||
return len(saaserrs) == 0
|
||||
}
|
||||
|
||||
// WpScanConf is wpscan.com config
|
||||
type WpScanConf struct {
|
||||
Token string `toml:"token,omitempty" json:"-"`
|
||||
DetectInactive bool `toml:"detectInactive,omitempty" json:"detectInactive,omitempty"`
|
||||
}
|
||||
|
||||
// ServerInfo has SSH Info, additional CPE packages to scan.
|
||||
type ServerInfo struct {
|
||||
BaseName string `toml:"-" json:"-"`
|
||||
ServerName string `toml:"-" json:"serverName,omitempty"`
|
||||
User string `toml:"user,omitempty" json:"user,omitempty"`
|
||||
Host string `toml:"host,omitempty" json:"host,omitempty"`
|
||||
IgnoreIPAddresses []string `toml:"ignoreIPAddresses,omitempty" json:"ignoreIPAddresses,omitempty"`
|
||||
JumpServer []string `toml:"jumpServer,omitempty" json:"jumpServer,omitempty"`
|
||||
Port string `toml:"port,omitempty" json:"port,omitempty"`
|
||||
SSHConfigPath string `toml:"sshConfigPath,omitempty" json:"sshConfigPath,omitempty"`
|
||||
KeyPath string `toml:"keyPath,omitempty" json:"keyPath,omitempty"`
|
||||
CpeNames []string `toml:"cpeNames,omitempty" json:"cpeNames,omitempty"`
|
||||
ScanMode []string `toml:"scanMode,omitempty" json:"scanMode,omitempty"`
|
||||
ScanModules []string `toml:"scanModules,omitempty" json:"scanModules,omitempty"`
|
||||
OwaspDCXMLPath string `toml:"owaspDCXMLPath,omitempty" json:"owaspDCXMLPath,omitempty"`
|
||||
ContainersOnly bool `toml:"containersOnly,omitempty" json:"containersOnly,omitempty"`
|
||||
ContainersIncluded []string `toml:"containersIncluded,omitempty" json:"containersIncluded,omitempty"`
|
||||
ContainersExcluded []string `toml:"containersExcluded,omitempty" json:"containersExcluded,omitempty"`
|
||||
ContainerType string `toml:"containerType,omitempty" json:"containerType,omitempty"`
|
||||
Containers map[string]ContainerSetting `toml:"containers,omitempty" json:"containers,omitempty"`
|
||||
IgnoreCves []string `toml:"ignoreCves,omitempty" json:"ignoreCves,omitempty"`
|
||||
IgnorePkgsRegexp []string `toml:"ignorePkgsRegexp,omitempty" json:"ignorePkgsRegexp,omitempty"`
|
||||
GitHubRepos map[string]GitHubConf `toml:"githubs" json:"githubs,omitempty"` // key: owner/repo
|
||||
UUIDs map[string]string `toml:"uuids,omitempty" json:"uuids,omitempty"`
|
||||
Memo string `toml:"memo,omitempty" json:"memo,omitempty"`
|
||||
Enablerepo []string `toml:"enablerepo,omitempty" json:"enablerepo,omitempty"` // For CentOS, Alma, Rocky, RHEL, Amazon
|
||||
Optional map[string]interface{} `toml:"optional,omitempty" json:"optional,omitempty"` // Optional key-value set that will be outputted to JSON
|
||||
Lockfiles []string `toml:"lockfiles,omitempty" json:"lockfiles,omitempty"` // ie) path/to/package-lock.json
|
||||
FindLock bool `toml:"findLock,omitempty" json:"findLock,omitempty"`
|
||||
FindLockDirs []string `toml:"findLockDirs,omitempty" json:"findLockDirs,omitempty"`
|
||||
Type string `toml:"type,omitempty" json:"type,omitempty"` // "pseudo" or ""
|
||||
IgnoredJSONKeys []string `toml:"ignoredJSONKeys,omitempty" json:"ignoredJSONKeys,omitempty"`
|
||||
WordPress *WordPressConf `toml:"wordpress,omitempty" json:"wordpress,omitempty"`
|
||||
PortScan *PortScanConf `toml:"portscan,omitempty" json:"portscan,omitempty"`
|
||||
|
||||
IPv4Addrs []string `toml:"-" json:"ipv4Addrs,omitempty"`
|
||||
IPv6Addrs []string `toml:"-" json:"ipv6Addrs,omitempty"`
|
||||
IPSIdentifiers map[string]string `toml:"-" json:"ipsIdentifiers,omitempty"`
|
||||
|
||||
// internal use
|
||||
LogMsgAnsiColor string `toml:"-" json:"-"` // DebugLog Color
|
||||
Container Container `toml:"-" json:"-"`
|
||||
Distro Distro `toml:"-" json:"-"`
|
||||
Mode ScanMode `toml:"-" json:"-"`
|
||||
Module ScanModule `toml:"-" json:"-"`
|
||||
}
|
||||
|
||||
// ContainerSetting is used for loading container setting in config.toml
|
||||
type ContainerSetting struct {
|
||||
Cpes []string `json:"cpes,omitempty"`
|
||||
OwaspDCXMLPath string `json:"owaspDCXMLPath,omitempty"`
|
||||
IgnorePkgsRegexp []string `json:"ignorePkgsRegexp,omitempty"`
|
||||
IgnoreCves []string `json:"ignoreCves,omitempty"`
|
||||
}
|
||||
|
||||
// WordPressConf used for WordPress Scanning
|
||||
type WordPressConf struct {
|
||||
OSUser string `toml:"osUser,omitempty" json:"osUser,omitempty"`
|
||||
DocRoot string `toml:"docRoot,omitempty" json:"docRoot,omitempty"`
|
||||
CmdPath string `toml:"cmdPath,omitempty" json:"cmdPath,omitempty"`
|
||||
}
|
||||
|
||||
// IsZero return whether this struct is not specified in config.toml
|
||||
func (cnf WordPressConf) IsZero() bool {
|
||||
return cnf.OSUser == "" && cnf.DocRoot == "" && cnf.CmdPath == ""
|
||||
}
|
||||
|
||||
// GitHubConf is used for GitHub Security Alerts
|
||||
type GitHubConf struct {
|
||||
Token string `json:"-"`
|
||||
IgnoreGitHubDismissed bool `json:"ignoreGitHubDismissed,omitempty"`
|
||||
}
|
||||
|
||||
// GetServerName returns ServerName if this serverInfo is about host.
|
||||
// If this serverInfo is about a container, returns containerID@ServerName
|
||||
func (s ServerInfo) GetServerName() string {
|
||||
if len(s.Container.ContainerID) == 0 {
|
||||
return s.ServerName
|
||||
}
|
||||
return fmt.Sprintf("%s@%s", s.Container.Name, s.ServerName)
|
||||
}
|
||||
|
||||
// Distro has distribution info
|
||||
type Distro struct {
|
||||
Family string
|
||||
Release string
|
||||
}
|
||||
|
||||
func (l Distro) String() string {
|
||||
return fmt.Sprintf("%s %s", l.Family, l.Release)
|
||||
}
|
||||
|
||||
// MajorVersion returns Major version
|
||||
func (l Distro) MajorVersion() (int, error) {
|
||||
switch l.Family {
|
||||
case constant.Amazon:
|
||||
return strconv.Atoi(getAmazonLinuxVersion(l.Release))
|
||||
case constant.CentOS:
|
||||
if 0 < len(l.Release) {
|
||||
return strconv.Atoi(strings.Split(strings.TrimPrefix(l.Release, "stream"), ".")[0])
|
||||
}
|
||||
case constant.OpenSUSE:
|
||||
if l.Release != "" {
|
||||
if l.Release == "tumbleweed" {
|
||||
return 0, nil
|
||||
}
|
||||
return strconv.Atoi(strings.Split(l.Release, ".")[0])
|
||||
}
|
||||
default:
|
||||
if 0 < len(l.Release) {
|
||||
return strconv.Atoi(strings.Split(l.Release, ".")[0])
|
||||
}
|
||||
}
|
||||
return 0, xerrors.New("Release is empty")
|
||||
}
|
||||
|
||||
// IsContainer returns whether this ServerInfo is about container
|
||||
func (s ServerInfo) IsContainer() bool {
|
||||
return 0 < len(s.Container.ContainerID)
|
||||
}
|
||||
|
||||
// SetContainer set container
|
||||
func (s *ServerInfo) SetContainer(d Container) {
|
||||
s.Container = d
|
||||
}
|
||||
|
||||
// Container has Container information.
|
||||
type Container struct {
|
||||
ContainerID string
|
||||
Name string
|
||||
Image string
|
||||
}
|
||||
@@ -1,112 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
. "github.com/future-architect/vuls/constant"
|
||||
)
|
||||
|
||||
func TestSyslogConfValidate(t *testing.T) {
|
||||
var tests = []struct {
|
||||
conf SyslogConf
|
||||
expectedErrLength int
|
||||
}{
|
||||
{
|
||||
conf: SyslogConf{},
|
||||
expectedErrLength: 0,
|
||||
},
|
||||
{
|
||||
conf: SyslogConf{
|
||||
Protocol: "tcp",
|
||||
Port: "5140",
|
||||
},
|
||||
expectedErrLength: 0,
|
||||
},
|
||||
{
|
||||
conf: SyslogConf{
|
||||
Protocol: "udp",
|
||||
Port: "12345",
|
||||
Severity: "emerg",
|
||||
Facility: "user",
|
||||
},
|
||||
expectedErrLength: 0,
|
||||
},
|
||||
{
|
||||
conf: SyslogConf{
|
||||
Protocol: "foo",
|
||||
Port: "514",
|
||||
},
|
||||
expectedErrLength: 1,
|
||||
},
|
||||
{
|
||||
conf: SyslogConf{
|
||||
Protocol: "invalid",
|
||||
Port: "-1",
|
||||
},
|
||||
expectedErrLength: 2,
|
||||
},
|
||||
{
|
||||
conf: SyslogConf{
|
||||
Protocol: "invalid",
|
||||
Port: "invalid",
|
||||
Severity: "invalid",
|
||||
Facility: "invalid",
|
||||
},
|
||||
expectedErrLength: 4,
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
tt.conf.Enabled = true
|
||||
errs := tt.conf.Validate()
|
||||
if len(errs) != tt.expectedErrLength {
|
||||
t.Errorf("test: %d, expected %d, actual %d", i, tt.expectedErrLength, len(errs))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDistro_MajorVersion(t *testing.T) {
|
||||
var tests = []struct {
|
||||
in Distro
|
||||
out int
|
||||
}{
|
||||
{
|
||||
in: Distro{
|
||||
Family: Amazon,
|
||||
Release: "2022 (Amazon Linux)",
|
||||
},
|
||||
out: 2022,
|
||||
},
|
||||
{
|
||||
in: Distro{
|
||||
Family: Amazon,
|
||||
Release: "2 (2017.12)",
|
||||
},
|
||||
out: 2,
|
||||
},
|
||||
{
|
||||
in: Distro{
|
||||
Family: Amazon,
|
||||
Release: "2017.12",
|
||||
},
|
||||
out: 1,
|
||||
},
|
||||
{
|
||||
in: Distro{
|
||||
Family: CentOS,
|
||||
Release: "7.10",
|
||||
},
|
||||
out: 7,
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
ver, err := tt.in.MajorVersion()
|
||||
if err != nil {
|
||||
t.Errorf("[%d] err occurred: %s", i, err)
|
||||
}
|
||||
if tt.out != ver {
|
||||
t.Errorf("[%d] expected %d, actual %d", i, tt.out, ver)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// GoogleChatConf is GoogleChat config
|
||||
type GoogleChatConf struct {
|
||||
WebHookURL string `valid:"url" json:"-" toml:"webHookURL,omitempty"`
|
||||
SkipIfNoCve bool `valid:"type(bool)" json:"-" toml:"skipIfNoCve"`
|
||||
ServerNameRegexp string `valid:"type(string)" json:"-" toml:"serverNameRegexp,omitempty"`
|
||||
Enabled bool `valid:"type(bool)" json:"-" toml:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *GoogleChatConf) Validate() (errs []error) {
|
||||
if !c.Enabled {
|
||||
return
|
||||
}
|
||||
if len(c.WebHookURL) == 0 {
|
||||
errs = append(errs, xerrors.New("googleChatConf.webHookURL must not be empty"))
|
||||
}
|
||||
if !govalidator.IsRegex(c.ServerNameRegexp) {
|
||||
errs = append(errs, xerrors.New("googleChatConf.serverNameRegexp must be regex"))
|
||||
}
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
)
|
||||
|
||||
// HTTPConf is HTTP config
|
||||
type HTTPConf struct {
|
||||
URL string `valid:"url" json:"-"`
|
||||
Enabled bool `toml:"-" json:"-"`
|
||||
}
|
||||
|
||||
const httpKey = "VULS_HTTP_URL"
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *HTTPConf) Validate() (errs []error) {
|
||||
if !c.Enabled {
|
||||
return nil
|
||||
}
|
||||
|
||||
// overwrite if env var is not empty
|
||||
if os.Getenv(httpKey) != "" {
|
||||
c.URL = os.Getenv(httpKey)
|
||||
}
|
||||
|
||||
if _, err := govalidator.ValidateStruct(c); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return errs
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
package config
|
||||
|
||||
import "golang.org/x/xerrors"
|
||||
|
||||
// JSONLoader loads configuration
|
||||
type JSONLoader struct {
|
||||
}
|
||||
|
||||
// Load load the configuration JSON file specified by path arg.
|
||||
func (c JSONLoader) Load(_, _, _ string) (err error) {
|
||||
return xerrors.New("Not implement yet")
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
package config
|
||||
|
||||
// Load loads configuration
|
||||
func Load(path string) error {
|
||||
loader := TOMLLoader{}
|
||||
return loader.Load(path)
|
||||
}
|
||||
|
||||
// Loader is interface of concrete loader
|
||||
type Loader interface {
|
||||
Load(string, string) error
|
||||
}
|
||||
313
config/os.go
313
config/os.go
@@ -1,313 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/constant"
|
||||
)
|
||||
|
||||
// EOL has End-of-Life information
|
||||
type EOL struct {
|
||||
StandardSupportUntil time.Time
|
||||
ExtendedSupportUntil time.Time
|
||||
Ended bool
|
||||
}
|
||||
|
||||
// IsStandardSupportEnded checks now is under standard support
|
||||
func (e EOL) IsStandardSupportEnded(now time.Time) bool {
|
||||
return e.Ended ||
|
||||
!e.ExtendedSupportUntil.IsZero() && e.StandardSupportUntil.IsZero() ||
|
||||
!e.StandardSupportUntil.IsZero() && now.After(e.StandardSupportUntil)
|
||||
}
|
||||
|
||||
// IsExtendedSuppportEnded checks now is under extended support
|
||||
func (e EOL) IsExtendedSuppportEnded(now time.Time) bool {
|
||||
if e.Ended {
|
||||
return true
|
||||
}
|
||||
if e.StandardSupportUntil.IsZero() && e.ExtendedSupportUntil.IsZero() {
|
||||
return false
|
||||
}
|
||||
return !e.ExtendedSupportUntil.IsZero() && now.After(e.ExtendedSupportUntil) ||
|
||||
e.ExtendedSupportUntil.IsZero() && now.After(e.StandardSupportUntil)
|
||||
}
|
||||
|
||||
// GetEOL return EOL information for the OS-release passed by args
|
||||
// https://github.com/aquasecurity/trivy/blob/master/pkg/detector/ospkg/redhat/redhat.go#L20
|
||||
func GetEOL(family, release string) (eol EOL, found bool) {
|
||||
switch family {
|
||||
case constant.Amazon:
|
||||
eol, found = map[string]EOL{
|
||||
"1": {StandardSupportUntil: time.Date(2023, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"2": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"2022": {StandardSupportUntil: time.Date(2026, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
}[getAmazonLinuxVersion(release)]
|
||||
case constant.RedHat:
|
||||
// https://access.redhat.com/support/policy/updates/errata
|
||||
eol, found = map[string]EOL{
|
||||
"3": {Ended: true},
|
||||
"4": {Ended: true},
|
||||
"5": {Ended: true},
|
||||
"6": {
|
||||
StandardSupportUntil: time.Date(2020, 11, 30, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"7": {
|
||||
StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2026, 6, 30, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"8": {
|
||||
StandardSupportUntil: time.Date(2029, 5, 31, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2031, 5, 31, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"9": {
|
||||
StandardSupportUntil: time.Date(2032, 5, 31, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2034, 5, 31, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
}[major(release)]
|
||||
case constant.CentOS:
|
||||
// https://en.wikipedia.org/wiki/CentOS#End-of-support_schedule
|
||||
eol, found = map[string]EOL{
|
||||
"3": {Ended: true},
|
||||
"4": {Ended: true},
|
||||
"5": {Ended: true},
|
||||
"6": {Ended: true},
|
||||
"7": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"8": {StandardSupportUntil: time.Date(2021, 12, 31, 23, 59, 59, 0, time.UTC)},
|
||||
"stream8": {StandardSupportUntil: time.Date(2024, 5, 31, 23, 59, 59, 0, time.UTC)},
|
||||
"stream9": {StandardSupportUntil: time.Date(2027, 5, 31, 23, 59, 59, 0, time.UTC)},
|
||||
}[major(release)]
|
||||
case constant.Alma:
|
||||
eol, found = map[string]EOL{
|
||||
"8": {StandardSupportUntil: time.Date(2029, 12, 31, 23, 59, 59, 0, time.UTC)},
|
||||
"9": {StandardSupportUntil: time.Date(2032, 5, 31, 23, 59, 59, 0, time.UTC)},
|
||||
}[major(release)]
|
||||
case constant.Rocky:
|
||||
eol, found = map[string]EOL{
|
||||
"8": {StandardSupportUntil: time.Date(2029, 5, 31, 23, 59, 59, 0, time.UTC)},
|
||||
"9": {StandardSupportUntil: time.Date(2032, 5, 31, 23, 59, 59, 0, time.UTC)},
|
||||
}[major(release)]
|
||||
case constant.Oracle:
|
||||
eol, found = map[string]EOL{
|
||||
// Source:
|
||||
// https://www.oracle.com/a/ocom/docs/elsp-lifetime-069338.pdf
|
||||
// https://community.oracle.com/docs/DOC-917964
|
||||
"3": {Ended: true},
|
||||
"4": {Ended: true},
|
||||
"5": {Ended: true},
|
||||
"6": {
|
||||
StandardSupportUntil: time.Date(2021, 3, 1, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2024, 6, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"7": {
|
||||
StandardSupportUntil: time.Date(2024, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2026, 6, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"8": {
|
||||
StandardSupportUntil: time.Date(2029, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2031, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"9": {
|
||||
StandardSupportUntil: time.Date(2032, 6, 1, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2034, 6, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
}[major(release)]
|
||||
case constant.Debian:
|
||||
eol, found = map[string]EOL{
|
||||
// https://wiki.debian.org/LTS
|
||||
"6": {Ended: true},
|
||||
"7": {Ended: true},
|
||||
"8": {Ended: true},
|
||||
"9": {StandardSupportUntil: time.Date(2022, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"10": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"11": {StandardSupportUntil: time.Date(2026, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
}[major(release)]
|
||||
case constant.Raspbian:
|
||||
// Not found
|
||||
eol, found = map[string]EOL{}[major(release)]
|
||||
case constant.Ubuntu:
|
||||
// https://wiki.ubuntu.com/Releases
|
||||
eol, found = map[string]EOL{
|
||||
"14.10": {Ended: true},
|
||||
"14.04": {
|
||||
ExtendedSupportUntil: time.Date(2022, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"15.04": {Ended: true},
|
||||
"16.10": {Ended: true},
|
||||
"17.04": {Ended: true},
|
||||
"17.10": {Ended: true},
|
||||
"16.04": {
|
||||
StandardSupportUntil: time.Date(2021, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2024, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"18.04": {
|
||||
StandardSupportUntil: time.Date(2023, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2028, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"18.10": {Ended: true},
|
||||
"19.04": {Ended: true},
|
||||
"19.10": {Ended: true},
|
||||
"20.04": {
|
||||
StandardSupportUntil: time.Date(2025, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2030, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"20.10": {
|
||||
StandardSupportUntil: time.Date(2021, 7, 22, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"21.04": {
|
||||
StandardSupportUntil: time.Date(2022, 1, 20, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"21.10": {
|
||||
StandardSupportUntil: time.Date(2022, 7, 14, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"22.04": {
|
||||
StandardSupportUntil: time.Date(2027, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2032, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"22.10": {
|
||||
StandardSupportUntil: time.Date(2023, 7, 20, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
}[release]
|
||||
case constant.OpenSUSE:
|
||||
// https://en.opensuse.org/Lifetime
|
||||
eol, found = map[string]EOL{
|
||||
"10.2": {Ended: true},
|
||||
"10.3": {Ended: true},
|
||||
"11.0": {Ended: true},
|
||||
"11.1": {Ended: true},
|
||||
"11.2": {Ended: true},
|
||||
"11.3": {Ended: true},
|
||||
"11.4": {Ended: true},
|
||||
"12.1": {Ended: true},
|
||||
"12.2": {Ended: true},
|
||||
"12.3": {Ended: true},
|
||||
"13.1": {Ended: true},
|
||||
"13.2": {Ended: true},
|
||||
"tumbleweed": {},
|
||||
}[release]
|
||||
case constant.OpenSUSELeap:
|
||||
// https://en.opensuse.org/Lifetime
|
||||
eol, found = map[string]EOL{
|
||||
"42.1": {Ended: true},
|
||||
"42.2": {Ended: true},
|
||||
"42.3": {Ended: true},
|
||||
"15.0": {Ended: true},
|
||||
"15.1": {Ended: true},
|
||||
"15.2": {Ended: true},
|
||||
"15.3": {StandardSupportUntil: time.Date(2022, 11, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"15.4": {StandardSupportUntil: time.Date(2023, 11, 30, 23, 59, 59, 0, time.UTC)},
|
||||
}[release]
|
||||
case constant.SUSEEnterpriseServer:
|
||||
// https://www.suse.com/lifecycle
|
||||
eol, found = map[string]EOL{
|
||||
"11": {Ended: true},
|
||||
"11.1": {Ended: true},
|
||||
"11.2": {Ended: true},
|
||||
"11.3": {Ended: true},
|
||||
"11.4": {Ended: true},
|
||||
"12": {Ended: true},
|
||||
"12.1": {Ended: true},
|
||||
"12.2": {Ended: true},
|
||||
"12.3": {Ended: true},
|
||||
"12.4": {Ended: true},
|
||||
"12.5": {StandardSupportUntil: time.Date(2024, 10, 31, 23, 59, 59, 0, time.UTC)},
|
||||
"15": {Ended: true},
|
||||
"15.1": {Ended: true},
|
||||
"15.2": {Ended: true},
|
||||
"15.3": {StandardSupportUntil: time.Date(2022, 11, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"15.4": {StandardSupportUntil: time.Date(2023, 11, 30, 23, 59, 59, 0, time.UTC)},
|
||||
}[release]
|
||||
case constant.SUSEEnterpriseDesktop:
|
||||
// https://www.suse.com/lifecycle
|
||||
eol, found = map[string]EOL{
|
||||
"11": {Ended: true},
|
||||
"11.1": {Ended: true},
|
||||
"11.2": {Ended: true},
|
||||
"11.3": {Ended: true},
|
||||
"11.4": {Ended: true},
|
||||
"12": {Ended: true},
|
||||
"12.1": {Ended: true},
|
||||
"12.2": {Ended: true},
|
||||
"12.3": {Ended: true},
|
||||
"12.4": {Ended: true},
|
||||
"15": {Ended: true},
|
||||
"15.1": {Ended: true},
|
||||
"15.2": {Ended: true},
|
||||
"15.3": {StandardSupportUntil: time.Date(2022, 11, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"15.4": {StandardSupportUntil: time.Date(2023, 11, 30, 23, 59, 59, 0, time.UTC)},
|
||||
}[release]
|
||||
case constant.Alpine:
|
||||
// https://github.com/aquasecurity/trivy/blob/master/pkg/detector/ospkg/alpine/alpine.go#L19
|
||||
// https://alpinelinux.org/releases/
|
||||
eol, found = map[string]EOL{
|
||||
"2.0": {Ended: true},
|
||||
"2.1": {Ended: true},
|
||||
"2.2": {Ended: true},
|
||||
"2.3": {Ended: true},
|
||||
"2.4": {Ended: true},
|
||||
"2.5": {Ended: true},
|
||||
"2.6": {Ended: true},
|
||||
"2.7": {Ended: true},
|
||||
"3.0": {Ended: true},
|
||||
"3.1": {Ended: true},
|
||||
"3.2": {Ended: true},
|
||||
"3.3": {Ended: true},
|
||||
"3.4": {Ended: true},
|
||||
"3.5": {Ended: true},
|
||||
"3.6": {Ended: true},
|
||||
"3.7": {Ended: true},
|
||||
"3.8": {Ended: true},
|
||||
"3.9": {Ended: true},
|
||||
"3.10": {StandardSupportUntil: time.Date(2021, 5, 1, 23, 59, 59, 0, time.UTC)},
|
||||
"3.11": {StandardSupportUntil: time.Date(2021, 11, 1, 23, 59, 59, 0, time.UTC)},
|
||||
"3.12": {StandardSupportUntil: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC)},
|
||||
"3.13": {StandardSupportUntil: time.Date(2022, 11, 1, 23, 59, 59, 0, time.UTC)},
|
||||
"3.14": {StandardSupportUntil: time.Date(2023, 5, 1, 23, 59, 59, 0, time.UTC)},
|
||||
"3.15": {StandardSupportUntil: time.Date(2023, 11, 1, 23, 59, 59, 0, time.UTC)},
|
||||
"3.16": {StandardSupportUntil: time.Date(2024, 5, 23, 23, 59, 59, 0, time.UTC)},
|
||||
}[majorDotMinor(release)]
|
||||
case constant.FreeBSD:
|
||||
// https://www.freebsd.org/security/
|
||||
eol, found = map[string]EOL{
|
||||
"7": {Ended: true},
|
||||
"8": {Ended: true},
|
||||
"9": {Ended: true},
|
||||
"10": {Ended: true},
|
||||
"11": {StandardSupportUntil: time.Date(2021, 9, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"12": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"13": {StandardSupportUntil: time.Date(2026, 1, 31, 23, 59, 59, 0, time.UTC)},
|
||||
}[major(release)]
|
||||
case constant.Fedora:
|
||||
// https://docs.fedoraproject.org/en-US/releases/eol/
|
||||
// https://endoflife.date/fedora
|
||||
eol, found = map[string]EOL{
|
||||
"32": {StandardSupportUntil: time.Date(2021, 5, 25, 23, 59, 59, 0, time.UTC)},
|
||||
"33": {StandardSupportUntil: time.Date(2021, 11, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"34": {StandardSupportUntil: time.Date(2022, 5, 17, 23, 59, 59, 0, time.UTC)},
|
||||
"35": {StandardSupportUntil: time.Date(2022, 12, 7, 23, 59, 59, 0, time.UTC)},
|
||||
}[major(release)]
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func major(osVer string) (majorVersion string) {
|
||||
return strings.Split(osVer, ".")[0]
|
||||
}
|
||||
|
||||
func majorDotMinor(osVer string) (majorDotMinor string) {
|
||||
ss := strings.SplitN(osVer, ".", 3)
|
||||
if len(ss) == 1 {
|
||||
return osVer
|
||||
}
|
||||
return fmt.Sprintf("%s.%s", ss[0], ss[1])
|
||||
}
|
||||
|
||||
func getAmazonLinuxVersion(osRelease string) string {
|
||||
ss := strings.Fields(osRelease)
|
||||
if len(ss) == 1 {
|
||||
return "1"
|
||||
}
|
||||
return ss[0]
|
||||
}
|
||||
@@ -1,626 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
. "github.com/future-architect/vuls/constant"
|
||||
)
|
||||
|
||||
func TestEOL_IsStandardSupportEnded(t *testing.T) {
|
||||
type fields struct {
|
||||
family string
|
||||
release string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
fields fields
|
||||
now time.Time
|
||||
found bool
|
||||
stdEnded bool
|
||||
extEnded bool
|
||||
}{
|
||||
// Amazon Linux
|
||||
{
|
||||
name: "amazon linux 1 supported",
|
||||
fields: fields{family: Amazon, release: "2018.03"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "amazon linux 1 eol on 2023-6-30",
|
||||
fields: fields{family: Amazon, release: "2018.03"},
|
||||
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "amazon linux 2 supported",
|
||||
fields: fields{family: Amazon, release: "2 (Karoo)"},
|
||||
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "amazon linux 2022 supported",
|
||||
fields: fields{family: Amazon, release: "2022 (Amazon Linux)"},
|
||||
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "amazon linux 2024 not found",
|
||||
fields: fields{family: Amazon, release: "2024 (Amazon Linux)"},
|
||||
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
//RHEL
|
||||
{
|
||||
name: "RHEL6 eol",
|
||||
fields: fields{family: RedHat, release: "6"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "RHEL7 supported",
|
||||
fields: fields{family: RedHat, release: "7"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "RHEL8 supported",
|
||||
fields: fields{family: RedHat, release: "8"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "RHEL9 supported",
|
||||
fields: fields{family: RedHat, release: "9"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "RHEL10 not found",
|
||||
fields: fields{family: RedHat, release: "10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
//CentOS
|
||||
{
|
||||
name: "CentOS 6 eol",
|
||||
fields: fields{family: CentOS, release: "6"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "CentOS 7 supported",
|
||||
fields: fields{family: CentOS, release: "7"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "CentOS 8 supported",
|
||||
fields: fields{family: CentOS, release: "8"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "CentOS stream8 supported",
|
||||
fields: fields{family: CentOS, release: "stream8"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "CentOS stream9 supported",
|
||||
fields: fields{family: CentOS, release: "stream9"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "CentOS stream10 Not Found",
|
||||
fields: fields{family: CentOS, release: "stream10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
// Alma
|
||||
{
|
||||
name: "Alma Linux 8 supported",
|
||||
fields: fields{family: Alma, release: "8"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alma Linux 9 supported",
|
||||
fields: fields{family: Alma, release: "9"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alma Linux 10 Not Found",
|
||||
fields: fields{family: Alma, release: "10"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
// Rocky
|
||||
{
|
||||
name: "Rocky Linux 8 supported",
|
||||
fields: fields{family: Rocky, release: "8"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Rocky Linux 9 supported",
|
||||
fields: fields{family: Rocky, release: "9"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Rocky Linux 10 Not Found",
|
||||
fields: fields{family: Rocky, release: "10"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
//Oracle
|
||||
{
|
||||
name: "Oracle Linux 6 eol",
|
||||
fields: fields{family: Oracle, release: "6"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Oracle Linux 7 supported",
|
||||
fields: fields{family: Oracle, release: "7"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Oracle Linux 8 supported",
|
||||
fields: fields{family: Oracle, release: "8"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Oracle Linux 9 supported",
|
||||
fields: fields{family: Oracle, release: "9"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Oracle Linux 10 not found",
|
||||
fields: fields{family: Oracle, release: "10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
//Ubuntu
|
||||
{
|
||||
name: "Ubuntu 12.10 not found",
|
||||
fields: fields{family: Ubuntu, release: "12.10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
found: false,
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 14.04 eol",
|
||||
fields: fields{family: Ubuntu, release: "14.04"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 14.10 eol",
|
||||
fields: fields{family: Ubuntu, release: "14.10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 16.04 supported",
|
||||
fields: fields{family: Ubuntu, release: "18.04"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 18.04 supported",
|
||||
fields: fields{family: Ubuntu, release: "18.04"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 18.04 ext supported",
|
||||
fields: fields{family: Ubuntu, release: "18.04"},
|
||||
now: time.Date(2025, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 20.04 supported",
|
||||
fields: fields{family: Ubuntu, release: "20.04"},
|
||||
now: time.Date(2021, 5, 1, 23, 59, 59, 0, time.UTC),
|
||||
found: true,
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 20.04 ext supported",
|
||||
fields: fields{family: Ubuntu, release: "20.04"},
|
||||
now: time.Date(2025, 5, 1, 23, 59, 59, 0, time.UTC),
|
||||
found: true,
|
||||
stdEnded: true,
|
||||
extEnded: false,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 20.10 supported",
|
||||
fields: fields{family: Ubuntu, release: "20.10"},
|
||||
now: time.Date(2021, 5, 1, 23, 59, 59, 0, time.UTC),
|
||||
found: true,
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 21.04 supported",
|
||||
fields: fields{family: Ubuntu, release: "21.04"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
found: true,
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 21.10 supported",
|
||||
fields: fields{family: Ubuntu, release: "21.10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
found: true,
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 22.04 supported",
|
||||
fields: fields{family: Ubuntu, release: "22.04"},
|
||||
now: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC),
|
||||
found: true,
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 22.10 supported",
|
||||
fields: fields{family: Ubuntu, release: "22.10"},
|
||||
now: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC),
|
||||
found: true,
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
},
|
||||
//Debian
|
||||
{
|
||||
name: "Debian 9 supported",
|
||||
fields: fields{family: Debian, release: "9"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Debian 10 supported",
|
||||
fields: fields{family: Debian, release: "10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Debian 8 supported",
|
||||
fields: fields{family: Debian, release: "8"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Debian 11 supported",
|
||||
fields: fields{family: Debian, release: "11"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Debian 12 is not supported yet",
|
||||
fields: fields{family: Debian, release: "12"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
//alpine
|
||||
{
|
||||
name: "alpine 3.10 supported",
|
||||
fields: fields{family: Alpine, release: "3.10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.11 supported",
|
||||
fields: fields{family: Alpine, release: "3.11"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.12 supported",
|
||||
fields: fields{family: Alpine, release: "3.12"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.9 eol",
|
||||
fields: fields{family: Alpine, release: "3.9"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.14 supported",
|
||||
fields: fields{family: Alpine, release: "3.14"},
|
||||
now: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.15 supported",
|
||||
fields: fields{family: Alpine, release: "3.15"},
|
||||
now: time.Date(2022, 11, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.16 supported",
|
||||
fields: fields{family: Alpine, release: "3.16"},
|
||||
now: time.Date(2024, 5, 23, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.17 not found",
|
||||
fields: fields{family: Alpine, release: "3.17"},
|
||||
now: time.Date(2022, 1, 14, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
// freebsd
|
||||
{
|
||||
name: "freebsd 11 supported",
|
||||
fields: fields{family: FreeBSD, release: "11"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "freebsd 11 eol on 2021-9-30",
|
||||
fields: fields{family: FreeBSD, release: "11"},
|
||||
now: time.Date(2021, 10, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "freebsd 12 supported",
|
||||
fields: fields{family: FreeBSD, release: "12"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "freebsd 13 supported",
|
||||
fields: fields{family: FreeBSD, release: "13"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "freebsd 10 eol",
|
||||
fields: fields{family: FreeBSD, release: "10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
// Fedora
|
||||
{
|
||||
name: "Fedora 32 supported",
|
||||
fields: fields{family: Fedora, release: "32"},
|
||||
now: time.Date(2021, 5, 25, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 32 eol on 2021-5-25",
|
||||
fields: fields{family: Fedora, release: "32"},
|
||||
now: time.Date(2021, 5, 26, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 33 supported",
|
||||
fields: fields{family: Fedora, release: "33"},
|
||||
now: time.Date(2021, 11, 30, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 33 eol on 2021-5-26",
|
||||
fields: fields{family: Fedora, release: "32"},
|
||||
now: time.Date(2021, 5, 27, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 34 supported",
|
||||
fields: fields{family: Fedora, release: "34"},
|
||||
now: time.Date(2022, 5, 17, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 32 eol on 2022-5-17",
|
||||
fields: fields{family: Fedora, release: "34"},
|
||||
now: time.Date(2022, 5, 18, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 35 supported",
|
||||
fields: fields{family: Fedora, release: "35"},
|
||||
now: time.Date(2022, 12, 7, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 35 eol on 2022-12-7",
|
||||
fields: fields{family: Fedora, release: "35"},
|
||||
now: time.Date(2022, 12, 8, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
eol, found := GetEOL(tt.fields.family, tt.fields.release)
|
||||
if found != tt.found {
|
||||
t.Errorf("GetEOL.found = %v, want %v", found, tt.found)
|
||||
}
|
||||
if found {
|
||||
if got := eol.IsStandardSupportEnded(tt.now); got != tt.stdEnded {
|
||||
t.Errorf("EOL.IsStandardSupportEnded() = %v, want %v", got, tt.stdEnded)
|
||||
}
|
||||
if got := eol.IsExtendedSuppportEnded(tt.now); got != tt.extEnded {
|
||||
t.Errorf("EOL.IsExtendedSupportEnded() = %v, want %v", got, tt.extEnded)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_majorDotMinor(t *testing.T) {
|
||||
type args struct {
|
||||
osVer string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
wantMajorDotMinor string
|
||||
}{
|
||||
{
|
||||
name: "empty",
|
||||
args: args{
|
||||
osVer: "",
|
||||
},
|
||||
wantMajorDotMinor: "",
|
||||
},
|
||||
{
|
||||
name: "major",
|
||||
args: args{
|
||||
osVer: "3",
|
||||
},
|
||||
wantMajorDotMinor: "3",
|
||||
},
|
||||
{
|
||||
name: "major dot minor",
|
||||
args: args{
|
||||
osVer: "3.1",
|
||||
},
|
||||
wantMajorDotMinor: "3.1",
|
||||
},
|
||||
{
|
||||
name: "major dot minor dot release",
|
||||
args: args{
|
||||
osVer: "3.1.4",
|
||||
},
|
||||
wantMajorDotMinor: "3.1",
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if gotMajorDotMinor := majorDotMinor(tt.args.osVer); gotMajorDotMinor != tt.wantMajorDotMinor {
|
||||
t.Errorf("majorDotMinor() = %v, want %v", gotMajorDotMinor, tt.wantMajorDotMinor)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,222 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// PortScanConf is the setting for using an external port scanner
|
||||
type PortScanConf struct {
|
||||
IsUseExternalScanner bool `toml:"-" json:"-"`
|
||||
|
||||
// Path to external scanner
|
||||
ScannerBinPath string `toml:"scannerBinPath,omitempty" json:"scannerBinPath,omitempty"`
|
||||
|
||||
// set user has privileged
|
||||
HasPrivileged bool `toml:"hasPrivileged,omitempty" json:"hasPrivileged,omitempty"`
|
||||
|
||||
// set the ScanTechniques for ScannerBinPath
|
||||
ScanTechniques []string `toml:"scanTechniques,omitempty" json:"scanTechniques,omitempty"`
|
||||
|
||||
// set the FIREWALL/IDS EVASION AND SPOOFING(Use given port number)
|
||||
SourcePort string `toml:"sourcePort,omitempty" json:"sourcePort,omitempty"`
|
||||
}
|
||||
|
||||
// ScanTechnique is implemented to represent the supported ScanTechniques in an Enum.
|
||||
type ScanTechnique int
|
||||
|
||||
const (
|
||||
// NotSupportTechnique is a ScanTechnique that is currently not supported.
|
||||
NotSupportTechnique ScanTechnique = iota
|
||||
// TCPSYN is SYN scan
|
||||
TCPSYN
|
||||
// TCPConnect is TCP connect scan
|
||||
TCPConnect
|
||||
// TCPACK is ACK scan
|
||||
TCPACK
|
||||
// TCPWindow is Window scan
|
||||
TCPWindow
|
||||
// TCPMaimon is Maimon scan
|
||||
TCPMaimon
|
||||
// TCPNull is Null scan
|
||||
TCPNull
|
||||
// TCPFIN is FIN scan
|
||||
TCPFIN
|
||||
// TCPXmas is Xmas scan
|
||||
TCPXmas
|
||||
)
|
||||
|
||||
var scanTechniqueMap = map[ScanTechnique]string{
|
||||
TCPSYN: "sS",
|
||||
TCPConnect: "sT",
|
||||
TCPACK: "sA",
|
||||
TCPWindow: "sW",
|
||||
TCPMaimon: "sM",
|
||||
TCPNull: "sN",
|
||||
TCPFIN: "sF",
|
||||
TCPXmas: "sX",
|
||||
}
|
||||
|
||||
func (s ScanTechnique) String() string {
|
||||
switch s {
|
||||
case TCPSYN:
|
||||
return "TCPSYN"
|
||||
case TCPConnect:
|
||||
return "TCPConnect"
|
||||
case TCPACK:
|
||||
return "TCPACK"
|
||||
case TCPWindow:
|
||||
return "TCPWindow"
|
||||
case TCPMaimon:
|
||||
return "TCPMaimon"
|
||||
case TCPNull:
|
||||
return "TCPNull"
|
||||
case TCPFIN:
|
||||
return "TCPFIN"
|
||||
case TCPXmas:
|
||||
return "TCPXmas"
|
||||
default:
|
||||
return "NotSupportTechnique"
|
||||
}
|
||||
}
|
||||
|
||||
// GetScanTechniques converts ScanTechniques loaded from config.toml to []scanTechniques.
|
||||
func (c *PortScanConf) GetScanTechniques() []ScanTechnique {
|
||||
if len(c.ScanTechniques) == 0 {
|
||||
return []ScanTechnique{}
|
||||
}
|
||||
|
||||
scanTechniques := []ScanTechnique{}
|
||||
for _, technique := range c.ScanTechniques {
|
||||
findScanTechniqueFlag := false
|
||||
for key, value := range scanTechniqueMap {
|
||||
if strings.EqualFold(value, technique) {
|
||||
scanTechniques = append(scanTechniques, key)
|
||||
findScanTechniqueFlag = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !findScanTechniqueFlag {
|
||||
scanTechniques = append(scanTechniques, NotSupportTechnique)
|
||||
}
|
||||
}
|
||||
|
||||
if len(scanTechniques) == 0 {
|
||||
return []ScanTechnique{NotSupportTechnique}
|
||||
}
|
||||
return scanTechniques
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *PortScanConf) Validate() (errs []error) {
|
||||
if !c.IsUseExternalScanner {
|
||||
if c.IsZero() {
|
||||
return
|
||||
}
|
||||
errs = append(errs, xerrors.New("To enable the PortScan option, ScannerBinPath must be set."))
|
||||
}
|
||||
|
||||
if _, err := os.Stat(c.ScannerBinPath); err != nil {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"scanner is not found. ScannerBinPath: %s not exists", c.ScannerBinPath))
|
||||
}
|
||||
|
||||
scanTechniques := c.GetScanTechniques()
|
||||
for _, scanTechnique := range scanTechniques {
|
||||
if scanTechnique == NotSupportTechnique {
|
||||
errs = append(errs, xerrors.New("There is an unsupported option in ScanTechniques."))
|
||||
}
|
||||
}
|
||||
|
||||
// It does not currently support multiple ScanTechniques.
|
||||
// But if it supports UDP scanning, it will need to accept multiple ScanTechniques.
|
||||
if len(scanTechniques) > 1 {
|
||||
errs = append(errs, xerrors.New("Currently multiple ScanTechniques are not supported."))
|
||||
}
|
||||
|
||||
if c.HasPrivileged {
|
||||
if os.Geteuid() != 0 {
|
||||
output, err := exec.Command("getcap", c.ScannerBinPath).Output()
|
||||
if err != nil {
|
||||
errs = append(errs, xerrors.Errorf("Failed to check capability of %s. error message: %w", c.ScannerBinPath, err))
|
||||
} else {
|
||||
parseOutput := strings.SplitN(string(output), "=", 2)
|
||||
if len(parseOutput) != 2 {
|
||||
errs = append(errs, xerrors.Errorf("Failed to parse getcap outputs. please execute this command: `$ getcap %s`. If the following string (`/usr/bin/nmap = ... `) is not displayed, you need to set the capability with the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", c.ScannerBinPath, c.ScannerBinPath))
|
||||
} else {
|
||||
parseCapability := strings.Split(strings.TrimSpace(parseOutput[1]), "+")
|
||||
capabilities := strings.Split(parseCapability[0], ",")
|
||||
for _, needCap := range []string{"cap_net_bind_service", "cap_net_admin", "cap_net_raw"} {
|
||||
existCapFlag := false
|
||||
for _, cap := range capabilities {
|
||||
if needCap == cap {
|
||||
existCapFlag = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if existCapFlag {
|
||||
continue
|
||||
}
|
||||
|
||||
errs = append(errs, xerrors.Errorf("Not enough capability to execute. needs: ['cap_net_bind_service', 'cap_net_admin', 'cap_net_raw'], actual: %s. To fix this, run the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", capabilities, c.ScannerBinPath))
|
||||
break
|
||||
}
|
||||
|
||||
if parseCapability[1] != "eip" {
|
||||
errs = append(errs, xerrors.Errorf("Capability(`cap_net_bind_service,cap_net_admin,cap_net_raw`) must belong to the following capability set(need: eip, actual: %s). To fix this, run the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", parseCapability[1], c.ScannerBinPath))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !c.HasPrivileged {
|
||||
for _, scanTechnique := range scanTechniques {
|
||||
if scanTechnique != TCPConnect && scanTechnique != NotSupportTechnique {
|
||||
errs = append(errs, xerrors.New("If not privileged, only TCPConnect Scan(-sT) can be used."))
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if c.SourcePort != "" {
|
||||
for _, scanTechnique := range scanTechniques {
|
||||
if scanTechnique == TCPConnect {
|
||||
errs = append(errs, xerrors.New("SourcePort Option(-g/--source-port) is incompatible with the default TCPConnect Scan(-sT)."))
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
portNumber, err := strconv.Atoi(c.SourcePort)
|
||||
if err != nil {
|
||||
errs = append(errs, xerrors.Errorf("SourcePort conversion failed. %w", err))
|
||||
} else {
|
||||
if portNumber < 0 || 65535 < portNumber {
|
||||
errs = append(errs, xerrors.Errorf("SourcePort(%s) must be between 0 and 65535.", c.SourcePort))
|
||||
}
|
||||
|
||||
if portNumber == 0 {
|
||||
errs = append(errs, xerrors.New("SourcePort(0) may not work on all systems."))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// IsZero returns whether this struct is not specified in config.toml
|
||||
func (c PortScanConf) IsZero() bool {
|
||||
return c.ScannerBinPath == "" && !c.HasPrivileged && len(c.ScanTechniques) == 0 && c.SourcePort == ""
|
||||
}
|
||||
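For context, a minimal sketch (not part of this diff) of how the PortScan configuration above is typically exercised; the nmap path and the `config` import path are assumptions and may differ on your system:

```go
package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	c := config.PortScanConf{
		IsUseExternalScanner: true,
		ScannerBinPath:       "/usr/bin/nmap", // assumed path; adjust for your host
		ScanTechniques:       []string{"sS"},
		HasPrivileged:        true,
	}
	// GetScanTechniques maps the string options to ScanTechnique values.
	fmt.Println(c.GetScanTechniques())
	// Validate collects every configuration problem instead of failing fast.
	for _, err := range c.Validate() {
		fmt.Println(err)
	}
}
```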
@@ -1,69 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestPortScanConf_getScanTechniques(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
techniques []string
|
||||
want []ScanTechnique
|
||||
}{
|
||||
{
|
||||
name: "nil",
|
||||
techniques: []string{},
|
||||
want: []ScanTechnique{},
|
||||
},
|
||||
{
|
||||
name: "single",
|
||||
techniques: []string{"sS"},
|
||||
want: []ScanTechnique{TCPSYN},
|
||||
},
|
||||
{
|
||||
name: "multiple",
|
||||
techniques: []string{"sS", "sT"},
|
||||
want: []ScanTechnique{TCPSYN, TCPConnect},
|
||||
},
|
||||
{
|
||||
name: "unknown",
|
||||
techniques: []string{"sU"},
|
||||
want: []ScanTechnique{NotSupportTechnique},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
c := PortScanConf{ScanTechniques: tt.techniques}
|
||||
if got := c.GetScanTechniques(); !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("PortScanConf.getScanTechniques() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPortScanConf_IsZero(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
conf PortScanConf
|
||||
want bool
|
||||
}{
|
||||
{
|
||||
name: "not zero",
|
||||
conf: PortScanConf{ScannerBinPath: "/usr/bin/nmap"},
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
name: "zero",
|
||||
conf: PortScanConf{},
|
||||
want: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := tt.conf.IsZero(); got != tt.want {
|
||||
t.Errorf("PortScanConf.IsZero() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// SaasConf is FutureVuls config
|
||||
type SaasConf struct {
|
||||
GroupID int64 `json:"-"`
|
||||
Token string `json:"-"`
|
||||
URL string `json:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *SaasConf) Validate() (errs []error) {
|
||||
if c.GroupID == 0 {
|
||||
errs = append(errs, xerrors.New("GroupID must not be empty"))
|
||||
}
|
||||
|
||||
if len(c.Token) == 0 {
|
||||
errs = append(errs, xerrors.New("Token must not be empty"))
|
||||
}
|
||||
|
||||
if len(c.URL) == 0 {
|
||||
errs = append(errs, xerrors.New("URL must not be empty"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1,110 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// ScanMode holds the scan mode: fast, fast-root, deep or offline
|
||||
type ScanMode struct {
|
||||
flag byte
|
||||
}
|
||||
|
||||
const (
|
||||
// Fast is fast scan mode
|
||||
Fast = byte(1 << iota)
|
||||
// FastRoot is fast-root scan mode
|
||||
FastRoot
|
||||
// Deep is deep scan mode
|
||||
Deep
|
||||
// Offline is offline scan mode
|
||||
Offline
|
||||
|
||||
fastStr = "fast"
|
||||
fastRootStr = "fast-root"
|
||||
deepStr = "deep"
|
||||
offlineStr = "offline"
|
||||
)
|
||||
|
||||
// Set mode
|
||||
func (s *ScanMode) Set(f byte) {
|
||||
s.flag |= f
|
||||
}
|
||||
|
||||
// IsFast returns whether the scan mode is fast
|
||||
func (s ScanMode) IsFast() bool {
|
||||
return s.flag&Fast == Fast
|
||||
}
|
||||
|
||||
// IsFastRoot returns whether the scan mode is fast-root
|
||||
func (s ScanMode) IsFastRoot() bool {
|
||||
return s.flag&FastRoot == FastRoot
|
||||
}
|
||||
|
||||
// IsDeep returns whether the scan mode is deep
|
||||
func (s ScanMode) IsDeep() bool {
|
||||
return s.flag&Deep == Deep
|
||||
}
|
||||
|
||||
// IsOffline returns whether the scan mode is offline
|
||||
func (s ScanMode) IsOffline() bool {
|
||||
return s.flag&Offline == Offline
|
||||
}
|
||||
|
||||
func (s *ScanMode) ensure() error {
|
||||
numTrue := 0
|
||||
for _, b := range []bool{s.IsFast(), s.IsFastRoot(), s.IsDeep()} {
|
||||
if b {
|
||||
numTrue++
|
||||
}
|
||||
}
|
||||
if numTrue == 0 {
|
||||
s.Set(Fast)
|
||||
} else if s.IsDeep() && s.IsOffline() {
|
||||
return xerrors.New("Don't specify both of deep and offline")
|
||||
} else if numTrue != 1 {
|
||||
return xerrors.New("Specify only one of offline, fast, fast-root or deep")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s ScanMode) String() string {
|
||||
ss := ""
|
||||
if s.IsFast() {
|
||||
ss = fastStr
|
||||
} else if s.IsFastRoot() {
|
||||
ss = fastRootStr
|
||||
} else if s.IsDeep() {
|
||||
ss = deepStr
|
||||
}
|
||||
if s.IsOffline() {
|
||||
ss += " " + offlineStr
|
||||
}
|
||||
return ss + " mode"
|
||||
}
|
||||
|
||||
func setScanMode(server *ServerInfo) error {
|
||||
if len(server.ScanMode) == 0 {
|
||||
server.ScanMode = Conf.Default.ScanMode
|
||||
}
|
||||
for _, m := range server.ScanMode {
|
||||
switch strings.ToLower(m) {
|
||||
case fastStr:
|
||||
server.Mode.Set(Fast)
|
||||
case fastRootStr:
|
||||
server.Mode.Set(FastRoot)
|
||||
case deepStr:
|
||||
server.Mode.Set(Deep)
|
||||
case offlineStr:
|
||||
server.Mode.Set(Offline)
|
||||
default:
|
||||
return xerrors.Errorf("scanMode: %s of %s is invalid. Specify -fast, -fast-root, -deep or offline",
|
||||
m, server.ServerName)
|
||||
}
|
||||
}
|
||||
if err := server.Mode.ensure(); err != nil {
|
||||
return xerrors.Errorf("%s in %s", err, server.ServerName)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
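A small sketch of how these bit flags compose in practice; the values chosen are illustrative, and the constants and methods are the ones defined in the file above:

```go
package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	var m config.ScanMode
	m.Set(config.FastRoot)
	m.Set(config.Offline)

	fmt.Println(m.IsFastRoot()) // true
	fmt.Println(m.IsDeep())     // false
	fmt.Println(m)              // "fast-root offline mode" via String()
}
```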
@@ -1,97 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// ScanModule has a type of scan module
|
||||
type ScanModule struct {
|
||||
flag byte
|
||||
}
|
||||
|
||||
const (
|
||||
// OSPkg is scanmodule
|
||||
OSPkg = byte(1 << iota)
|
||||
// WordPress is scanmodule
|
||||
WordPress
|
||||
// Lockfile is scanmodule
|
||||
Lockfile
|
||||
// Port is scanmodule
|
||||
Port
|
||||
|
||||
osPkgStr = "ospkg"
|
||||
wordPressStr = "wordpress"
|
||||
lockfileStr = "lockfile"
|
||||
portStr = "port"
|
||||
)
|
||||
|
||||
var allModules = []string{osPkgStr, wordPressStr, lockfileStr, portStr}
|
||||
|
||||
// Set module
|
||||
func (s *ScanModule) Set(f byte) {
|
||||
s.flag |= f
|
||||
}
|
||||
|
||||
// IsScanOSPkg returns whether to scan OS packages
|
||||
func (s ScanModule) IsScanOSPkg() bool {
|
||||
return s.flag&OSPkg == OSPkg
|
||||
}
|
||||
|
||||
// IsScanWordPress returns whether to scan WordPress
|
||||
func (s ScanModule) IsScanWordPress() bool {
|
||||
return s.flag&WordPress == WordPress
|
||||
}
|
||||
|
||||
// IsScanLockFile returns whether to scan lock files
|
||||
func (s ScanModule) IsScanLockFile() bool {
|
||||
return s.flag&Lockfile == Lockfile
|
||||
}
|
||||
|
||||
// IsScanPort returns whether to scan listening ports
|
||||
func (s ScanModule) IsScanPort() bool {
|
||||
return s.flag&Port == Port
|
||||
}
|
||||
|
||||
// IsZero returns whether all module flags are false
|
||||
func (s ScanModule) IsZero() bool {
|
||||
return !(s.IsScanOSPkg() || s.IsScanWordPress() || s.IsScanLockFile() || s.IsScanPort())
|
||||
}
|
||||
|
||||
func (s *ScanModule) ensure() error {
|
||||
if s.IsZero() {
|
||||
s.Set(OSPkg)
|
||||
s.Set(WordPress)
|
||||
s.Set(Lockfile)
|
||||
s.Set(Port)
|
||||
} else if !s.IsScanOSPkg() && s.IsScanPort() {
|
||||
return xerrors.New("When specifying the Port, Specify OSPkg as well")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func setScanModules(server *ServerInfo, d ServerInfo) error {
|
||||
if len(server.ScanModules) == 0 {
|
||||
server.ScanModules = d.ScanModules
|
||||
}
|
||||
for _, m := range server.ScanModules {
|
||||
switch strings.ToLower(m) {
|
||||
case osPkgStr:
|
||||
server.Module.Set(OSPkg)
|
||||
case wordPressStr:
|
||||
server.Module.Set(WordPress)
|
||||
case lockfileStr:
|
||||
server.Module.Set(Lockfile)
|
||||
case portStr:
|
||||
server.Module.Set(Port)
|
||||
default:
|
||||
return xerrors.Errorf("scanMode: %s of %s is invalid. Specify %s",
|
||||
m, server.ServerName, allModules)
|
||||
}
|
||||
}
|
||||
if err := server.Module.ensure(); err != nil {
|
||||
return xerrors.Errorf("%s in %s", err, server.ServerName)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
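A test-style sketch (written as if in the same package, since ensure is unexported) of the two rules encoded above: an empty ScanModule defaults to every module, and Port without OSPkg is rejected. This is an illustration, not part of the deleted file:

```go
package config

import "testing"

func TestScanModuleDefaultsSketch(t *testing.T) {
	// Empty module set: ensure() enables all modules.
	var all ScanModule
	if err := all.ensure(); err != nil {
		t.Fatal(err)
	}
	if !(all.IsScanOSPkg() && all.IsScanWordPress() && all.IsScanLockFile() && all.IsScanPort()) {
		t.Fatal("expected every module to be enabled by default")
	}

	// Port without OSPkg: ensure() reports an error.
	var portOnly ScanModule
	portOnly.Set(Port)
	if err := portOnly.ensure(); err == nil {
		t.Fatal("expected an error: Port requires OSPkg as well")
	}
}
```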
@@ -1,65 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestScanModule_IsZero(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
modes []byte
|
||||
want bool
|
||||
}{
|
||||
{
|
||||
name: "not zero",
|
||||
modes: []byte{OSPkg},
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
name: "zero",
|
||||
modes: []byte{},
|
||||
want: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
s := ScanModule{}
|
||||
for _, b := range tt.modes {
|
||||
s.Set(b)
|
||||
}
|
||||
if got := s.IsZero(); got != tt.want {
|
||||
t.Errorf("ScanModule.IsZero() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestScanModule_validate(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
modes []byte
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "valid",
|
||||
modes: []byte{},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "err",
|
||||
modes: []byte{WordPress, Lockfile, Port},
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
s := ScanModule{}
|
||||
for _, b := range tt.modes {
|
||||
s.Set(b)
|
||||
}
|
||||
if err := s.ensure(); (err != nil) != tt.wantErr {
|
||||
t.Errorf("ScanModule.validate() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// SlackConf is slack config
|
||||
type SlackConf struct {
|
||||
HookURL string `valid:"url" json:"-" toml:"hookURL,omitempty"`
|
||||
LegacyToken string `json:"-" toml:"legacyToken,omitempty"`
|
||||
Channel string `json:"-" toml:"channel,omitempty"`
|
||||
IconEmoji string `json:"-" toml:"iconEmoji,omitempty"`
|
||||
AuthUser string `json:"-" toml:"authUser,omitempty"`
|
||||
NotifyUsers []string `toml:"notifyUsers,omitempty" json:"-"`
|
||||
Text string `json:"-"`
|
||||
Enabled bool `toml:"-" json:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *SlackConf) Validate() (errs []error) {
|
||||
if !c.Enabled {
|
||||
return
|
||||
}
|
||||
|
||||
if len(c.HookURL) == 0 && len(c.LegacyToken) == 0 {
|
||||
errs = append(errs, xerrors.New("slack.hookURL or slack.LegacyToken must not be empty"))
|
||||
}
|
||||
|
||||
if len(c.Channel) == 0 {
|
||||
errs = append(errs, xerrors.New("slack.channel must not be empty"))
|
||||
} else {
|
||||
if !(strings.HasPrefix(c.Channel, "#") ||
|
||||
c.Channel == "${servername}") {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"channel's prefix must be '#', channel: %s", c.Channel))
|
||||
}
|
||||
}
|
||||
|
||||
if len(c.AuthUser) == 0 {
|
||||
errs = append(errs, xerrors.New("slack.authUser must not be empty"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
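A minimal sketch of how the channel rule above plays out; the hook URL is a placeholder, not a real endpoint, and the channel value is deliberately missing the required '#' prefix:

```go
package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	c := config.SlackConf{
		Enabled:  true,
		HookURL:  "https://hooks.slack.com/services/xxx", // placeholder
		Channel:  "vuls-alerts",                          // missing the required '#' prefix
		AuthUser: "vuls",
	}
	for _, err := range c.Validate() {
		fmt.Println(err) // reports the channel prefix error
	}
}
```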
@@ -1,65 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// SMTPConf is smtp config
|
||||
type SMTPConf struct {
|
||||
SMTPAddr string `toml:"smtpAddr,omitempty" json:"-"`
|
||||
SMTPPort string `toml:"smtpPort,omitempty" valid:"port" json:"-"`
|
||||
User string `toml:"user,omitempty" json:"-"`
|
||||
Password string `toml:"password,omitempty" json:"-"`
|
||||
From string `toml:"from,omitempty" json:"-"`
|
||||
To []string `toml:"to,omitempty" json:"-"`
|
||||
Cc []string `toml:"cc,omitempty" json:"-"`
|
||||
SubjectPrefix string `toml:"subjectPrefix,omitempty" json:"-"`
|
||||
Enabled bool `toml:"-" json:"-"`
|
||||
}
|
||||
|
||||
func checkEmails(emails []string) (errs []error) {
|
||||
for _, addr := range emails {
|
||||
if len(addr) == 0 {
|
||||
return
|
||||
}
|
||||
if ok := govalidator.IsEmail(addr); !ok {
|
||||
errs = append(errs, xerrors.Errorf("Invalid email address. email: %s", addr))
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Validate SMTP configuration
|
||||
func (c *SMTPConf) Validate() (errs []error) {
|
||||
if !c.Enabled {
|
||||
return
|
||||
}
|
||||
emails := []string{}
|
||||
emails = append(emails, c.From)
|
||||
emails = append(emails, c.To...)
|
||||
emails = append(emails, c.Cc...)
|
||||
|
||||
if emailErrs := checkEmails(emails); 0 < len(emailErrs) {
|
||||
errs = append(errs, emailErrs...)
|
||||
}
|
||||
|
||||
if c.SMTPAddr == "" {
|
||||
errs = append(errs, xerrors.New("email.smtpAddr must not be empty"))
|
||||
}
|
||||
if c.SMTPPort == "" {
|
||||
errs = append(errs, xerrors.New("email.smtpPort must not be empty"))
|
||||
}
|
||||
if len(c.To) == 0 {
|
||||
errs = append(errs, xerrors.New("email.To required at least one address"))
|
||||
}
|
||||
if len(c.From) == 0 {
|
||||
errs = append(errs, xerrors.New("email.From required at least one address"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1,130 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"log/syslog"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// SyslogConf is syslog config
|
||||
type SyslogConf struct {
|
||||
Protocol string `json:"-"`
|
||||
Host string `valid:"host" json:"-"`
|
||||
Port string `valid:"port" json:"-"`
|
||||
Severity string `json:"-"`
|
||||
Facility string `json:"-"`
|
||||
Tag string `json:"-"`
|
||||
Verbose bool `json:"-"`
|
||||
Enabled bool `toml:"-" json:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *SyslogConf) Validate() (errs []error) {
|
||||
if !c.Enabled {
|
||||
return nil
|
||||
}
|
||||
// If protocol is empty, it will connect to the local syslog server.
|
||||
if len(c.Protocol) > 0 && c.Protocol != "tcp" && c.Protocol != "udp" {
|
||||
errs = append(errs, errors.New(`syslog.protocol must be "tcp" or "udp"`))
|
||||
}
|
||||
|
||||
// Default port: 514
|
||||
if c.Port == "" {
|
||||
c.Port = "514"
|
||||
}
|
||||
|
||||
if _, err := c.GetSeverity(); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if _, err := c.GetFacility(); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if _, err := govalidator.ValidateStruct(c); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return errs
|
||||
}
|
||||
|
||||
// GetSeverity gets severity
|
||||
func (c *SyslogConf) GetSeverity() (syslog.Priority, error) {
|
||||
if c.Severity == "" {
|
||||
return syslog.LOG_INFO, nil
|
||||
}
|
||||
|
||||
switch c.Severity {
|
||||
case "emerg":
|
||||
return syslog.LOG_EMERG, nil
|
||||
case "alert":
|
||||
return syslog.LOG_ALERT, nil
|
||||
case "crit":
|
||||
return syslog.LOG_CRIT, nil
|
||||
case "err":
|
||||
return syslog.LOG_ERR, nil
|
||||
case "warning":
|
||||
return syslog.LOG_WARNING, nil
|
||||
case "notice":
|
||||
return syslog.LOG_NOTICE, nil
|
||||
case "info":
|
||||
return syslog.LOG_INFO, nil
|
||||
case "debug":
|
||||
return syslog.LOG_DEBUG, nil
|
||||
default:
|
||||
return -1, xerrors.Errorf("Invalid severity: %s", c.Severity)
|
||||
}
|
||||
}
|
||||
|
||||
// GetFacility gets facility
|
||||
func (c *SyslogConf) GetFacility() (syslog.Priority, error) {
|
||||
if c.Facility == "" {
|
||||
return syslog.LOG_AUTH, nil
|
||||
}
|
||||
|
||||
switch c.Facility {
|
||||
case "kern":
|
||||
return syslog.LOG_KERN, nil
|
||||
case "user":
|
||||
return syslog.LOG_USER, nil
|
||||
case "mail":
|
||||
return syslog.LOG_MAIL, nil
|
||||
case "daemon":
|
||||
return syslog.LOG_DAEMON, nil
|
||||
case "auth":
|
||||
return syslog.LOG_AUTH, nil
|
||||
case "syslog":
|
||||
return syslog.LOG_SYSLOG, nil
|
||||
case "lpr":
|
||||
return syslog.LOG_LPR, nil
|
||||
case "news":
|
||||
return syslog.LOG_NEWS, nil
|
||||
case "uucp":
|
||||
return syslog.LOG_UUCP, nil
|
||||
case "cron":
|
||||
return syslog.LOG_CRON, nil
|
||||
case "authpriv":
|
||||
return syslog.LOG_AUTHPRIV, nil
|
||||
case "ftp":
|
||||
return syslog.LOG_FTP, nil
|
||||
case "local0":
|
||||
return syslog.LOG_LOCAL0, nil
|
||||
case "local1":
|
||||
return syslog.LOG_LOCAL1, nil
|
||||
case "local2":
|
||||
return syslog.LOG_LOCAL2, nil
|
||||
case "local3":
|
||||
return syslog.LOG_LOCAL3, nil
|
||||
case "local4":
|
||||
return syslog.LOG_LOCAL4, nil
|
||||
case "local5":
|
||||
return syslog.LOG_LOCAL5, nil
|
||||
case "local6":
|
||||
return syslog.LOG_LOCAL6, nil
|
||||
case "local7":
|
||||
return syslog.LOG_LOCAL7, nil
|
||||
default:
|
||||
return -1, xerrors.Errorf("Invalid facility: %s", c.Facility)
|
||||
}
|
||||
}
|
||||
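GetSeverity and GetFacility each return a syslog.Priority, and when a message is actually written the two are OR-ed together. A sketch of that combination; the reporter side, the UDP transport and the localhost:514 address are assumptions for illustration:

```go
package main

import (
	"fmt"
	"log/syslog"

	"github.com/future-architect/vuls/config"
)

func main() {
	c := config.SyslogConf{Severity: "warning", Facility: "local0"}
	sev, _ := c.GetSeverity()
	fac, _ := c.GetFacility()

	// Severity and facility combine into a single priority when dialing.
	w, err := syslog.Dial("udp", "localhost:514", sev|fac, "vuls")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer w.Close()
	w.Warning("example message") // hypothetical message for illustration
}
```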
@@ -1,33 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// TelegramConf is Telegram config
|
||||
type TelegramConf struct {
|
||||
Token string `json:"-"`
|
||||
ChatID string `json:"-"`
|
||||
Enabled bool `toml:"-" json:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *TelegramConf) Validate() (errs []error) {
|
||||
if !c.Enabled {
|
||||
return
|
||||
}
|
||||
if len(c.ChatID) == 0 {
|
||||
errs = append(errs, xerrors.New("TelegramConf.ChatID must not be empty"))
|
||||
}
|
||||
|
||||
if len(c.Token) == 0 {
|
||||
errs = append(errs, xerrors.New("TelegramConf.Token must not be empty"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1,328 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/c-robinson/iplib"
|
||||
"github.com/knqyf263/go-cpe/naming"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/constant"
|
||||
)
|
||||
|
||||
// TOMLLoader loads config
|
||||
type TOMLLoader struct {
|
||||
}
|
||||
|
||||
// Load loads the configuration TOML file specified by the path argument.
|
||||
func (c TOMLLoader) Load(pathToToml string) error {
|
||||
// util.Log.Infof("Loading config: %s", pathToToml)
|
||||
if _, err := toml.DecodeFile(pathToToml, &Conf); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, cnf := range []VulnDictInterface{
|
||||
&Conf.CveDict,
|
||||
&Conf.OvalDict,
|
||||
&Conf.Gost,
|
||||
&Conf.Exploit,
|
||||
&Conf.Metasploit,
|
||||
&Conf.KEVuln,
|
||||
&Conf.Cti,
|
||||
} {
|
||||
cnf.Init()
|
||||
}
|
||||
|
||||
index := 0
|
||||
servers := map[string]ServerInfo{}
|
||||
for name, server := range Conf.Servers {
|
||||
server.BaseName = name
|
||||
|
||||
if server.Type != constant.ServerTypePseudo && server.Host == "" {
|
||||
return xerrors.New("Failed to find hosts. err: server.host is empty")
|
||||
}
|
||||
serverHosts, err := hosts(server.Host, server.IgnoreIPAddresses)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to find hosts. err: %w", err)
|
||||
}
|
||||
if len(serverHosts) == 0 {
|
||||
return xerrors.New("Failed to find hosts. err: zero enumerated hosts")
|
||||
}
|
||||
|
||||
if err := setDefaultIfEmpty(&server); err != nil {
|
||||
return xerrors.Errorf("Failed to set default value to config. server: %s, err: %w", name, err)
|
||||
}
|
||||
|
||||
if err := setScanMode(&server); err != nil {
|
||||
return xerrors.Errorf("Failed to set ScanMode: %w", err)
|
||||
}
|
||||
|
||||
if err := setScanModules(&server, Conf.Default); err != nil {
|
||||
return xerrors.Errorf("Failed to set ScanModule: %w", err)
|
||||
}
|
||||
|
||||
if len(server.CpeNames) == 0 {
|
||||
server.CpeNames = Conf.Default.CpeNames
|
||||
}
|
||||
for i, n := range server.CpeNames {
|
||||
uri, err := toCpeURI(n)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to parse CPENames %s in %s, err: %w", n, name, err)
|
||||
}
|
||||
server.CpeNames[i] = uri
|
||||
}
|
||||
|
||||
for _, cve := range Conf.Default.IgnoreCves {
|
||||
found := false
|
||||
for _, c := range server.IgnoreCves {
|
||||
if cve == c {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
server.IgnoreCves = append(server.IgnoreCves, cve)
|
||||
}
|
||||
}
|
||||
|
||||
for _, pkg := range Conf.Default.IgnorePkgsRegexp {
|
||||
found := false
|
||||
for _, p := range server.IgnorePkgsRegexp {
|
||||
if pkg == p {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
server.IgnorePkgsRegexp = append(server.IgnorePkgsRegexp, pkg)
|
||||
}
|
||||
}
|
||||
for _, reg := range server.IgnorePkgsRegexp {
|
||||
_, err := regexp.Compile(reg)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to parse %s in %s. err: %w", reg, name, err)
|
||||
}
|
||||
}
|
||||
for contName, cont := range server.Containers {
|
||||
for _, reg := range cont.IgnorePkgsRegexp {
|
||||
_, err := regexp.Compile(reg)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to parse %s in %s@%s. err: %w", reg, contName, name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for ownerRepo, githubSetting := range server.GitHubRepos {
|
||||
if ss := strings.Split(ownerRepo, "/"); len(ss) != 2 {
|
||||
return xerrors.Errorf("Failed to parse GitHub owner/repo: %s in %s", ownerRepo, name)
|
||||
}
|
||||
if githubSetting.Token == "" {
|
||||
return xerrors.Errorf("GitHub owner/repo: %s in %s token is empty", ownerRepo, name)
|
||||
}
|
||||
}
|
||||
|
||||
if len(server.Enablerepo) == 0 {
|
||||
server.Enablerepo = Conf.Default.Enablerepo
|
||||
}
|
||||
if len(server.Enablerepo) != 0 {
|
||||
for _, repo := range server.Enablerepo {
|
||||
switch repo {
|
||||
case "base", "updates":
|
||||
// nop
|
||||
default:
|
||||
return xerrors.Errorf("For now, enablerepo have to be base or updates: %s", server.Enablerepo)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if server.PortScan.ScannerBinPath != "" {
|
||||
server.PortScan.IsUseExternalScanner = true
|
||||
}
|
||||
|
||||
if !isCIDRNotation(server.Host) {
|
||||
server.ServerName = name
|
||||
servers[server.ServerName] = server
|
||||
continue
|
||||
}
|
||||
for _, host := range serverHosts {
|
||||
server.Host = host
|
||||
server.ServerName = fmt.Sprintf("%s(%s)", name, host)
|
||||
server.LogMsgAnsiColor = Colors[index%len(Colors)]
|
||||
index++
|
||||
servers[server.ServerName] = server
|
||||
}
|
||||
}
|
||||
Conf.Servers = servers
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func hosts(host string, ignores []string) ([]string, error) {
|
||||
hostMap := map[string]struct{}{}
|
||||
hosts, err := enumerateHosts(host)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to enumarate hosts. err: %w", err)
|
||||
}
|
||||
for _, host := range hosts {
|
||||
hostMap[host] = struct{}{}
|
||||
}
|
||||
|
||||
for _, ignore := range ignores {
|
||||
hosts, err := enumerateHosts(ignore)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to enumarate hosts. err: %w", err)
|
||||
}
|
||||
if len(hosts) == 1 && net.ParseIP(hosts[0]) == nil {
|
||||
return nil, xerrors.Errorf("Failed to ignore hosts. err: a non-IP address has been entered in ignoreIPAddress")
|
||||
}
|
||||
for _, host := range hosts {
|
||||
delete(hostMap, host)
|
||||
}
|
||||
}
|
||||
|
||||
hosts = []string{}
|
||||
for host := range hostMap {
|
||||
hosts = append(hosts, host)
|
||||
}
|
||||
return hosts, nil
|
||||
}
|
||||
|
||||
func enumerateHosts(host string) ([]string, error) {
|
||||
if !isCIDRNotation(host) {
|
||||
return []string{host}, nil
|
||||
}
|
||||
|
||||
ipAddr, ipNet, err := net.ParseCIDR(host)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to parse CIDR. err: %w", err)
|
||||
}
|
||||
maskLen, _ := ipNet.Mask.Size()
|
||||
|
||||
addrs := []string{}
|
||||
if net.ParseIP(ipAddr.String()).To4() != nil {
|
||||
n := iplib.NewNet4(ipAddr, int(maskLen))
|
||||
for _, addr := range n.Enumerate(int(n.Count()), 0) {
|
||||
addrs = append(addrs, addr.String())
|
||||
}
|
||||
} else if net.ParseIP(ipAddr.String()).To16() != nil {
|
||||
n := iplib.NewNet6(ipAddr, int(maskLen), 0)
|
||||
if !n.Count().IsInt64() {
|
||||
return nil, xerrors.Errorf("Failed to enumerate IP address. err: mask bitsize too big")
|
||||
}
|
||||
for _, addr := range n.Enumerate(int(n.Count().Int64()), 0) {
|
||||
addrs = append(addrs, addr.String())
|
||||
}
|
||||
}
|
||||
return addrs, nil
|
||||
}
|
||||
|
||||
func isCIDRNotation(host string) bool {
|
||||
ss := strings.Split(host, "/")
|
||||
if len(ss) == 1 || net.ParseIP(ss[0]) == nil {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
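A test-style sketch (same package, since the helpers are unexported) of the CIDR expansion performed by enumerateHosts; the expected addresses mirror the /30 case in the TestHosts table further down this diff:

```go
package config

import (
	"sort"
	"testing"
)

func TestEnumerateHostsSketch(t *testing.T) {
	got, err := enumerateHosts("192.168.1.1/30")
	if err != nil {
		t.Fatal(err)
	}
	sort.Strings(got)
	want := []string{"192.168.1.1", "192.168.1.2"}
	if len(got) != 2 || got[0] != want[0] || got[1] != want[1] {
		t.Fatalf("got %v, want %v", got, want)
	}
}
```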
|
||||
func setDefaultIfEmpty(server *ServerInfo) error {
|
||||
if server.Type != constant.ServerTypePseudo {
|
||||
if len(server.JumpServer) == 0 {
|
||||
server.JumpServer = Conf.Default.JumpServer
|
||||
}
|
||||
|
||||
if server.Port == "" {
|
||||
server.Port = Conf.Default.Port
|
||||
}
|
||||
|
||||
if server.User == "" {
|
||||
server.User = Conf.Default.User
|
||||
}
|
||||
|
||||
if server.SSHConfigPath == "" {
|
||||
server.SSHConfigPath = Conf.Default.SSHConfigPath
|
||||
}
|
||||
|
||||
if server.KeyPath == "" {
|
||||
server.KeyPath = Conf.Default.KeyPath
|
||||
}
|
||||
}
|
||||
|
||||
if len(server.Lockfiles) == 0 {
|
||||
server.Lockfiles = Conf.Default.Lockfiles
|
||||
}
|
||||
|
||||
if len(server.ContainersIncluded) == 0 {
|
||||
server.ContainersIncluded = Conf.Default.ContainersIncluded
|
||||
}
|
||||
|
||||
if len(server.ContainersExcluded) == 0 {
|
||||
server.ContainersExcluded = Conf.Default.ContainersExcluded
|
||||
}
|
||||
|
||||
if server.ContainerType == "" {
|
||||
server.ContainerType = Conf.Default.ContainerType
|
||||
}
|
||||
|
||||
for contName, cont := range server.Containers {
|
||||
cont.IgnoreCves = append(cont.IgnoreCves, Conf.Default.IgnoreCves...)
|
||||
server.Containers[contName] = cont
|
||||
}
|
||||
|
||||
if server.OwaspDCXMLPath == "" {
|
||||
server.OwaspDCXMLPath = Conf.Default.OwaspDCXMLPath
|
||||
}
|
||||
|
||||
if server.Memo == "" {
|
||||
server.Memo = Conf.Default.Memo
|
||||
}
|
||||
|
||||
if server.WordPress == nil {
|
||||
server.WordPress = Conf.Default.WordPress
|
||||
if server.WordPress == nil {
|
||||
server.WordPress = &WordPressConf{}
|
||||
}
|
||||
}
|
||||
|
||||
if server.PortScan == nil {
|
||||
server.PortScan = Conf.Default.PortScan
|
||||
if server.PortScan == nil {
|
||||
server.PortScan = &PortScanConf{}
|
||||
}
|
||||
}
|
||||
|
||||
if len(server.IgnoredJSONKeys) == 0 {
|
||||
server.IgnoredJSONKeys = Conf.Default.IgnoredJSONKeys
|
||||
}
|
||||
|
||||
opt := map[string]interface{}{}
|
||||
for k, v := range Conf.Default.Optional {
|
||||
opt[k] = v
|
||||
}
|
||||
for k, v := range server.Optional {
|
||||
opt[k] = v
|
||||
}
|
||||
server.Optional = opt
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func toCpeURI(cpename string) (string, error) {
|
||||
if strings.HasPrefix(cpename, "cpe:2.3:") {
|
||||
wfn, err := naming.UnbindFS(cpename)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return naming.BindToURI(wfn), nil
|
||||
} else if strings.HasPrefix(cpename, "cpe:/") {
|
||||
wfn, err := naming.UnbindURI(cpename)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return naming.BindToURI(wfn), nil
|
||||
}
|
||||
return "", xerrors.Errorf("Unknown CPE format: %s", cpename)
|
||||
}
|
||||
@@ -1,137 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sort"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestHosts(t *testing.T) {
|
||||
var tests = []struct {
|
||||
in string
|
||||
ignore []string
|
||||
expected []string
|
||||
err bool
|
||||
}{
|
||||
{
|
||||
in: "127.0.0.1",
|
||||
expected: []string{"127.0.0.1"},
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "127.0.0.1",
|
||||
ignore: []string{"127.0.0.1"},
|
||||
expected: []string{},
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "ssh/host",
|
||||
expected: []string{"ssh/host"},
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "192.168.1.1/30",
|
||||
expected: []string{"192.168.1.1", "192.168.1.2"},
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "192.168.1.1/30",
|
||||
ignore: []string{"192.168.1.1"},
|
||||
expected: []string{"192.168.1.2"},
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "192.168.1.1/30",
|
||||
ignore: []string{"ignore"},
|
||||
err: true,
|
||||
},
|
||||
{
|
||||
in: "192.168.1.1/30",
|
||||
ignore: []string{"192.168.1.1/30"},
|
||||
expected: []string{},
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "192.168.1.1/31",
|
||||
expected: []string{"192.168.1.0", "192.168.1.1"},
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "192.168.1.1/32",
|
||||
expected: []string{"192.168.1.1"},
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "2001:4860:4860::8888/126",
|
||||
expected: []string{"2001:4860:4860::8888", "2001:4860:4860::8889", "2001:4860:4860::888a", "2001:4860:4860::888b"},
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "2001:4860:4860::8888/127",
|
||||
expected: []string{"2001:4860:4860::8888", "2001:4860:4860::8889"},
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "2001:4860:4860::8888/128",
|
||||
expected: []string{"2001:4860:4860::8888"},
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "2001:4860:4860::8888/32",
|
||||
err: true,
|
||||
},
|
||||
}
|
||||
for i, tt := range tests {
|
||||
actual, err := hosts(tt.in, tt.ignore)
|
||||
sort.Slice(actual, func(i, j int) bool { return actual[i] < actual[j] })
|
||||
if err != nil && !tt.err {
|
||||
t.Errorf("[%d] unexpected error occurred, in: %s act: %s, exp: %s",
|
||||
i, tt.in, actual, tt.expected)
|
||||
} else if err == nil && tt.err {
|
||||
t.Errorf("[%d] expected error is not occurred, in: %s act: %s, exp: %s",
|
||||
i, tt.in, actual, tt.expected)
|
||||
}
|
||||
if !reflect.DeepEqual(actual, tt.expected) {
|
||||
t.Errorf("[%d] in: %s, actual: %q, expected: %q", i, tt.in, actual, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestToCpeURI(t *testing.T) {
|
||||
var tests = []struct {
|
||||
in string
|
||||
expected string
|
||||
err bool
|
||||
}{
|
||||
{
|
||||
in: "",
|
||||
expected: "",
|
||||
err: true,
|
||||
},
|
||||
{
|
||||
in: "cpe:/a:microsoft:internet_explorer:10",
|
||||
expected: "cpe:/a:microsoft:internet_explorer:10",
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "cpe:2.3:a:microsoft:internet_explorer:10:*:*:*:*:*:*:*",
|
||||
expected: "cpe:/a:microsoft:internet_explorer:10",
|
||||
err: false,
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
actual, err := toCpeURI(tt.in)
|
||||
if err != nil && !tt.err {
|
||||
t.Errorf("[%d] unexpected error occurred, in: %s act: %s, exp: %s",
|
||||
i, tt.in, actual, tt.expected)
|
||||
} else if err == nil && tt.err {
|
||||
t.Errorf("[%d] expected error is not occurred, in: %s act: %s, exp: %s",
|
||||
i, tt.in, actual, tt.expected)
|
||||
}
|
||||
if actual != tt.expected {
|
||||
t.Errorf("[%d] in: %s, actual: %s, expected: %s",
|
||||
i, tt.in, actual, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,330 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// VulnDictInterface is an interface of vulnsrc
|
||||
type VulnDictInterface interface {
|
||||
Init()
|
||||
Validate() error
|
||||
IsFetchViaHTTP() bool
|
||||
CheckHTTPHealth() error
|
||||
GetName() string
|
||||
GetType() string
|
||||
GetURL() string
|
||||
GetSQLite3Path() string
|
||||
GetDebugSQL() bool
|
||||
}
|
||||
|
||||
// VulnDict is a base struct of vuln dicts
|
||||
type VulnDict struct {
|
||||
Name string
|
||||
|
||||
// DB type of the vulnerability dictionary (sqlite3, mysql, postgres, redis or http)
|
||||
Type string
|
||||
|
||||
// http://cve-dictionary.com:1323 or DB connection string
|
||||
URL string `json:"-"`
|
||||
|
||||
// /path/to/cve.sqlite3
|
||||
SQLite3Path string
|
||||
|
||||
DebugSQL bool
|
||||
}
|
||||
|
||||
// GetType returns type
|
||||
func (cnf VulnDict) GetType() string {
|
||||
return cnf.Type
|
||||
}
|
||||
|
||||
// GetName returns name
|
||||
func (cnf VulnDict) GetName() string {
|
||||
return cnf.Name
|
||||
}
|
||||
|
||||
// GetURL returns url
|
||||
func (cnf VulnDict) GetURL() string {
|
||||
return cnf.URL
|
||||
}
|
||||
|
||||
// GetSQLite3Path returns the path of the SQLite3 file
|
||||
func (cnf VulnDict) GetSQLite3Path() string {
|
||||
return cnf.SQLite3Path
|
||||
}
|
||||
|
||||
// GetDebugSQL returns the debugSQL flag
|
||||
func (cnf VulnDict) GetDebugSQL() bool {
|
||||
return cnf.DebugSQL
|
||||
}
|
||||
|
||||
// Validate settings
|
||||
func (cnf VulnDict) Validate() error {
|
||||
logging.Log.Infof("%s.type=%s, %s.url=%s, %s.SQLite3Path=%s",
|
||||
cnf.Name, cnf.Type, cnf.Name, cnf.URL, cnf.Name, cnf.SQLite3Path)
|
||||
|
||||
switch cnf.Type {
|
||||
case "sqlite3":
|
||||
if cnf.URL != "" {
|
||||
return xerrors.Errorf("To use SQLite3, specify %s.type=sqlite3 and %s.SQLite3Path. To use as HTTP server mode, specify %s.type=http and %s.url",
|
||||
cnf.Name, cnf.Name, cnf.Name, cnf.Name)
|
||||
}
|
||||
if ok, _ := govalidator.IsFilePath(cnf.SQLite3Path); !ok {
|
||||
return xerrors.Errorf("SQLite3 path must be a *Absolute* file path. %s.SQLite3Path: %s",
|
||||
cnf.Name, cnf.SQLite3Path)
|
||||
}
|
||||
if _, err := os.Stat(cnf.SQLite3Path); os.IsNotExist(err) {
|
||||
logging.Log.Warnf("%s.SQLite3Path=%s file not found", cnf.Name, cnf.SQLite3Path)
|
||||
}
|
||||
case "mysql":
|
||||
if cnf.URL == "" {
|
||||
return xerrors.Errorf(`MySQL connection string is needed. %s.url="user:pass@tcp(localhost:3306)/dbname"`, cnf.Name)
|
||||
}
|
||||
case "postgres":
|
||||
if cnf.URL == "" {
|
||||
return xerrors.Errorf(`PostgreSQL connection string is needed. %s.url="host=myhost user=user dbname=dbname sslmode=disable password=password"`, cnf.Name)
|
||||
}
|
||||
case "redis":
|
||||
if cnf.URL == "" {
|
||||
return xerrors.Errorf(`Redis connection string is needed. %s.url="redis://localhost/0"`, cnf.Name)
|
||||
}
|
||||
case "http":
|
||||
if cnf.URL == "" {
|
||||
return xerrors.Errorf(`URL is needed. -%s-url="http://localhost:1323"`, cnf.Name)
|
||||
}
|
||||
default:
|
||||
return xerrors.Errorf("%s.type must be either 'sqlite3', 'mysql', 'postgres', 'redis' or 'http'. %s.type: %s", cnf.Name, cnf.Name, cnf.Type)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Init the struct
|
||||
func (cnf VulnDict) Init() {}
|
||||
|
||||
func (cnf *VulnDict) setDefault(sqlite3Name string) {
|
||||
if cnf.Type == "" {
|
||||
cnf.Type = "sqlite3"
|
||||
}
|
||||
if cnf.URL == "" && cnf.SQLite3Path == "" {
|
||||
wd, _ := os.Getwd()
|
||||
cnf.SQLite3Path = filepath.Join(wd, sqlite3Name)
|
||||
}
|
||||
}
|
||||
|
||||
// IsFetchViaHTTP returns whether the dictionary is fetched via HTTP
|
||||
func (cnf VulnDict) IsFetchViaHTTP() bool {
|
||||
return cnf.Type == "http"
|
||||
}
|
||||
|
||||
// CheckHTTPHealth checks http server status
|
||||
func (cnf VulnDict) CheckHTTPHealth() error {
|
||||
if !cnf.IsFetchViaHTTP() {
|
||||
return nil
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("%s/health", cnf.URL)
|
||||
resp, _, errs := gorequest.New().Timeout(10 * time.Second).SetDebug(Conf.Debug).Get(url).End()
|
||||
// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
return xerrors.Errorf("Failed to request to CVE server. url: %s, errs: %s",
|
||||
url, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GovalDictConf is goval-dictionary config
|
||||
type GovalDictConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const govalType = "OVALDB_TYPE"
|
||||
const govalURL = "OVALDB_URL"
|
||||
const govalPATH = "OVALDB_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *GovalDictConf) Init() {
|
||||
cnf.Name = "ovalDict"
|
||||
if os.Getenv(govalType) != "" {
|
||||
cnf.Type = os.Getenv(govalType)
|
||||
}
|
||||
if os.Getenv(govalURL) != "" {
|
||||
cnf.URL = os.Getenv(govalURL)
|
||||
}
|
||||
if os.Getenv(govalPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(govalPATH)
|
||||
}
|
||||
cnf.setDefault("oval.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
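A sketch of the priority described in the Init comment: environment variables win over config.toml. The env var names come from the constants above; the URL and type values are placeholders:

```go
package main

import (
	"fmt"
	"os"

	"github.com/future-architect/vuls/config"
)

func main() {
	// Placeholders: a goval-dictionary served over HTTP on a local port.
	os.Setenv("OVALDB_TYPE", "http")
	os.Setenv("OVALDB_URL", "http://127.0.0.1:1324")

	cnf := config.GovalDictConf{}
	cnf.Init()
	fmt.Println(cnf.GetType(), cnf.GetURL()) // http http://127.0.0.1:1324
	fmt.Println(cnf.IsFetchViaHTTP())        // true
}
```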
|
||||
// ExploitConf is exploit config
|
||||
type ExploitConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const exploitDBType = "EXPLOITDB_TYPE"
|
||||
const exploitDBURL = "EXPLOITDB_URL"
|
||||
const exploitDBPATH = "EXPLOITDB_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *ExploitConf) Init() {
|
||||
cnf.Name = "exploit"
|
||||
if os.Getenv(exploitDBType) != "" {
|
||||
cnf.Type = os.Getenv(exploitDBType)
|
||||
}
|
||||
if os.Getenv(exploitDBURL) != "" {
|
||||
cnf.URL = os.Getenv(exploitDBURL)
|
||||
}
|
||||
if os.Getenv(exploitDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(exploitDBPATH)
|
||||
}
|
||||
cnf.setDefault("go-exploitdb.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// GoCveDictConf is GoCveDict config
|
||||
type GoCveDictConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const cveDBType = "CVEDB_TYPE"
|
||||
const cveDBURL = "CVEDB_URL"
|
||||
const cveDBPATH = "CVEDB_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *GoCveDictConf) Init() {
|
||||
cnf.Name = "cveDict"
|
||||
if os.Getenv(cveDBType) != "" {
|
||||
cnf.Type = os.Getenv(cveDBType)
|
||||
}
|
||||
if os.Getenv(cveDBURL) != "" {
|
||||
cnf.URL = os.Getenv(cveDBURL)
|
||||
}
|
||||
if os.Getenv(cveDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(cveDBPATH)
|
||||
}
|
||||
cnf.setDefault("cve.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// GostConf is gost config
|
||||
type GostConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const gostDBType = "GOSTDB_TYPE"
|
||||
const gostDBURL = "GOSTDB_URL"
|
||||
const gostDBPATH = "GOSTDB_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *GostConf) Init() {
|
||||
cnf.Name = "gost"
|
||||
if os.Getenv(gostDBType) != "" {
|
||||
cnf.Type = os.Getenv(gostDBType)
|
||||
}
|
||||
if os.Getenv(gostDBURL) != "" {
|
||||
cnf.URL = os.Getenv(gostDBURL)
|
||||
}
|
||||
if os.Getenv(gostDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(gostDBPATH)
|
||||
}
|
||||
cnf.setDefault("gost.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// MetasploitConf is go-msfdb config
|
||||
type MetasploitConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const metasploitDBType = "METASPLOITDB_TYPE"
|
||||
const metasploitDBURL = "METASPLOITDB_URL"
|
||||
const metasploitDBPATH = "METASPLOITDB_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *MetasploitConf) Init() {
|
||||
cnf.Name = "metasploit"
|
||||
if os.Getenv(metasploitDBType) != "" {
|
||||
cnf.Type = os.Getenv(metasploitDBType)
|
||||
}
|
||||
if os.Getenv(metasploitDBURL) != "" {
|
||||
cnf.URL = os.Getenv(metasploitDBURL)
|
||||
}
|
||||
if os.Getenv(metasploitDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(metasploitDBPATH)
|
||||
}
|
||||
cnf.setDefault("go-msfdb.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// KEVulnConf is go-kev config
|
||||
type KEVulnConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const kevulnDBType = "KEVULN_TYPE"
|
||||
const kevulnDBURL = "KEVULN_URL"
|
||||
const kevulnDBPATH = "KEVULN_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *KEVulnConf) Init() {
|
||||
cnf.Name = "kevuln"
|
||||
if os.Getenv(kevulnDBType) != "" {
|
||||
cnf.Type = os.Getenv(kevulnDBType)
|
||||
}
|
||||
if os.Getenv(kevulnDBURL) != "" {
|
||||
cnf.URL = os.Getenv(kevulnDBURL)
|
||||
}
|
||||
if os.Getenv(kevulnDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(kevulnDBPATH)
|
||||
}
|
||||
cnf.setDefault("go-kev.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// CtiConf is go-cti config
|
||||
type CtiConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const ctiDBType = "CTI_TYPE"
|
||||
const ctiDBURL = "CTI_URL"
|
||||
const ctiDBPATH = "CTI_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *CtiConf) Init() {
|
||||
cnf.Name = "cti"
|
||||
if os.Getenv(ctiDBType) != "" {
|
||||
cnf.Type = os.Getenv(ctiDBType)
|
||||
}
|
||||
if os.Getenv(ctiDBURL) != "" {
|
||||
cnf.URL = os.Getenv(ctiDBURL)
|
||||
}
|
||||
if os.Getenv(ctiDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(ctiDBPATH)
|
||||
}
|
||||
cnf.setDefault("go-cti.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
@@ -1,64 +0,0 @@
|
||||
package constant
|
||||
|
||||
// Global constant
|
||||
// Pkg local constants should not be defined here.
|
||||
// Define them in each package.
|
||||
|
||||
const (
|
||||
// RedHat is
|
||||
RedHat = "redhat"
|
||||
|
||||
// Debian is
|
||||
Debian = "debian"
|
||||
|
||||
// Ubuntu is
|
||||
Ubuntu = "ubuntu"
|
||||
|
||||
// CentOS is
|
||||
CentOS = "centos"
|
||||
|
||||
// Alma is
|
||||
Alma = "alma"
|
||||
|
||||
// Rocky is
|
||||
Rocky = "rocky"
|
||||
|
||||
// Fedora is
|
||||
Fedora = "fedora"
|
||||
|
||||
// Amazon is
|
||||
Amazon = "amazon"
|
||||
|
||||
// Oracle is
|
||||
Oracle = "oracle"
|
||||
|
||||
// FreeBSD is
|
||||
FreeBSD = "freebsd"
|
||||
|
||||
// Raspbian is
|
||||
Raspbian = "raspbian"
|
||||
|
||||
// Windows is
|
||||
Windows = "windows"
|
||||
|
||||
// OpenSUSE is
|
||||
OpenSUSE = "opensuse"
|
||||
|
||||
// OpenSUSELeap is
|
||||
OpenSUSELeap = "opensuse.leap"
|
||||
|
||||
// SUSEEnterpriseServer is
|
||||
SUSEEnterpriseServer = "suse.linux.enterprise.server"
|
||||
|
||||
// SUSEEnterpriseDesktop is
|
||||
SUSEEnterpriseDesktop = "suse.linux.enterprise.desktop"
|
||||
|
||||
// Alpine is
|
||||
Alpine = "alpine"
|
||||
|
||||
// ServerTypePseudo is used for ServerInfo.Type, r.Family
|
||||
ServerTypePseudo = "pseudo"
|
||||
|
||||
// DeepSecurity is
|
||||
DeepSecurity = "deepsecurity"
|
||||
)
|
||||
@@ -1,33 +0,0 @@
|
||||
FROM golang:alpine as builder
|
||||
|
||||
RUN apk add --no-cache \
|
||||
git \
|
||||
make \
|
||||
gcc \
|
||||
musl-dev
|
||||
|
||||
ENV REPOSITORY github.com/future-architect/vuls
|
||||
COPY . $GOPATH/src/$REPOSITORY
|
||||
RUN cd $GOPATH/src/$REPOSITORY && \
|
||||
make build-scanner && mv vuls $GOPATH/bin && \
|
||||
make build-trivy-to-vuls && mv trivy-to-vuls $GOPATH/bin && \
|
||||
make build-future-vuls && mv future-vuls $GOPATH/bin
|
||||
|
||||
FROM alpine:3.15
|
||||
|
||||
ENV LOGDIR /var/log/vuls
|
||||
ENV WORKDIR /vuls
|
||||
|
||||
RUN apk add --no-cache \
|
||||
openssh-client \
|
||||
ca-certificates \
|
||||
git \
|
||||
nmap \
|
||||
&& mkdir -p $WORKDIR $LOGDIR
|
||||
|
||||
COPY --from=builder /go/bin/vuls /go/bin/trivy-to-vuls /go/bin/future-vuls /usr/local/bin/
|
||||
COPY --from=aquasec/trivy:latest /usr/local/bin/trivy /usr/local/bin/trivy
|
||||
|
||||
VOLUME ["$WORKDIR", "$LOGDIR"]
|
||||
WORKDIR $WORKDIR
|
||||
ENV PWD $WORKDIR
|
||||
@@ -1,38 +0,0 @@
|
||||
# future-vuls

## Main Features

- Upload Vuls scan results (JSON) to FutureVuls

## Installation

```
git clone https://github.com/future-architect/vuls.git
make build-future-vuls
```

## Command Reference

```
Upload to FutureVuls

Usage:
  future-vuls upload [flags]

Flags:
      --config string   config file (default is $HOME/.cobra.yaml)
  -g, --group-id int    future vuls group id, ENV: VULS_GROUP_ID
  -h, --help            help for upload
  -s, --stdin           input from stdin. ENV: VULS_STDIN
  -t, --token string    future vuls token
      --url string      future vuls upload url
      --uuid string     server uuid. ENV: VULS_SERVER_UUID
```

## Usage

- Upload a results JSON read from stdin

```
cat results.json | future-vuls upload --stdin --token xxxx --url https://xxxx --group-id 1 --uuid xxxx
```
@@ -1,118 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/saas"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
configFile string
|
||||
stdIn bool
|
||||
jsonDir string
|
||||
serverUUID string
|
||||
groupID int64
|
||||
token string
|
||||
tags []string
|
||||
url string
|
||||
)
|
||||
|
||||
func main() {
|
||||
var err error
|
||||
var cmdFvulsUploader = &cobra.Command{
|
||||
Use: "upload",
|
||||
Short: "Upload to FutureVuls",
|
||||
Long: `Upload to FutureVuls`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if len(serverUUID) == 0 {
|
||||
serverUUID = os.Getenv("VULS_SERVER_UUID")
|
||||
}
|
||||
if groupID == 0 {
|
||||
envGroupID := os.Getenv("VULS_GROUP_ID")
|
||||
if groupID, err = strconv.ParseInt(envGroupID, 10, 64); err != nil {
|
||||
fmt.Printf("Invalid GroupID: %s\n", envGroupID)
|
||||
return
|
||||
}
|
||||
}
|
||||
if len(url) == 0 {
|
||||
url = os.Getenv("VULS_URL")
|
||||
}
|
||||
if len(token) == 0 {
|
||||
token = os.Getenv("VULS_TOKEN")
|
||||
}
|
||||
if len(tags) == 0 {
|
||||
tags = strings.Split(os.Getenv("VULS_TAGS"), ",")
|
||||
}
|
||||
|
||||
var scanResultJSON []byte
|
||||
if stdIn {
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
buf := new(bytes.Buffer)
|
||||
if _, err = buf.ReadFrom(reader); err != nil {
|
||||
return
|
||||
}
|
||||
scanResultJSON = buf.Bytes()
|
||||
} else {
|
||||
fmt.Println("use --stdin option")
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
|
||||
var scanResult models.ScanResult
|
||||
if err = json.Unmarshal(scanResultJSON, &scanResult); err != nil {
|
||||
fmt.Println("Failed to parse json", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
scanResult.ServerUUID = serverUUID
|
||||
if 0 < len(tags) {
|
||||
if scanResult.Optional == nil {
|
||||
scanResult.Optional = map[string]interface{}{}
|
||||
}
|
||||
scanResult.Optional["VULS_TAGS"] = tags
|
||||
}
|
||||
|
||||
config.Conf.Saas.GroupID = groupID
|
||||
config.Conf.Saas.Token = token
|
||||
config.Conf.Saas.URL = url
|
||||
if err = (saas.Writer{}).Write(scanResult); err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
return
|
||||
},
|
||||
}
|
||||
var cmdVersion = &cobra.Command{
|
||||
Use: "version",
|
||||
Short: "Show version",
|
||||
Long: "Show version",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
fmt.Printf("future-vuls-%s-%s\n", config.Version, config.Revision)
|
||||
},
|
||||
}
|
||||
cmdFvulsUploader.PersistentFlags().StringVar(&serverUUID, "uuid", "", "server uuid. ENV: VULS_SERVER_UUID")
|
||||
cmdFvulsUploader.PersistentFlags().StringVar(&configFile, "config", "", "config file (default is $HOME/.cobra.yaml)")
|
||||
cmdFvulsUploader.PersistentFlags().BoolVarP(&stdIn, "stdin", "s", false, "input from stdin. ENV: VULS_STDIN")
|
||||
// TODO Read JSON file from directory
|
||||
// cmdFvulsUploader.Flags().StringVarP(&jsonDir, "results-dir", "d", "./", "vuls scan results json dir")
|
||||
cmdFvulsUploader.PersistentFlags().Int64VarP(&groupID, "group-id", "g", 0, "future vuls group id, ENV: VULS_GROUP_ID")
|
||||
cmdFvulsUploader.PersistentFlags().StringVarP(&token, "token", "t", "", "future vuls token")
|
||||
cmdFvulsUploader.PersistentFlags().StringVar(&url, "url", "", "future vuls upload url")
|
||||
|
||||
var rootCmd = &cobra.Command{Use: "future-vuls"}
|
||||
rootCmd.AddCommand(cmdFvulsUploader)
|
||||
rootCmd.AddCommand(cmdVersion)
|
||||
if err = rootCmd.Execute(); err != nil {
|
||||
fmt.Println("Failed to execute command", err)
|
||||
}
|
||||
}
|
||||
@@ -1,71 +0,0 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"encoding/xml"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/knqyf263/go-cpe/naming"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
type analysis struct {
|
||||
Dependencies []dependency `xml:"dependencies>dependency"`
|
||||
}
|
||||
|
||||
type dependency struct {
|
||||
Identifiers []vulnerabilityID `xml:"identifiers>vulnerabilityIds"`
|
||||
}
|
||||
|
||||
type vulnerabilityID struct {
|
||||
ID string `xml:"id"`
|
||||
}
|
||||
|
||||
func appendIfMissing(slice []string, str string) []string {
|
||||
for _, s := range slice {
|
||||
if s == str {
|
||||
return slice
|
||||
}
|
||||
}
|
||||
return append(slice, str)
|
||||
}
|
||||
|
||||
// Parse parses OWASP Dependency-Check XML and collects a list of CPEs
|
||||
func Parse(path string) ([]string, error) {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
log.Warnf("OWASP Dependency Check XML is not found: %s", path)
|
||||
return []string{}, nil
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
b, err := io.ReadAll(file)
|
||||
if err != nil {
|
||||
log.Warnf("Failed to read OWASP Dependency Check XML: %s", path)
|
||||
return []string{}, nil
|
||||
}
|
||||
|
||||
var anal analysis
|
||||
if err := xml.Unmarshal(b, &anal); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to unmarshal: %s", err)
|
||||
}
|
||||
|
||||
cpes := []string{}
|
||||
for _, d := range anal.Dependencies {
|
||||
for _, ident := range d.Identifiers {
|
||||
id := ident.ID // Start with cpe:2.3:
|
||||
// Convert from CPE 2.3 to CPE 2.2
|
||||
if strings.HasPrefix(id, "cpe:2.3:") {
|
||||
wfn, err := naming.UnbindFS(id)
|
||||
if err != nil {
|
||||
return []string{}, err
|
||||
}
|
||||
id = naming.BindToURI(wfn)
|
||||
}
|
||||
cpes = appendIfMissing(cpes, id)
|
||||
}
|
||||
}
|
||||
return cpes, nil
|
||||
}
|
||||
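The identifier conversion above is the same CPE 2.3 to 2.2 binding exercised by TestToCpeURI elsewhere in this diff; a standalone sketch using the same library calls:

```go
package main

import (
	"fmt"

	"github.com/knqyf263/go-cpe/naming"
)

func main() {
	fs := "cpe:2.3:a:microsoft:internet_explorer:10:*:*:*:*:*:*:*"
	wfn, err := naming.UnbindFS(fs) // parse the CPE 2.3 formatted string
	if err != nil {
		panic(err)
	}
	fmt.Println(naming.BindToURI(wfn)) // cpe:/a:microsoft:internet_explorer:10
}
```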
@@ -1,35 +0,0 @@
|
||||
# trivy-to-vuls

## Main Features

- Convert Trivy's JSON results into a Vuls report JSON

## Installation

```
git clone https://github.com/future-architect/vuls.git
make build-trivy-to-vuls
```

## Command Reference

```
Parse trivy json to vuls results

Usage:
  trivy-to-vuls parse [flags]

Flags:
  -h, --help                          help for parse
  -s, --stdin                         input from stdin
  -d, --trivy-json-dir string         trivy json dir (default "./")
  -f, --trivy-json-file-name string   trivy json file name (default "results.json")
```

## Usage

- Parse Trivy output piped via stdin

```
trivy -q image -f=json python:3.4-alpine | trivy-to-vuls parse --stdin
```
@@ -1,87 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/contrib/trivy/parser"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
serverUUID string
|
||||
stdIn bool
|
||||
jsonDir string
|
||||
jsonFileName string
|
||||
)
|
||||
|
||||
func main() {
|
||||
var err error
|
||||
var cmdTrivyToVuls = &cobra.Command{
|
||||
Use: "parse",
|
||||
Short: "Parse trivy json to vuls results",
|
||||
Long: `Parse trivy json to vuls results`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
jsonFilePath := filepath.Join(jsonDir, jsonFileName)
|
||||
var trivyJSON []byte
|
||||
if stdIn {
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
buf := new(bytes.Buffer)
|
||||
if _, err = buf.ReadFrom(reader); err != nil {
|
||||
fmt.Printf("Failed to read file. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
trivyJSON = buf.Bytes()
|
||||
} else {
|
||||
if trivyJSON, err = os.ReadFile(jsonFilePath); err != nil {
|
||||
fmt.Printf("Failed to read file. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
parser, err := parser.NewParser(trivyJSON)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to new parser. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
scanResult, err := parser.Parse(trivyJSON)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to parse. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
var resultJSON []byte
|
||||
if resultJSON, err = json.MarshalIndent(scanResult, "", " "); err != nil {
|
||||
fmt.Printf("Failed to create json. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
fmt.Println(string(resultJSON))
|
||||
},
|
||||
}
|
||||
|
||||
var cmdVersion = &cobra.Command{
|
||||
Use: "version",
|
||||
Short: "Show version",
|
||||
Long: "Show version",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
fmt.Printf("trivy-to-vuls-%s-%s\n", config.Version, config.Revision)
|
||||
},
|
||||
}
|
||||
|
||||
cmdTrivyToVuls.Flags().BoolVarP(&stdIn, "stdin", "s", false, "input from stdin")
|
||||
cmdTrivyToVuls.Flags().StringVarP(&jsonDir, "trivy-json-dir", "d", "./", "trivy json dir")
|
||||
cmdTrivyToVuls.Flags().StringVarP(&jsonFileName, "trivy-json-file-name", "f", "results.json", "trivy json file name")
|
||||
|
||||
var rootCmd = &cobra.Command{Use: "trivy-to-vuls"}
|
||||
rootCmd.AddCommand(cmdTrivyToVuls)
|
||||
rootCmd.AddCommand(cmdVersion)
|
||||
if err = rootCmd.Execute(); err != nil {
|
||||
fmt.Printf("Failed to execute command. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
os.Exit(0)
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
package parser

import (
	"encoding/json"

	v2 "github.com/future-architect/vuls/contrib/trivy/parser/v2"
	"github.com/future-architect/vuls/models"
	"golang.org/x/xerrors"
)

// Parser is a parser interface
type Parser interface {
	Parse(vulnJSON []byte) (result *models.ScanResult, err error)
}

// Report is used for judging the schema version of Trivy's JSON report
type Report struct {
	SchemaVersion int `json:",omitempty"`
}

// NewParser makes a parser for the schema version of Trivy's JSON report
func NewParser(vulnJSON []byte) (Parser, error) {
	r := Report{}
	if err := json.Unmarshal(vulnJSON, &r); err != nil {
		return nil, xerrors.Errorf("Failed to parse JSON. Please use the latest version of trivy, trivy-to-vuls and future-vuls")
	}
	switch r.SchemaVersion {
	case 2:
		return v2.ParserV2{}, nil
	default:
		return nil, xerrors.Errorf("Failed to parse trivy json. SchemaVersion %d is not supported yet. Please contact support", r.SchemaVersion)
	}
}
|
||||
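A sketch of the schema-version dispatch: NewParser only reads SchemaVersion, and the rest of the report is handled by the returned parser. The JSON below is a trimmed, illustrative input, not real Trivy output:

```go
package main

import (
	"fmt"

	"github.com/future-architect/vuls/contrib/trivy/parser"
)

func main() {
	vulnJSON := []byte(`{"SchemaVersion": 2, "ArtifactName": "alpine:3.15"}`)
	p, err := parser.NewParser(vulnJSON)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%T\n", p) // v2.ParserV2
}
```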
@@ -1,79 +0,0 @@
package v2

import (
	"encoding/json"
	"fmt"
	"regexp"
	"time"

	"github.com/aquasecurity/trivy/pkg/types"
	"golang.org/x/xerrors"

	"github.com/future-architect/vuls/constant"
	"github.com/future-architect/vuls/contrib/trivy/pkg"
	"github.com/future-architect/vuls/models"
)

// ParserV2 is a parser for scheme v2
type ParserV2 struct {
}

// Parse trivy's JSON and convert to the Vuls struct
func (p ParserV2) Parse(vulnJSON []byte) (result *models.ScanResult, err error) {
	var report types.Report
	if err = json.Unmarshal(vulnJSON, &report); err != nil {
		return nil, err
	}

	scanResult, err := pkg.Convert(report.Results)
	if err != nil {
		return nil, err
	}

	if err := setScanResultMeta(scanResult, &report); err != nil {
		return nil, err
	}
	return scanResult, nil
}

var dockerTagPattern = regexp.MustCompile(`^(.*):(.*)$`)

func setScanResultMeta(scanResult *models.ScanResult, report *types.Report) error {
	if len(report.Results) == 0 {
		return xerrors.Errorf("scanned images or libraries are not supported by Trivy. see https://aquasecurity.github.io/trivy/dev/vulnerability/detection/os/, https://aquasecurity.github.io/trivy/dev/vulnerability/detection/language/")
	}

	scanResult.ServerName = report.ArtifactName
	if report.ArtifactType == "container_image" {
		matches := dockerTagPattern.FindStringSubmatch(report.ArtifactName)
		var imageName, imageTag string
		if 2 < len(matches) {
			// including the image tag
			imageName = matches[1]
			imageTag = matches[2]
		} else {
			// no image tag
			imageName = report.ArtifactName
			imageTag = "latest" // Complement if the tag is omitted
		}
		scanResult.ServerName = fmt.Sprintf("%s:%s", imageName, imageTag)
		if scanResult.Optional == nil {
			scanResult.Optional = map[string]interface{}{}
		}
		scanResult.Optional["TRIVY_IMAGE_NAME"] = imageName
		scanResult.Optional["TRIVY_IMAGE_TAG"] = imageTag
	}

	if report.Metadata.OS != nil {
		scanResult.Family = report.Metadata.OS.Family
		scanResult.Release = report.Metadata.OS.Name
	} else {
		scanResult.Family = constant.ServerTypePseudo
	}

	scanResult.ScannedAt = time.Now()
	scanResult.ScannedBy = "trivy"
	scanResult.ScannedVia = "trivy"

	return nil
}
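As a side note on setScanResultMeta: dockerTagPattern splits an artifact name on its last colon, and the tag is complemented with "latest" when missing, which is why the test fixtures below expect the bare artifact name "redis" to become the server name "redis:latest". A small, hypothetical illustration (both artifact names are taken from the fixtures):

	matches := dockerTagPattern.FindStringSubmatch("quay.io/fluentd_elasticsearch/fluentd:v2.9.0")
	// matches[1] == "quay.io/fluentd_elasticsearch/fluentd", matches[2] == "v2.9.0"

	matches = dockerTagPattern.FindStringSubmatch("redis")
	// len(matches) == 0, so imageTag falls back to "latest" and ServerName becomes "redis:latest"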
@@ -1,805 +0,0 @@
|
||||
package v2
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/d4l3k/messagediff"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
func TestParse(t *testing.T) {
|
||||
cases := map[string]struct {
|
||||
vulnJSON []byte
|
||||
expected *models.ScanResult
|
||||
}{
|
||||
"image redis": {
|
||||
vulnJSON: redisTrivy,
|
||||
expected: redisSR,
|
||||
},
|
||||
"image struts": {
|
||||
vulnJSON: strutsTrivy,
|
||||
expected: strutsSR,
|
||||
},
|
||||
"image osAndLib": {
|
||||
vulnJSON: osAndLibTrivy,
|
||||
expected: osAndLibSR,
|
||||
},
|
||||
}
|
||||
|
||||
for testcase, v := range cases {
|
||||
actual, err := ParserV2{}.Parse(v.vulnJSON)
|
||||
if err != nil {
|
||||
t.Errorf("%s", err)
|
||||
}
|
||||
|
||||
diff, equal := messagediff.PrettyDiff(
|
||||
v.expected,
|
||||
actual,
|
||||
messagediff.IgnoreStructField("ScannedAt"),
|
||||
messagediff.IgnoreStructField("Title"),
|
||||
messagediff.IgnoreStructField("Summary"),
|
||||
messagediff.IgnoreStructField("LastModified"),
|
||||
messagediff.IgnoreStructField("Published"),
|
||||
)
|
||||
if !equal {
|
||||
t.Errorf("test: %s, diff %s", testcase, diff)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var redisTrivy = []byte(`
|
||||
{
|
||||
"SchemaVersion": 2,
|
||||
"ArtifactName": "redis",
|
||||
"ArtifactType": "container_image",
|
||||
"Metadata": {
|
||||
"OS": {
|
||||
"Family": "debian",
|
||||
"Name": "10.10"
|
||||
},
|
||||
"ImageID": "sha256:ddcca4b8a6f0367b5de2764dfe76b0a4bfa6d75237932185923705da47004347",
|
||||
"DiffIDs": [
|
||||
"sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781",
|
||||
"sha256:b6fc243eaea74d1a41b242da4c3ec5166db80f38c4d57a10ce8860c00d902ace",
|
||||
"sha256:ec92e47b7c52dacc26df07ee13e8e81c099b5a5661ccc97b06692a9c9d01e772",
|
||||
"sha256:4be6d4460d3615186717f21ffc0023b168dce48967d01934bbe31127901d3d5c",
|
||||
"sha256:992463b683270e164936e9c48fa395d05a7b8b5cc0aa208e4fa81aa9158fcae1",
|
||||
"sha256:0083597d42d190ddb86c35587a7b196fe18d79382520544b5f715c1e4792b19a"
|
||||
],
|
||||
"RepoTags": [
|
||||
"redis:latest"
|
||||
],
|
||||
"RepoDigests": [
|
||||
"redis@sha256:66ce9bc742609650afc3de7009658473ed601db4e926a5b16d239303383bacad"
|
||||
],
|
||||
"ImageConfig": {
|
||||
"architecture": "amd64",
|
||||
"container": "fa59f1c2817c9095f8f7272a4ab9b11db0332b33efb3a82c00a3d1fec8763684",
|
||||
"created": "2021-08-17T14:30:06.550779326Z",
|
||||
"docker_version": "20.10.7",
|
||||
"history": [
|
||||
{
|
||||
"created": "2021-08-17T01:24:06Z",
|
||||
"created_by": "/bin/sh -c #(nop) ADD file:87b4e60fe3af680c6815448374365a44e9ea461bc8ade2960b4639c25aed3ba9 in / "
|
||||
},
|
||||
{
|
||||
"created": "2021-08-17T14:30:06Z",
|
||||
"created_by": "/bin/sh -c #(nop) CMD [\"redis-server\"]",
|
||||
"empty_layer": true
|
||||
}
|
||||
],
|
||||
"os": "linux",
|
||||
"rootfs": {
|
||||
"type": "layers",
|
||||
"diff_ids": [
|
||||
"sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781",
|
||||
"sha256:b6fc243eaea74d1a41b242da4c3ec5166db80f38c4d57a10ce8860c00d902ace",
|
||||
"sha256:ec92e47b7c52dacc26df07ee13e8e81c099b5a5661ccc97b06692a9c9d01e772",
|
||||
"sha256:4be6d4460d3615186717f21ffc0023b168dce48967d01934bbe31127901d3d5c",
|
||||
"sha256:992463b683270e164936e9c48fa395d05a7b8b5cc0aa208e4fa81aa9158fcae1",
|
||||
"sha256:0083597d42d190ddb86c35587a7b196fe18d79382520544b5f715c1e4792b19a"
|
||||
]
|
||||
},
|
||||
"config": {
|
||||
"Cmd": [
|
||||
"redis-server"
|
||||
],
|
||||
"Entrypoint": [
|
||||
"docker-entrypoint.sh"
|
||||
],
|
||||
"Env": [
|
||||
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
|
||||
"GOSU_VERSION=1.12",
|
||||
"REDIS_VERSION=6.2.5",
|
||||
"REDIS_DOWNLOAD_URL=http://download.redis.io/releases/redis-6.2.5.tar.gz",
|
||||
"REDIS_DOWNLOAD_SHA=4b9a75709a1b74b3785e20a6c158cab94cf52298aa381eea947a678a60d551ae"
|
||||
],
|
||||
"Image": "sha256:befbd3fc62bffcd0115008969a014faaad07828b2c54b4bcfd2d9fc3aa2508cd",
|
||||
"Volumes": {
|
||||
"/data": {}
|
||||
},
|
||||
"WorkingDir": "/data"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Results": [
|
||||
{
|
||||
"Target": "redis (debian 10.10)",
|
||||
"Class": "os-pkgs",
|
||||
"Type": "debian",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "adduser",
|
||||
"Version": "3.118",
|
||||
"SrcName": "adduser",
|
||||
"SrcVersion": "3.118",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Name": "apt",
|
||||
"Version": "1.8.2.3",
|
||||
"SrcName": "apt",
|
||||
"SrcVersion": "1.8.2.3",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Name": "bsdutils",
|
||||
"Version": "1:2.33.1-0.1",
|
||||
"SrcName": "util-linux",
|
||||
"SrcVersion": "2.33.1-0.1",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Name": "pkgA",
|
||||
"Version": "1:2.33.1-0.1",
|
||||
"SrcName": "util-linux",
|
||||
"SrcVersion": "2.33.1-0.1",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2011-3374",
|
||||
"PkgName": "apt",
|
||||
"InstalledVersion": "1.8.2.3",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
},
|
||||
"SeveritySource": "debian",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2011-3374",
|
||||
"Description": "It was found that apt-key in apt, all versions, do not correctly validate gpg keys with the master keyring, leading to a potential man-in-the-middle attack.",
|
||||
"Severity": "LOW",
|
||||
"CweIDs": [
|
||||
"CWE-347"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:L/A:N",
|
||||
"V2Score": 4.3,
|
||||
"V3Score": 3.7
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://access.redhat.com/security/cve/cve-2011-3374"
|
||||
],
|
||||
"PublishedDate": "2019-11-26T00:15:00Z",
|
||||
"LastModifiedDate": "2021-02-09T16:08:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
`)
|
||||
var redisSR = &models.ScanResult{
|
||||
JSONVersion: 4,
|
||||
ServerName: "redis:latest",
|
||||
Family: "debian",
|
||||
Release: "10.10",
|
||||
ScannedBy: "trivy",
|
||||
ScannedVia: "trivy",
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2011-3374": {
|
||||
CveID: "CVE-2011-3374",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
models.PackageFixStatus{
|
||||
Name: "apt",
|
||||
NotFixedYet: true,
|
||||
FixState: "Affected",
|
||||
FixedIn: "",
|
||||
}},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "",
|
||||
Summary: "It was found that apt-key in apt, all versions, do not correctly validate gpg keys with the master keyring, leading to a potential man-in-the-middle attack.",
|
||||
Cvss3Severity: "LOW",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://access.redhat.com/security/cve/cve-2011-3374"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{},
|
||||
},
|
||||
},
|
||||
LibraryScanners: models.LibraryScanners{},
|
||||
Packages: models.Packages{
|
||||
"apt": models.Package{
|
||||
Name: "apt",
|
||||
Version: "1.8.2.3",
|
||||
},
|
||||
"adduser": models.Package{
|
||||
Name: "adduser",
|
||||
Version: "3.118",
|
||||
},
|
||||
"bsdutils": models.Package{
|
||||
Name: "bsdutils",
|
||||
Version: "1:2.33.1-0.1",
|
||||
},
|
||||
"pkgA": models.Package{
|
||||
Name: "pkgA",
|
||||
Version: "1:2.33.1-0.1",
|
||||
},
|
||||
},
|
||||
SrcPackages: models.SrcPackages{
|
||||
"util-linux": models.SrcPackage{
|
||||
Name: "util-linux",
|
||||
Version: "2.33.1-0.1",
|
||||
BinaryNames: []string{"bsdutils", "pkgA"},
|
||||
},
|
||||
},
|
||||
Optional: map[string]interface{}{
|
||||
"TRIVY_IMAGE_NAME": "redis",
|
||||
"TRIVY_IMAGE_TAG": "latest",
|
||||
},
|
||||
}
|
||||
|
||||
var strutsTrivy = []byte(`
|
||||
{
|
||||
"SchemaVersion": 2,
|
||||
"ArtifactName": "/data/struts-1.2.7/lib",
|
||||
"ArtifactType": "filesystem",
|
||||
"Metadata": {
|
||||
"ImageConfig": {
|
||||
"architecture": "",
|
||||
"created": "0001-01-01T00:00:00Z",
|
||||
"os": "",
|
||||
"rootfs": {
|
||||
"type": "",
|
||||
"diff_ids": null
|
||||
},
|
||||
"config": {}
|
||||
}
|
||||
},
|
||||
"Results": [
|
||||
{
|
||||
"Target": "Java",
|
||||
"Class": "lang-pkgs",
|
||||
"Type": "jar",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "oro:oro",
|
||||
"Version": "2.0.7",
|
||||
"Layer": {}
|
||||
},
|
||||
{
|
||||
"Name": "struts:struts",
|
||||
"Version": "1.2.7",
|
||||
"Layer": {}
|
||||
},
|
||||
{
|
||||
"Name": "commons-beanutils:commons-beanutils",
|
||||
"Version": "1.7.0",
|
||||
"Layer": {}
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2014-0114",
|
||||
"PkgName": "commons-beanutils:commons-beanutils",
|
||||
"InstalledVersion": "1.7.0",
|
||||
"FixedVersion": "1.9.2",
|
||||
"Layer": {},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2014-0114",
|
||||
"Title": "Apache Struts 1: Class Loader manipulation via request parameters",
|
||||
"Description": "Apache Commons BeanUtils, as distributed in lib/commons-beanutils-1.8.0.jar in Apache Struts 1.x through 1.3.10 and in other products requiring commons-beanutils through 1.9.2, does not suppress the class property, which allows remote attackers to \"manipulate\" the ClassLoader and execute arbitrary code via the class parameter, as demonstrated by the passing of this parameter to the getClass method of the ActionForm object in Struts 1.",
|
||||
"Severity": "HIGH",
|
||||
"CweIDs": [
|
||||
"CWE-20"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V2Score": 7.5
|
||||
},
|
||||
"redhat": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V2Score": 7.5
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"http://advisories.mageia.org/MGASA-2014-0219.html"
|
||||
],
|
||||
"PublishedDate": "2014-04-30T10:49:00Z",
|
||||
"LastModifiedDate": "2021-01-26T18:15:00Z"
|
||||
},
|
||||
{
|
||||
"VulnerabilityID": "CVE-2012-1007",
|
||||
"PkgName": "struts:struts",
|
||||
"InstalledVersion": "1.2.7",
|
||||
"Layer": {},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2012-1007",
|
||||
"Title": "struts: multiple XSS flaws",
|
||||
"Description": "Multiple cross-site scripting (XSS) vulnerabilities in Apache Struts 1.3.10 allow remote attackers to inject arbitrary web script or HTML via (1) the name parameter to struts-examples/upload/upload-submit.do, or the message parameter to (2) struts-cookbook/processSimple.do or (3) struts-cookbook/processDyna.do.",
|
||||
"Severity": "MEDIUM",
|
||||
"CweIDs": [
|
||||
"CWE-79"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
|
||||
"V2Score": 4.3
|
||||
},
|
||||
"redhat": {
|
||||
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
|
||||
"V2Score": 4.3
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-1007"
|
||||
],
|
||||
"PublishedDate": "2012-02-07T04:09:00Z",
|
||||
"LastModifiedDate": "2018-10-17T01:29:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}`)
|
||||
|
||||
var strutsSR = &models.ScanResult{
|
||||
JSONVersion: 4,
|
||||
ServerName: "/data/struts-1.2.7/lib",
|
||||
Family: "pseudo",
|
||||
ScannedBy: "trivy",
|
||||
ScannedVia: "trivy",
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2014-0114": {
|
||||
CveID: "CVE-2014-0114",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "Apache Struts 1: Class Loader manipulation via request parameters",
|
||||
Summary: "Apache Commons BeanUtils, as distributed in lib/commons-beanutils-1.8.0.jar in Apache Struts 1.x through 1.3.10 and in other products requiring commons-beanutils through 1.9.2, does not suppress the class property, which allows remote attackers to \"manipulate\" the ClassLoader and execute arbitrary code via the class parameter, as demonstrated by the passing of this parameter to the getClass method of the ActionForm object in Struts 1.",
|
||||
Cvss3Severity: "HIGH",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "http://advisories.mageia.org/MGASA-2014-0219.html"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{
|
||||
models.LibraryFixedIn{
|
||||
Key: "jar",
|
||||
Name: "commons-beanutils:commons-beanutils",
|
||||
FixedIn: "1.9.2",
|
||||
//TODO use Artifactname?
|
||||
Path: "Java",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
},
|
||||
"CVE-2012-1007": {
|
||||
CveID: "CVE-2012-1007",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "struts: multiple XSS flaws",
|
||||
Summary: "Multiple cross-site scripting (XSS) vulnerabilities in Apache Struts 1.3.10 allow remote attackers to inject arbitrary web script or HTML via (1) the name parameter to struts-examples/upload/upload-submit.do, or the message parameter to (2) struts-cookbook/processSimple.do or (3) struts-cookbook/processDyna.do.",
|
||||
Cvss3Severity: "MEDIUM",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-1007"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{
|
||||
models.LibraryFixedIn{
|
||||
Key: "jar",
|
||||
Name: "struts:struts",
|
||||
FixedIn: "",
|
||||
//TODO use Artifactname?
|
||||
Path: "Java",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
},
|
||||
},
|
||||
LibraryScanners: models.LibraryScanners{
|
||||
models.LibraryScanner{
|
||||
Type: "jar",
|
||||
LockfilePath: "Java",
|
||||
Libs: []models.Library{
|
||||
{
|
||||
Name: "commons-beanutils:commons-beanutils",
|
||||
Version: "1.7.0",
|
||||
},
|
||||
{
|
||||
Name: "oro:oro",
|
||||
Version: "2.0.7",
|
||||
},
|
||||
{
|
||||
Name: "struts:struts",
|
||||
Version: "1.2.7",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Packages: models.Packages{},
|
||||
SrcPackages: models.SrcPackages{},
|
||||
Optional: nil,
|
||||
}
|
||||
|
||||
var osAndLibTrivy = []byte(`
|
||||
{
|
||||
"SchemaVersion": 2,
|
||||
"ArtifactName": "quay.io/fluentd_elasticsearch/fluentd:v2.9.0",
|
||||
"ArtifactType": "container_image",
|
||||
"Metadata": {
|
||||
"OS": {
|
||||
"Family": "debian",
|
||||
"Name": "10.2"
|
||||
},
|
||||
"ImageID": "sha256:5a992077baba51b97f27591a10d54d2f2723dc9c81a3fe419e261023f2554933",
|
||||
"DiffIDs": [
|
||||
"sha256:25165eb51d15842f870f97873e0a58409d5e860e6108e3dd829bd10e484c0065"
|
||||
],
|
||||
"RepoTags": [
|
||||
"quay.io/fluentd_elasticsearch/fluentd:v2.9.0"
|
||||
],
|
||||
"RepoDigests": [
|
||||
"quay.io/fluentd_elasticsearch/fluentd@sha256:54716d825ec9791ffb403ac17a1e82159c98ac6161e02b2a054595ad01aa6726"
|
||||
],
|
||||
"ImageConfig": {
|
||||
"architecture": "amd64",
|
||||
"container": "232f3fc7ddffd71dc3ff52c6c0c3a5feea2f51acffd9b53850a8fc6f1a15319a",
|
||||
"created": "2020-03-04T13:59:39.161374106Z",
|
||||
"docker_version": "19.03.4",
|
||||
"history": [
|
||||
{
|
||||
"created": "2020-03-04T13:59:39.161374106Z",
|
||||
"created_by": "/bin/sh -c #(nop) CMD [\"/run.sh\"]",
|
||||
"empty_layer": true
|
||||
}
|
||||
],
|
||||
"os": "linux",
|
||||
"rootfs": {
|
||||
"type": "layers",
|
||||
"diff_ids": [
|
||||
"sha256:25165eb51d15842f870f97873e0a58409d5e860e6108e3dd829bd10e484c0065"
|
||||
]
|
||||
},
|
||||
"config": {
|
||||
"Cmd": [
|
||||
"/run.sh"
|
||||
],
|
||||
"Env": [
|
||||
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
|
||||
"LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.2"
|
||||
],
|
||||
"Image": "sha256:2a538358cddc4824e9eff1531e0c63ae5e3cda85d2984c647df9b1c816b9b86b",
|
||||
"ExposedPorts": {
|
||||
"80/tcp": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Results": [
|
||||
{
|
||||
"Target": "quay.io/fluentd_elasticsearch/fluentd:v2.9.0 (debian 10.2)",
|
||||
"Class": "os-pkgs",
|
||||
"Type": "debian",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "libgnutls30",
|
||||
"Version": "3.6.7-4",
|
||||
"SrcName": "gnutls28",
|
||||
"SrcVersion": "3.6.7-4",
|
||||
"Layer": {
|
||||
"Digest": "sha256:000eee12ec04cc914bf96e8f5dee7767510c2aca3816af6078bd9fbe3150920c",
|
||||
"DiffID": "sha256:831c5620387fb9efec59fc82a42b948546c6be601e3ab34a87108ecf852aa15f"
|
||||
}
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2021-20231",
|
||||
"PkgName": "libgnutls30",
|
||||
"InstalledVersion": "3.6.7-4",
|
||||
"FixedVersion": "3.6.7-4+deb10u7",
|
||||
"Layer": {
|
||||
"Digest": "sha256:000eee12ec04cc914bf96e8f5dee7767510c2aca3816af6078bd9fbe3150920c",
|
||||
"DiffID": "sha256:831c5620387fb9efec59fc82a42b948546c6be601e3ab34a87108ecf852aa15f"
|
||||
},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2021-20231",
|
||||
"Title": "gnutls: Use after free in client key_share extension",
|
||||
"Description": "A flaw was found in gnutls. A use after free issue in client sending key_share extension may lead to memory corruption and other consequences.",
|
||||
"Severity": "CRITICAL",
|
||||
"CweIDs": [
|
||||
"CWE-416"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
|
||||
"V2Score": 7.5,
|
||||
"V3Score": 9.8
|
||||
},
|
||||
"redhat": {
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:N/A:L",
|
||||
"V3Score": 3.7
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://bugzilla.redhat.com/show_bug.cgi?id=1922276"
|
||||
],
|
||||
"PublishedDate": "2021-03-12T19:15:00Z",
|
||||
"LastModifiedDate": "2021-06-01T14:07:00Z"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"Target": "Ruby",
|
||||
"Class": "lang-pkgs",
|
||||
"Type": "gemspec",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "activesupport",
|
||||
"Version": "6.0.2.1",
|
||||
"License": "MIT",
|
||||
"Layer": {
|
||||
"Digest": "sha256:a8877cad19f14a7044524a145ce33170085441a7922458017db1631dcd5f7602",
|
||||
"DiffID": "sha256:75e43d55939745950bc3f8fad56c5834617c4339f0f54755e69a0dd5372624e9"
|
||||
},
|
||||
"FilePath": "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec"
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2020-8165",
|
||||
"PkgName": "activesupport",
|
||||
"PkgPath": "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec",
|
||||
"InstalledVersion": "6.0.2.1",
|
||||
"FixedVersion": "6.0.3.1, 5.2.4.3",
|
||||
"Layer": {
|
||||
"Digest": "sha256:a8877cad19f14a7044524a145ce33170085441a7922458017db1631dcd5f7602",
|
||||
"DiffID": "sha256:75e43d55939745950bc3f8fad56c5834617c4339f0f54755e69a0dd5372624e9"
|
||||
},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2020-8165",
|
||||
"Title": "rubygem-activesupport: potentially unintended unmarshalling of user-provided objects in MemCacheStore and RedisCacheStore",
|
||||
"Description": "A deserialization of untrusted data vulnernerability exists in rails \u003c 5.2.4.3, rails \u003c 6.0.3.1 that can allow an attacker to unmarshal user-provided objects in MemCacheStore and RedisCacheStore potentially resulting in an RCE.",
|
||||
"Severity": "CRITICAL",
|
||||
"CweIDs": [
|
||||
"CWE-502"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
|
||||
"V2Score": 7.5,
|
||||
"V3Score": 9.8
|
||||
},
|
||||
"redhat": {
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
|
||||
"V3Score": 9.8
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://www.debian.org/security/2020/dsa-4766"
|
||||
],
|
||||
"PublishedDate": "2020-06-19T18:15:00Z",
|
||||
"LastModifiedDate": "2020-10-17T12:15:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}`)
|
||||
|
||||
var osAndLibSR = &models.ScanResult{
|
||||
JSONVersion: 4,
|
||||
ServerName: "quay.io/fluentd_elasticsearch/fluentd:v2.9.0",
|
||||
Family: "debian",
|
||||
Release: "10.2",
|
||||
ScannedBy: "trivy",
|
||||
ScannedVia: "trivy",
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2021-20231": {
|
||||
CveID: "CVE-2021-20231",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
models.PackageFixStatus{
|
||||
Name: "libgnutls30",
|
||||
NotFixedYet: false,
|
||||
FixState: "",
|
||||
FixedIn: "3.6.7-4+deb10u7",
|
||||
}},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "gnutls: Use after free in client key_share extension",
|
||||
Summary: "A flaw was found in gnutls. A use after free issue in client sending key_share extension may lead to memory corruption and other consequences.",
|
||||
Cvss3Severity: "CRITICAL",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://bugzilla.redhat.com/show_bug.cgi?id=1922276"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{},
|
||||
},
|
||||
"CVE-2020-8165": {
|
||||
CveID: "CVE-2020-8165",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "rubygem-activesupport: potentially unintended unmarshalling of user-provided objects in MemCacheStore and RedisCacheStore",
|
||||
Summary: "A deserialization of untrusted data vulnernerability exists in rails \u003c 5.2.4.3, rails \u003c 6.0.3.1 that can allow an attacker to unmarshal user-provided objects in MemCacheStore and RedisCacheStore potentially resulting in an RCE.",
|
||||
Cvss3Severity: "CRITICAL",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://www.debian.org/security/2020/dsa-4766"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{
|
||||
models.LibraryFixedIn{
|
||||
Key: "gemspec",
|
||||
Name: "activesupport",
|
||||
FixedIn: "6.0.3.1, 5.2.4.3",
|
||||
Path: "Ruby",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
LibraryScanners: models.LibraryScanners{
|
||||
models.LibraryScanner{
|
||||
Type: "gemspec",
|
||||
LockfilePath: "Ruby",
|
||||
Libs: []models.Library{
|
||||
{
|
||||
Name: "activesupport",
|
||||
Version: "6.0.2.1",
|
||||
FilePath: "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Packages: models.Packages{
|
||||
"libgnutls30": models.Package{
|
||||
Name: "libgnutls30",
|
||||
Version: "3.6.7-4",
|
||||
},
|
||||
},
|
||||
SrcPackages: models.SrcPackages{
|
||||
"gnutls28": models.SrcPackage{
|
||||
Name: "gnutls28",
|
||||
Version: "3.6.7-4",
|
||||
BinaryNames: []string{"libgnutls30"},
|
||||
},
|
||||
},
|
||||
Optional: map[string]interface{}{
|
||||
"TRIVY_IMAGE_NAME": "quay.io/fluentd_elasticsearch/fluentd",
|
||||
"TRIVY_IMAGE_TAG": "v2.9.0",
|
||||
},
|
||||
}
|
||||
|
||||
func TestParseError(t *testing.T) {
|
||||
cases := map[string]struct {
|
||||
vulnJSON []byte
|
||||
expected error
|
||||
}{
|
||||
"image hello-world": {
|
||||
vulnJSON: helloWorldTrivy,
|
||||
expected: xerrors.Errorf("scanned images or libraries are not supported by Trivy. see https://aquasecurity.github.io/trivy/dev/vulnerability/detection/os/, https://aquasecurity.github.io/trivy/dev/vulnerability/detection/language/"),
|
||||
},
|
||||
}
|
||||
|
||||
for testcase, v := range cases {
|
||||
_, err := ParserV2{}.Parse(v.vulnJSON)
|
||||
|
||||
diff, equal := messagediff.PrettyDiff(
|
||||
v.expected,
|
||||
err,
|
||||
messagediff.IgnoreStructField("frame"),
|
||||
)
|
||||
if !equal {
|
||||
t.Errorf("test: %s, diff %s", testcase, diff)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var helloWorldTrivy = []byte(`
|
||||
{
|
||||
"SchemaVersion": 2,
|
||||
"ArtifactName": "hello-world:latest",
|
||||
"ArtifactType": "container_image",
|
||||
"Metadata": {
|
||||
"ImageID": "sha256:feb5d9fea6a5e9606aa995e879d862b825965ba48de054caab5ef356dc6b3412",
|
||||
"DiffIDs": [
|
||||
"sha256:e07ee1baac5fae6a26f30cabfe54a36d3402f96afda318fe0a96cec4ca393359"
|
||||
],
|
||||
"RepoTags": [
|
||||
"hello-world:latest"
|
||||
],
|
||||
"RepoDigests": [
|
||||
"hello-world@sha256:97a379f4f88575512824f3b352bc03cd75e239179eea0fecc38e597b2209f49a"
|
||||
],
|
||||
"ImageConfig": {
|
||||
"architecture": "amd64",
|
||||
"container": "8746661ca3c2f215da94e6d3f7dfdcafaff5ec0b21c9aff6af3dc379a82fbc72",
|
||||
"created": "2021-09-23T23:47:57.442225064Z",
|
||||
"docker_version": "20.10.7",
|
||||
"history": [
|
||||
{
|
||||
"created": "2021-09-23T23:47:57Z",
|
||||
"created_by": "/bin/sh -c #(nop) COPY file:50563a97010fd7ce1ceebd1fa4f4891ac3decdf428333fb2683696f4358af6c2 in / "
|
||||
},
|
||||
{
|
||||
"created": "2021-09-23T23:47:57Z",
|
||||
"created_by": "/bin/sh -c #(nop) CMD [\"/hello\"]",
|
||||
"empty_layer": true
|
||||
}
|
||||
],
|
||||
"os": "linux",
|
||||
"rootfs": {
|
||||
"type": "layers",
|
||||
"diff_ids": [
|
||||
"sha256:e07ee1baac5fae6a26f30cabfe54a36d3402f96afda318fe0a96cec4ca393359"
|
||||
]
|
||||
},
|
||||
"config": {
|
||||
"Cmd": [
|
||||
"/hello"
|
||||
],
|
||||
"Env": [
|
||||
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
|
||||
],
|
||||
"Image": "sha256:b9935d4e8431fb1a7f0989304ec86b3329a99a25f5efdc7f09f3f8c41434ca6d"
|
||||
}
|
||||
}
|
||||
}
|
||||
}`)
|
||||
@@ -1,200 +0,0 @@
package pkg

import (
	"sort"
	"time"

	"github.com/aquasecurity/trivy/pkg/fanal/analyzer/os"
	"github.com/aquasecurity/trivy/pkg/types"

	"github.com/future-architect/vuls/models"
)

// Convert converts Trivy scan results into a Vuls ScanResult
func Convert(results types.Results) (result *models.ScanResult, err error) {
	scanResult := &models.ScanResult{
		JSONVersion: models.JSONVersion,
		ScannedCves: models.VulnInfos{},
	}

	pkgs := models.Packages{}
	srcPkgs := models.SrcPackages{}
	vulnInfos := models.VulnInfos{}
	uniqueLibraryScannerPaths := map[string]models.LibraryScanner{}
	for _, trivyResult := range results {
		for _, vuln := range trivyResult.Vulnerabilities {
			if _, ok := vulnInfos[vuln.VulnerabilityID]; !ok {
				vulnInfos[vuln.VulnerabilityID] = models.VulnInfo{
					CveID: vuln.VulnerabilityID,
					Confidences: models.Confidences{
						{
							Score:           100,
							DetectionMethod: models.TrivyMatchStr,
						},
					},
					AffectedPackages: models.PackageFixStatuses{},
					CveContents:      models.CveContents{},
					LibraryFixedIns:  models.LibraryFixedIns{},
					// VulnType : "",
				}
			}
			vulnInfo := vulnInfos[vuln.VulnerabilityID]
			var notFixedYet bool
			fixState := ""
			if len(vuln.FixedVersion) == 0 {
				notFixedYet = true
				fixState = "Affected"
			}
			var references models.References
			for _, reference := range vuln.References {
				references = append(references, models.Reference{
					Source: "trivy",
					Link:   reference,
				})
			}

			sort.Slice(references, func(i, j int) bool {
				return references[i].Link < references[j].Link
			})

			var published time.Time
			if vuln.PublishedDate != nil {
				published = *vuln.PublishedDate
			}

			var lastModified time.Time
			if vuln.LastModifiedDate != nil {
				lastModified = *vuln.LastModifiedDate
			}

			vulnInfo.CveContents = models.CveContents{
				models.Trivy: []models.CveContent{{
					Cvss3Severity: vuln.Severity,
					References:    references,
					Title:         vuln.Title,
					Summary:       vuln.Description,
					Published:     published,
					LastModified:  lastModified,
				}},
			}
			// OS package findings become affected packages; anything else is treated as a library finding
			if isTrivySupportedOS(trivyResult.Type) {
				pkgs[vuln.PkgName] = models.Package{
					Name:    vuln.PkgName,
					Version: vuln.InstalledVersion,
				}
				vulnInfo.AffectedPackages = append(vulnInfo.AffectedPackages, models.PackageFixStatus{
					Name:        vuln.PkgName,
					NotFixedYet: notFixedYet,
					FixState:    fixState,
					FixedIn:     vuln.FixedVersion,
				})
			} else {
				vulnInfo.LibraryFixedIns = append(vulnInfo.LibraryFixedIns, models.LibraryFixedIn{
					Key:     trivyResult.Type,
					Name:    vuln.PkgName,
					Path:    trivyResult.Target,
					FixedIn: vuln.FixedVersion,
				})
				libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
				libScanner.Type = trivyResult.Type
				libScanner.Libs = append(libScanner.Libs, models.Library{
					Name:     vuln.PkgName,
					Version:  vuln.InstalledVersion,
					FilePath: vuln.PkgPath,
				})
				uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
			}
			vulnInfos[vuln.VulnerabilityID] = vulnInfo
		}

		// --list-all-pkgs flag of trivy will output all installed packages, so collect them.
		if trivyResult.Class == types.ClassOSPkg {
			for _, p := range trivyResult.Packages {
				pkgs[p.Name] = models.Package{
					Name:    p.Name,
					Version: p.Version,
				}
				if p.Name != p.SrcName {
					if v, ok := srcPkgs[p.SrcName]; !ok {
						srcPkgs[p.SrcName] = models.SrcPackage{
							Name:        p.SrcName,
							Version:     p.SrcVersion,
							BinaryNames: []string{p.Name},
						}
					} else {
						v.AddBinaryName(p.Name)
						srcPkgs[p.SrcName] = v
					}
				}
			}
		} else if trivyResult.Class == types.ClassLangPkg {
			libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
			libScanner.Type = trivyResult.Type
			for _, p := range trivyResult.Packages {
				libScanner.Libs = append(libScanner.Libs, models.Library{
					Name:     p.Name,
					Version:  p.Version,
					FilePath: p.FilePath,
				})
			}
			uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
		}
	}

	// flatten and unique libraries
	libraryScanners := make([]models.LibraryScanner, 0, len(uniqueLibraryScannerPaths))
	for path, v := range uniqueLibraryScannerPaths {
		uniqueLibrary := map[string]models.Library{}
		for _, lib := range v.Libs {
			uniqueLibrary[lib.Name+lib.Version] = lib
		}

		var libraries []models.Library
		for _, library := range uniqueLibrary {
			libraries = append(libraries, library)
		}

		sort.Slice(libraries, func(i, j int) bool {
			return libraries[i].Name < libraries[j].Name
		})

		libscanner := models.LibraryScanner{
			Type:         v.Type,
			LockfilePath: path,
			Libs:         libraries,
		}
		libraryScanners = append(libraryScanners, libscanner)
	}
	sort.Slice(libraryScanners, func(i, j int) bool {
		return libraryScanners[i].LockfilePath < libraryScanners[j].LockfilePath
	})
	scanResult.ScannedCves = vulnInfos
	scanResult.Packages = pkgs
	scanResult.SrcPackages = srcPkgs
	scanResult.LibraryScanners = libraryScanners
	return scanResult, nil
}

func isTrivySupportedOS(family string) bool {
	supportedFamilies := map[string]struct{}{
		os.RedHat:             {},
		os.Debian:             {},
		os.Ubuntu:             {},
		os.CentOS:             {},
		os.Rocky:              {},
		os.Alma:               {},
		os.Fedora:             {},
		os.Amazon:             {},
		os.Oracle:             {},
		os.Windows:            {},
		os.OpenSUSE:           {},
		os.OpenSUSELeap:       {},
		os.OpenSUSETumbleweed: {},
		os.SLES:               {},
		os.Photon:             {},
		os.Alpine:             {},
	}
	_, ok := supportedFamilies[family]
	return ok
}
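In short, Convert routes each finding through isTrivySupportedOS(trivyResult.Type): OS package results (such as the debian targets in the fixtures above) populate Packages, SrcPackages, and AffectedPackages, while language results (jar, gemspec) are grouped per Target into LibraryScanners and LibraryFixedIns. A minimal, hypothetical call site inside this package, assuming the report was already unmarshalled as in ParserV2.Parse above:

	// report is an already-unmarshalled types.Report (see ParserV2.Parse)
	scanResult, err := Convert(report.Results)
	if err != nil {
		// handle the conversion error
	}
	// scanResult.Packages now holds OS packages; scanResult.LibraryScanners holds the per-target language libraries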
4052
cti/cti.go
4052
cti/cti.go
File diff suppressed because it is too large
129
cwe/cwe.go
129
cwe/cwe.go
@@ -1,129 +0,0 @@
|
||||
package cwe
|
||||
|
||||
// CweTopTwentyfives has CWE-ID in CWE Top 25
|
||||
var CweTopTwentyfives = map[string]map[string]string{
|
||||
"2019": cweTopTwentyfive2019,
|
||||
"2020": cweTopTwentyfive2020,
|
||||
"2021": cweTopTwentyfive2021,
|
||||
"2022": cweTopTwentyfive2022,
|
||||
}
|
||||
|
||||
var cweTopTwentyfive2019 = map[string]string{
|
||||
"119": "1",
|
||||
"79": "2",
|
||||
"20": "3",
|
||||
"200": "4",
|
||||
"125": "5",
|
||||
"89": "6",
|
||||
"416": "7",
|
||||
"190": "8",
|
||||
"352": "9",
|
||||
"22": "10",
|
||||
"78": "11",
|
||||
"787": "12",
|
||||
"287": "13",
|
||||
"476": "14",
|
||||
"732": "16",
|
||||
"434": "16",
|
||||
"611": "17",
|
||||
"94": "18",
|
||||
"798": "19",
|
||||
"400": "20",
|
||||
"772": "21",
|
||||
"426": "22",
|
||||
"502": "23",
|
||||
"269": "24",
|
||||
"295": "25",
|
||||
}
|
||||
|
||||
var cweTopTwentyfive2020 = map[string]string{
|
||||
"79": "1",
|
||||
"787": "2",
|
||||
"20": "3",
|
||||
"125": "4",
|
||||
"119": "5",
|
||||
"89": "6",
|
||||
"200": "7",
|
||||
"416": "8",
|
||||
"352": "9",
|
||||
"78": "10",
|
||||
"190": "11",
|
||||
"22": "12",
|
||||
"476": "13",
|
||||
"287": "14",
|
||||
"434": "16",
|
||||
"732": "16",
|
||||
"94": "17",
|
||||
"522": "18",
|
||||
"611": "19",
|
||||
"798": "20",
|
||||
"502": "21",
|
||||
"269": "22",
|
||||
"400": "23",
|
||||
"306": "24",
|
||||
"862": "25",
|
||||
}
|
||||
|
||||
var cweTopTwentyfive2021 = map[string]string{
|
||||
"787": "1",
|
||||
"79": "2",
|
||||
"125": "3",
|
||||
"20": "4",
|
||||
"78": "5",
|
||||
"89": "6",
|
||||
"416": "7",
|
||||
"22": "8",
|
||||
"352": "9",
|
||||
"434": "10",
|
||||
"306": "11",
|
||||
"190": "12",
|
||||
"502": "13",
|
||||
"287": "14",
|
||||
"476": "16",
|
||||
"798": "16",
|
||||
"119": "17",
|
||||
"862": "18",
|
||||
"276": "19",
|
||||
"200": "20",
|
||||
"522": "21",
|
||||
"732": "22",
|
||||
"611": "23",
|
||||
"918": "24",
|
||||
"77": "25",
|
||||
}
|
||||
|
||||
var cweTopTwentyfive2022 = map[string]string{
|
||||
"787": "1",
|
||||
"79": "2",
|
||||
"89": "3",
|
||||
"20": "4",
|
||||
"125": "5",
|
||||
"78": "6",
|
||||
"416": "7",
|
||||
"22": "8",
|
||||
"352": "9",
|
||||
"434": "10",
|
||||
"476": "11",
|
||||
"502": "12",
|
||||
"190": "13",
|
||||
"287": "14",
|
||||
"798": "16",
|
||||
"862": "16",
|
||||
"77": "17",
|
||||
"306": "18",
|
||||
"119": "19",
|
||||
"276": "20",
|
||||
"918": "21",
|
||||
"362": "22",
|
||||
"400": "23",
|
||||
"611": "24",
|
||||
"94": "25",
|
||||
}
|
||||
|
||||
// CweTopTwentyfiveURLs has CWE Top25 links
|
||||
var CweTopTwentyfiveURLs = map[string]string{
|
||||
"2019": "https://cwe.mitre.org/top25/archive/2019/2019_cwe_top25.html",
|
||||
"2020": "https://cwe.mitre.org/top25/archive/2020/2020_cwe_top25.html",
|
||||
"2021": "https://cwe.mitre.org/top25/archive/2021/2021_cwe_top25.html",
|
||||
"2022": "https://cwe.mitre.org/top25/archive/2022/2022_cwe_top25.html",
|
||||
}
|
||||
305
cwe/owasp.go
305
cwe/owasp.go
@@ -1,305 +0,0 @@
|
||||
package cwe
|
||||
|
||||
// OwaspTopTens has CWE-ID in OWASP Top 10
|
||||
var OwaspTopTens = map[string]map[string]string{
|
||||
"2017": owaspTopTen2017,
|
||||
"2021": owaspTopTen2021,
|
||||
}
|
||||
|
||||
var owaspTopTen2017 = map[string]string{
|
||||
"77": "1",
|
||||
"89": "1",
|
||||
"564": "1",
|
||||
"917": "1",
|
||||
|
||||
"287": "2",
|
||||
"384": "2",
|
||||
|
||||
"220": "3",
|
||||
"310": "3",
|
||||
"312": "3",
|
||||
"319": "3",
|
||||
"326": "3",
|
||||
"359": "3",
|
||||
|
||||
"611": "4",
|
||||
|
||||
"22": "5",
|
||||
"284": "5",
|
||||
"285": "5",
|
||||
"639": "5",
|
||||
|
||||
"2": "6",
|
||||
"16": "6",
|
||||
"388": "6",
|
||||
|
||||
"79": "7",
|
||||
|
||||
"502": "8",
|
||||
|
||||
"223": "10",
|
||||
"778": "10",
|
||||
}
|
||||
|
||||
var owaspTopTen2021 = map[string]string{
|
||||
"22": "1",
|
||||
"23": "1",
|
||||
"35": "1",
|
||||
"59": "1",
|
||||
"200": "1",
|
||||
"201": "1",
|
||||
"219": "1",
|
||||
"264": "1",
|
||||
"275": "1",
|
||||
"276": "1",
|
||||
"284": "1",
|
||||
"285": "1",
|
||||
"352": "1",
|
||||
"359": "1",
|
||||
"377": "1",
|
||||
"402": "1",
|
||||
"425": "1",
|
||||
"441": "1",
|
||||
"497": "1",
|
||||
"538": "1",
|
||||
"540": "1",
|
||||
"552": "1",
|
||||
"566": "1",
|
||||
"601": "1",
|
||||
"639": "1",
|
||||
"651": "1",
|
||||
"668": "1",
|
||||
"706": "1",
|
||||
"862": "1",
|
||||
"863": "1",
|
||||
"913": "1",
|
||||
"922": "1",
|
||||
"1275": "1",
|
||||
|
||||
"261": "2",
|
||||
"296": "2",
|
||||
"310": "2",
|
||||
"319": "2",
|
||||
"321": "2",
|
||||
"322": "2",
|
||||
"323": "2",
|
||||
"324": "2",
|
||||
"325": "2",
|
||||
"326": "2",
|
||||
"327": "2",
|
||||
"328": "2",
|
||||
"329": "2",
|
||||
"330": "2",
|
||||
"331": "2",
|
||||
"335": "2",
|
||||
"336": "2",
|
||||
"337": "2",
|
||||
"338": "2",
|
||||
"340": "2",
|
||||
"347": "2",
|
||||
"523": "2",
|
||||
"720": "2",
|
||||
"757": "2",
|
||||
"759": "2",
|
||||
"760": "2",
|
||||
"780": "2",
|
||||
"818": "2",
|
||||
"916": "2",
|
||||
|
||||
"20": "3",
|
||||
"74": "3",
|
||||
"75": "3",
|
||||
"77": "3",
|
||||
"78": "3",
|
||||
"79": "3",
|
||||
"80": "3",
|
||||
"83": "3",
|
||||
"87": "3",
|
||||
"88": "3",
|
||||
"89": "3",
|
||||
"90": "3",
|
||||
"91": "3",
|
||||
"93": "3",
|
||||
"94": "3",
|
||||
"95": "3",
|
||||
"96": "3",
|
||||
"97": "3",
|
||||
"98": "3",
|
||||
"99": "3",
|
||||
"100": "3",
|
||||
"113": "3",
|
||||
"116": "3",
|
||||
"138": "3",
|
||||
"184": "3",
|
||||
"470": "3",
|
||||
"471": "3",
|
||||
"564": "3",
|
||||
"610": "3",
|
||||
"643": "3",
|
||||
"644": "3",
|
||||
"652": "3",
|
||||
"917": "3",
|
||||
|
||||
"73": "4",
|
||||
"183": "4",
|
||||
"209": "4",
|
||||
"213": "4",
|
||||
"235": "4",
|
||||
"256": "4",
|
||||
"257": "4",
|
||||
"266": "4",
|
||||
"269": "4",
|
||||
"280": "4",
|
||||
"311": "4",
|
||||
"312": "4",
|
||||
"313": "4",
|
||||
"316": "4",
|
||||
"419": "4",
|
||||
"430": "4",
|
||||
"434": "4",
|
||||
"444": "4",
|
||||
"451": "4",
|
||||
"472": "4",
|
||||
"501": "4",
|
||||
"522": "4",
|
||||
"525": "4",
|
||||
"539": "4",
|
||||
"579": "4",
|
||||
"598": "4",
|
||||
"602": "4",
|
||||
"642": "4",
|
||||
"646": "4",
|
||||
"650": "4",
|
||||
"653": "4",
|
||||
"656": "4",
|
||||
"657": "4",
|
||||
"799": "4",
|
||||
"807": "4",
|
||||
"840": "4",
|
||||
"841": "4",
|
||||
"927": "4",
|
||||
"1021": "4",
|
||||
"1173": "4",
|
||||
|
||||
"2": "5",
|
||||
"11": "5",
|
||||
"13": "5",
|
||||
"15": "5",
|
||||
"16": "5",
|
||||
"260": "5",
|
||||
"315": "5",
|
||||
"520": "5",
|
||||
"526": "5",
|
||||
"537": "5",
|
||||
"541": "5",
|
||||
"547": "5",
|
||||
"611": "5",
|
||||
"614": "5",
|
||||
"756": "5",
|
||||
"776": "5",
|
||||
"942": "5",
|
||||
"1004": "5",
|
||||
"1032": "5",
|
||||
"1174": "5",
|
||||
|
||||
"937": "6",
|
||||
"1035": "6",
|
||||
"1104": "6",
|
||||
|
||||
"255": "7",
|
||||
"259": "7",
|
||||
"287": "7",
|
||||
"288": "7",
|
||||
"290": "7",
|
||||
"294": "7",
|
||||
"295": "7",
|
||||
"297": "7",
|
||||
"300": "7",
|
||||
"302": "7",
|
||||
"304": "7",
|
||||
"306": "7",
|
||||
"307": "7",
|
||||
"346": "7",
|
||||
"384": "7",
|
||||
"521": "7",
|
||||
"613": "7",
|
||||
"620": "7",
|
||||
"640": "7",
|
||||
"798": "7",
|
||||
"940": "7",
|
||||
"1216": "7",
|
||||
|
||||
"345": "8",
|
||||
"353": "8",
|
||||
"426": "8",
|
||||
"494": "8",
|
||||
"502": "8",
|
||||
"565": "8",
|
||||
"784": "8",
|
||||
"829": "8",
|
||||
"830": "8",
|
||||
"915": "8",
|
||||
|
||||
"117": "9",
|
||||
"223": "9",
|
||||
"532": "9",
|
||||
"778": "9",
|
||||
|
||||
"918": "10",
|
||||
}
|
||||
|
||||
// OwaspTopTenURLsEn has GitHub links
|
||||
var OwaspTopTenURLsEn = map[string]map[string]string{
|
||||
"2017": {
|
||||
"1": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa1-injection.md",
|
||||
"2": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa2-broken-authentication.md",
|
||||
"3": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa3-sensitive-data-disclosure.md",
|
||||
"4": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa4-xxe.md",
|
||||
"5": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa5-broken-access-control.md",
|
||||
"6": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa6-security-misconfiguration.md",
|
||||
"7": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa7-xss.md",
|
||||
"8": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa8-insecure-deserialization.md",
|
||||
"9": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa9-known-vulns.md",
|
||||
"10": "https://github.com/OWASP/Top10/blob/master/2017/en/0xaa-logging-detection-response.md",
|
||||
},
|
||||
"2021": {
|
||||
"1": "https://github.com/OWASP/Top10/blob/master/2021/docs/A01_2021-Broken_Access_Control.md",
|
||||
"2": "https://github.com/OWASP/Top10/blob/master/2021/docs/A02_2021-Cryptographic_Failures.md",
|
||||
"3": "https://github.com/OWASP/Top10/blob/master/2021/docs/A03_2021-Injection.md",
|
||||
"4": "https://github.com/OWASP/Top10/blob/master/2021/docs/A04_2021-Insecure_Design.md",
|
||||
"5": "https://github.com/OWASP/Top10/blob/master/2021/docs/A05_2021-Security_Misconfiguration.md",
|
||||
"6": "https://github.com/OWASP/Top10/blob/master/2021/docs/A06_2021-Vulnerable_and_Outdated_Components.md",
|
||||
"7": "https://github.com/OWASP/Top10/blob/master/2021/docs/A07_2021-Identification_and_Authentication_Failures.md",
|
||||
"8": "https://github.com/OWASP/Top10/blob/master/2021/docs/A08_2021-Software_and_Data_Integrity_Failures.md",
|
||||
"9": "https://github.com/OWASP/Top10/blob/master/2021/docs/A09_2021-Security_Logging_and_Monitoring_Failures.md",
|
||||
"10": "https://github.com/OWASP/Top10/blob/master/2021/docs/A10_2021-Server-Side_Request_Forgery_(SSRF).md",
|
||||
},
|
||||
}
|
||||
|
||||
// OwaspTopTenURLsJa has GitHub links
|
||||
var OwaspTopTenURLsJa = map[string]map[string]string{
|
||||
"2017": {
|
||||
"1": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa1-injection.md",
|
||||
"2": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa2-broken-authentication.md",
|
||||
"3": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa3-sensitive-data-disclosure.md",
|
||||
"4": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa4-xxe.md",
|
||||
"5": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa5-broken-access-control.md",
|
||||
"6": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa6-security-misconfiguration.md",
|
||||
"7": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa7-xss.md",
|
||||
"8": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa8-insecure-deserialization.md",
|
||||
"9": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa9-known-vulns.md",
|
||||
"10": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xaa-logging-detection-response.md",
|
||||
},
|
||||
"2021": {
|
||||
"1": "https://github.com/OWASP/Top10/blob/master/2021/docs/A01_2021-Broken_Access_Control.ja.md",
|
||||
"2": "https://github.com/OWASP/Top10/blob/master/2021/docs/A02_2021-Cryptographic_Failures.ja.md",
|
||||
"3": "https://github.com/OWASP/Top10/blob/master/2021/docs/A03_2021-Injection.ja.md",
|
||||
"4": "https://github.com/OWASP/Top10/blob/master/2021/docs/A04_2021-Insecure_Design.ja.md",
|
||||
"5": "https://github.com/OWASP/Top10/blob/master/2021/docs/A05_2021-Security_Misconfiguration.ja.md",
|
||||
"6": "https://github.com/OWASP/Top10/blob/master/2021/docs/A06_2021-Vulnerable_and_Outdated_Components.ja.md",
|
||||
"7": "https://github.com/OWASP/Top10/blob/master/2021/docs/A07_2021-Identification_and_Authentication_Failures.ja.md",
|
||||
"8": "https://github.com/OWASP/Top10/blob/master/2021/docs/A08_2021-Software_and_Data_Integrity_Failures.ja.md",
|
||||
"9": "https://github.com/OWASP/Top10/blob/master/2021/docs/A09_2021-Security_Logging_and_Monitoring_Failures.ja.md",
|
||||
"10": "https://github.com/OWASP/Top10/blob/master/2021/docs/A10_2021-Server-Side_Request_Forgery_(SSRF).ja.md",
|
||||
},
|
||||
}
|
||||
99
cwe/sans.go
99
cwe/sans.go
@@ -1,99 +0,0 @@
|
||||
package cwe
|
||||
|
||||
// SansTopTwentyfives has CWE-ID in CWE/SANS Top 25
|
||||
var SansTopTwentyfives = map[string]map[string]string{
|
||||
"2010": sansTopTwentyfive2010,
|
||||
"2011": sansTopTwentyfive2011,
|
||||
"latest": sansTopTwentyfiveLatest,
|
||||
}
|
||||
|
||||
var sansTopTwentyfive2010 = map[string]string{
|
||||
"79": "1",
|
||||
"89": "2",
|
||||
"120": "3",
|
||||
"352": "4",
|
||||
"285": "5",
|
||||
"807": "6",
|
||||
"22": "7",
|
||||
"434": "8",
|
||||
"78": "9",
|
||||
"311": "10",
|
||||
"798": "11",
|
||||
"805": "12",
|
||||
"98": "13",
|
||||
"129": "14",
|
||||
"754": "15",
|
||||
"209": "16",
|
||||
"190": "17",
|
||||
"131": "18",
|
||||
"306": "19",
|
||||
"494": "20",
|
||||
"732": "21",
|
||||
"770": "22",
|
||||
"601": "23",
|
||||
"327": "24",
|
||||
"362": "25",
|
||||
}
|
||||
|
||||
var sansTopTwentyfive2011 = map[string]string{
|
||||
"89": "1",
|
||||
"78": "2",
|
||||
"120": "3",
|
||||
"79": "4",
|
||||
"306": "5",
|
||||
"862": "6",
|
||||
"798": "7",
|
||||
"311": "8",
|
||||
"434": "9",
|
||||
"807": "10",
|
||||
"250": "11",
|
||||
"352": "12",
|
||||
"22": "13",
|
||||
"494": "14",
|
||||
"863": "15",
|
||||
"829": "16",
|
||||
"732": "17",
|
||||
"676": "18",
|
||||
"327": "19",
|
||||
"131": "20",
|
||||
"307": "21",
|
||||
"601": "22",
|
||||
"134": "23",
|
||||
"190": "24",
|
||||
"759": "25",
|
||||
}
|
||||
|
||||
var sansTopTwentyfiveLatest = map[string]string{
|
||||
"119": "1",
|
||||
"79": "2",
|
||||
"20": "3",
|
||||
"200": "4",
|
||||
"125": "5",
|
||||
"89": "6",
|
||||
"416": "7",
|
||||
"190": "8",
|
||||
"352": "9",
|
||||
"22": "10",
|
||||
"78": "11",
|
||||
"787": "12",
|
||||
"287": "13",
|
||||
"476": "14",
|
||||
"732": "15",
|
||||
"434": "16",
|
||||
"611": "17",
|
||||
"94": "18",
|
||||
"798": "19",
|
||||
"400": "20",
|
||||
"772": "21",
|
||||
"426": "22",
|
||||
"502": "23",
|
||||
"269": "24",
|
||||
"295": "25",
|
||||
}
|
||||
|
||||
// SansTopTwentyfiveURLs has CWE/SANS Top25 links
|
||||
var SansTopTwentyfiveURLs = map[string]string{
|
||||
"2010": "https://cwe.mitre.org/top25/archive/2010/2010_cwe_sans_top25.html",
|
||||
"2011": "https://cwe.mitre.org/top25/archive/2011/2011_cwe_sans_top25.html",
|
||||
"latest": "https://www.sans.org/top25-software-errors/",
|
||||
}
|
||||
222
detector/cti.go
222
detector/cti.go
@@ -1,222 +0,0 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
ctidb "github.com/vulsio/go-cti/db"
|
||||
ctilog "github.com/vulsio/go-cti/utils"
|
||||
)
|
||||
|
||||
// goCTIDBClient is a DB Driver
|
||||
type goCTIDBClient struct {
|
||||
driver ctidb.DB
|
||||
baseURL string
|
||||
}
|
||||
|
||||
// closeDB close a DB connection
|
||||
func (client goCTIDBClient) closeDB() error {
|
||||
if client.driver == nil {
|
||||
return nil
|
||||
}
|
||||
return client.driver.CloseDB()
|
||||
}
|
||||
|
||||
func newGoCTIDBClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goCTIDBClient, error) {
|
||||
if err := ctilog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to set go-cti logger. err: %w", err)
|
||||
}
|
||||
|
||||
db, err := newCTIDB(cnf)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to newCTIDB. err: %w", err)
|
||||
}
|
||||
return &goCTIDBClient{driver: db, baseURL: cnf.GetURL()}, nil
|
||||
}
|
||||
|
||||
// FillWithCTI :
|
||||
func FillWithCTI(r *models.ScanResult, cnf config.CtiConf, logOpts logging.LogOpts) error {
|
||||
client, err := newGoCTIDBClient(&cnf, logOpts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := client.closeDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
nCti := 0
|
||||
if client.driver == nil {
|
||||
var cveIDs []string
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, err := util.URLPathJoin(client.baseURL, "cves")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
responses, err := getCTIsViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, res := range responses {
|
||||
var techniqueIDs []string
|
||||
if err := json.Unmarshal([]byte(res.json), &techniqueIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.Ctis = techniqueIDs
|
||||
nCti++
|
||||
}
|
||||
r.ScannedCves[res.request.cveID] = v
|
||||
}
|
||||
} else {
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
techniqueIDs, err := client.driver.GetTechniqueIDsByCveID(cveID)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to get CTIs by CVE-ID. err: %w", err)
|
||||
}
|
||||
if len(techniqueIDs) == 0 {
|
||||
continue
|
||||
}
|
||||
vuln.Ctis = techniqueIDs
|
||||
nCti++
|
||||
r.ScannedCves[cveID] = vuln
|
||||
}
|
||||
}
|
||||
|
||||
logging.Log.Infof("%s: Cyber Threat Intelligences are detected for %d CVEs", r.FormatServerName(), nCti)
|
||||
return nil
|
||||
}
|
||||
|
||||
type ctiResponse struct {
|
||||
request ctiRequest
|
||||
json string
|
||||
}
|
||||
|
||||
func getCTIsViaHTTP(cveIDs []string, urlPrefix string) (responses []ctiResponse, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan ctiRequest, nReq)
|
||||
resChan := make(chan ctiResponse, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- ctiRequest{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
req := <-reqChan
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGetCTI(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching CTI")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch CTI. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type ctiRequest struct {
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGetCTI(url string, req ctiRequest, resChan chan<- ctiResponse, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
if err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify); err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- ctiResponse{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
|
||||
|
||||
func newCTIDB(cnf config.VulnDictInterface) (ctidb.DB, error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
return nil, nil
|
||||
}
|
||||
path := cnf.GetURL()
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
driver, locked, err := ctidb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), ctidb.Option{})
|
||||
if err != nil {
|
||||
if locked {
|
||||
return nil, xerrors.Errorf("Failed to init cti DB. SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
|
||||
}
|
||||
return nil, xerrors.Errorf("Failed to init cti DB. DB Path: %s, err: %w", path, err)
|
||||
}
|
||||
return driver, nil
|
||||
}
|
||||
@@ -1,224 +0,0 @@
//go:build !scanner
// +build !scanner

package detector

import (
"encoding/json"
"fmt"
"net/http"
"time"

"github.com/cenkalti/backoff"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"

"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/util"
cvedb "github.com/vulsio/go-cve-dictionary/db"
cvelog "github.com/vulsio/go-cve-dictionary/log"
cvemodels "github.com/vulsio/go-cve-dictionary/models"
)

type goCveDictClient struct {
driver cvedb.DB
baseURL string
}

func newGoCveDictClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goCveDictClient, error) {
if err := cvelog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
return nil, xerrors.Errorf("Failed to set go-cve-dictionary logger. err: %w", err)
}

driver, err := newCveDB(cnf)
if err != nil {
return nil, xerrors.Errorf("Failed to newCveDB. err: %w", err)
}
return &goCveDictClient{driver: driver, baseURL: cnf.GetURL()}, nil
}

func (client goCveDictClient) closeDB() error {
if client.driver == nil {
return nil
}
return client.driver.CloseDB()
}

type response struct {
Key string
CveDetail cvemodels.CveDetail
}

func (client goCveDictClient) fetchCveDetails(cveIDs []string) (cveDetails []cvemodels.CveDetail, err error) {
if client.driver == nil {
reqChan := make(chan string, len(cveIDs))
resChan := make(chan response, len(cveIDs))
errChan := make(chan error, len(cveIDs))
defer close(reqChan)
defer close(resChan)
defer close(errChan)

go func() {
for _, cveID := range cveIDs {
reqChan <- cveID
}
}()

concurrency := 10
tasks := util.GenWorkers(concurrency)
for range cveIDs {
tasks <- func() {
select {
case cveID := <-reqChan:
url, err := util.URLPathJoin(client.baseURL, "cves", cveID)
if err != nil {
errChan <- err
} else {
logging.Log.Debugf("HTTP Request to %s", url)
httpGet(cveID, url, resChan, errChan)
}
}
}
}

timeout := time.After(2 * 60 * time.Second)
var errs []error
for range cveIDs {
select {
case res := <-resChan:
cveDetails = append(cveDetails, res.CveDetail)
case err := <-errChan:
errs = append(errs, err)
case <-timeout:
return nil, xerrors.New("Timeout Fetching CVE")
}
}
if len(errs) != 0 {
return nil,
xerrors.Errorf("Failed to fetch CVE. err: %w", errs)
}
} else {
m, err := client.driver.GetMulti(cveIDs)
if err != nil {
return nil, xerrors.Errorf("Failed to GetMulti. err: %w", err)
}
for _, v := range m {
cveDetails = append(cveDetails, v)
}
}
return cveDetails, nil
}

func httpGet(key, url string, resChan chan<- response, errChan chan<- error) {
var body string
var errs []error
var resp *http.Response
f := func() (err error) {
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
return xerrors.Errorf("HTTP GET Error, url: %s, resp: %v, err: %+v",
url, resp, errs)
}
return nil
}
notify := func(err error, t time.Duration) {
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
}
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
if err != nil {
errChan <- xerrors.Errorf("HTTP Error: %w", err)
return
}
cveDetail := cvemodels.CveDetail{}
if err := json.Unmarshal([]byte(body), &cveDetail); err != nil {
errChan <- xerrors.Errorf("Failed to Unmarshal. body: %s, err: %w", body, err)
return
}
resChan <- response{
key,
cveDetail,
}
}

func (client goCveDictClient) detectCveByCpeURI(cpeURI string, useJVN bool) (cves []cvemodels.CveDetail, err error) {
if client.driver == nil {
url, err := util.URLPathJoin(client.baseURL, "cpes")
if err != nil {
return nil, xerrors.Errorf("Failed to join URLPath. err: %w", err)
}

query := map[string]string{"name": cpeURI}
logging.Log.Debugf("HTTP Request to %s, query: %#v", url, query)
if cves, err = httpPost(url, query); err != nil {
return nil, xerrors.Errorf("Failed to post HTTP Request. err: %w", err)
}
} else {
if cves, err = client.driver.GetByCpeURI(cpeURI); err != nil {
return nil, xerrors.Errorf("Failed to get CVEs by CPEURI. err: %w", err)
}
}

if useJVN {
return cves, nil
}

nvdCves := []cvemodels.CveDetail{}
for _, cve := range cves {
if !cve.HasNvd() {
continue
}
cve.Jvns = []cvemodels.Jvn{}
nvdCves = append(nvdCves, cve)
}
return nvdCves, nil
}

func httpPost(url string, query map[string]string) ([]cvemodels.CveDetail, error) {
var body string
var errs []error
var resp *http.Response
f := func() (err error) {
req := gorequest.New().Timeout(10 * time.Second).Post(url)
for key := range query {
req = req.Send(fmt.Sprintf("%s=%s", key, query[key])).Type("json")
}
resp, body, errs = req.End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
return xerrors.Errorf("HTTP POST error. url: %s, resp: %v, err: %+v", url, resp, errs)
}
return nil
}
notify := func(err error, t time.Duration) {
logging.Log.Warnf("Failed to HTTP POST. retrying in %s seconds. err: %+v", t, err)
}
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
if err != nil {
return nil, xerrors.Errorf("HTTP Error: %w", err)
}

cveDetails := []cvemodels.CveDetail{}
if err := json.Unmarshal([]byte(body), &cveDetails); err != nil {
return nil,
xerrors.Errorf("Failed to Unmarshal. body: %s, err: %w", body, err)
}
return cveDetails, nil
}

func newCveDB(cnf config.VulnDictInterface) (cvedb.DB, error) {
if cnf.IsFetchViaHTTP() {
return nil, nil
}
path := cnf.GetURL()
if cnf.GetType() == "sqlite3" {
path = cnf.GetSQLite3Path()
}
driver, locked, err := cvedb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), cvedb.Option{})
if err != nil {
if locked {
return nil, xerrors.Errorf("Failed to init CVE DB. SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
}
return nil, xerrors.Errorf("Failed to init CVE DB. DB Path: %s, err: %w", path, err)
}
return driver, nil
}
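Note: fetchCveDetails fans requests out over util.GenWorkers and fans results back in with a select over resChan, errChan, and an overall timeout. The following is an illustrative sketch only, showing the general shape such a bounded worker pool appears to have judging from its usage above; genWorkers and fetchAll are hypothetical names, not the Vuls implementation:

package sketch

import (
	"fmt"
	"time"
)

// genWorkers starts n goroutines that execute every func sent on the returned channel.
// The channel is never closed in this sketch, mirroring how the pool is used above.
func genWorkers(n int) chan<- func() {
	tasks := make(chan func())
	for i := 0; i < n; i++ {
		go func() {
			for f := range tasks {
				f()
			}
		}()
	}
	return tasks
}

func fetchAll(ids []string, fetch func(string) (string, error)) ([]string, error) {
	resChan := make(chan string, len(ids))
	errChan := make(chan error, len(ids))
	tasks := genWorkers(10)
	for _, id := range ids {
		id := id // capture loop variable
		tasks <- func() {
			if body, err := fetch(id); err != nil {
				errChan <- err
			} else {
				resChan <- body
			}
		}
	}
	// Fan-in: collect exactly one result or error per request, with an overall deadline.
	timeout := time.After(2 * time.Minute)
	var results []string
	for range ids {
		select {
		case r := <-resChan:
			results = append(results, r)
		case err := <-errChan:
			return nil, err
		case <-timeout:
			return nil, fmt.Errorf("timeout")
		}
	}
	return results, nil
}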
@@ -1,621 +0,0 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/contrib/owasp-dependency-check/parser"
|
||||
"github.com/future-architect/vuls/cwe"
|
||||
"github.com/future-architect/vuls/gost"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/oval"
|
||||
"github.com/future-architect/vuls/reporter"
|
||||
"github.com/future-architect/vuls/util"
|
||||
cvemodels "github.com/vulsio/go-cve-dictionary/models"
|
||||
)
|
||||
|
||||
// Cpe :
|
||||
type Cpe struct {
|
||||
CpeURI string
|
||||
UseJVN bool
|
||||
}
|
||||
|
||||
// Detect vulns and fill CVE detailed information
|
||||
func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
|
||||
|
||||
// Use the same reportedAt for all rs
|
||||
reportedAt := time.Now()
|
||||
for i, r := range rs {
|
||||
if !config.Conf.RefreshCve && !needToRefreshCve(r) {
|
||||
logging.Log.Info("No need to refresh")
|
||||
continue
|
||||
}
|
||||
|
||||
if !reuseScannedCves(&r) {
|
||||
r.ScannedCves = models.VulnInfos{}
|
||||
}
|
||||
|
||||
if err := DetectLibsCves(&r, config.Conf.TrivyCacheDBDir, config.Conf.NoProgress); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with Library dependency: %w", err)
|
||||
}
|
||||
|
||||
if err := DetectPkgCves(&r, config.Conf.OvalDict, config.Conf.Gost, config.Conf.LogOpts); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect Pkg CVE: %w", err)
|
||||
}
|
||||
|
||||
cpeURIs, owaspDCXMLPath := []string{}, ""
|
||||
cpes := []Cpe{}
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
cpeURIs = config.Conf.Servers[r.ServerName].CpeNames
|
||||
owaspDCXMLPath = config.Conf.Servers[r.ServerName].OwaspDCXMLPath
|
||||
} else {
|
||||
if s, ok := config.Conf.Servers[r.ServerName]; ok {
|
||||
if con, ok := s.Containers[r.Container.Name]; ok {
|
||||
cpeURIs = con.Cpes
|
||||
owaspDCXMLPath = con.OwaspDCXMLPath
|
||||
}
|
||||
}
|
||||
}
|
||||
if owaspDCXMLPath != "" {
|
||||
cpes, err := parser.Parse(owaspDCXMLPath)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to read OWASP Dependency Check XML on %s, `%s`, err: %w",
|
||||
r.ServerInfo(), owaspDCXMLPath, err)
|
||||
}
|
||||
cpeURIs = append(cpeURIs, cpes...)
|
||||
}
|
||||
for _, uri := range cpeURIs {
|
||||
cpes = append(cpes, Cpe{
|
||||
CpeURI: uri,
|
||||
UseJVN: true,
|
||||
})
|
||||
}
|
||||
if err := DetectCpeURIsCves(&r, cpes, config.Conf.CveDict, config.Conf.LogOpts); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect CVE of `%s`: %w", cpeURIs, err)
|
||||
}
|
||||
|
||||
repos := config.Conf.Servers[r.ServerName].GitHubRepos
|
||||
if err := DetectGitHubCves(&r, repos); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect GitHub Cves: %w", err)
|
||||
}
|
||||
|
||||
if err := DetectWordPressCves(&r, config.Conf.WpScan); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect WordPress Cves: %w", err)
|
||||
}
|
||||
|
||||
if err := gost.FillCVEsWithRedHat(&r, config.Conf.Gost, config.Conf.LogOpts); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with gost: %w", err)
|
||||
}
|
||||
|
||||
if err := FillCvesWithNvdJvn(&r, config.Conf.CveDict, config.Conf.LogOpts); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with CVE: %w", err)
|
||||
}
|
||||
|
||||
nExploitCve, err := FillWithExploit(&r, config.Conf.Exploit, config.Conf.LogOpts)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with exploit: %w", err)
|
||||
}
|
||||
logging.Log.Infof("%s: %d PoC are detected", r.FormatServerName(), nExploitCve)
|
||||
|
||||
nMetasploitCve, err := FillWithMetasploit(&r, config.Conf.Metasploit, config.Conf.LogOpts)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with metasploit: %w", err)
|
||||
}
|
||||
logging.Log.Infof("%s: %d exploits are detected", r.FormatServerName(), nMetasploitCve)
|
||||
|
||||
if err := FillWithKEVuln(&r, config.Conf.KEVuln, config.Conf.LogOpts); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with Known Exploited Vulnerabilities: %w", err)
|
||||
}
|
||||
|
||||
if err := FillWithCTI(&r, config.Conf.Cti, config.Conf.LogOpts); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with Cyber Threat Intelligences: %w", err)
|
||||
}
|
||||
|
||||
FillCweDict(&r)
|
||||
|
||||
r.ReportedBy, _ = os.Hostname()
|
||||
r.Lang = config.Conf.Lang
|
||||
r.ReportedAt = reportedAt
|
||||
r.ReportedVersion = config.Version
|
||||
r.ReportedRevision = config.Revision
|
||||
r.Config.Report = config.Conf
|
||||
r.Config.Report.Servers = map[string]config.ServerInfo{
|
||||
r.ServerName: config.Conf.Servers[r.ServerName],
|
||||
}
|
||||
rs[i] = r
|
||||
}
|
||||
|
||||
// Overwrite the json file every time to clear the fields specified in config.IgnoredJSONKeys
|
||||
for _, r := range rs {
|
||||
if s, ok := config.Conf.Servers[r.ServerName]; ok {
|
||||
r = r.ClearFields(s.IgnoredJSONKeys)
|
||||
}
|
||||
//TODO don't call here
|
||||
if err := reporter.OverwriteJSONFile(dir, r); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to write JSON: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
if config.Conf.DiffPlus || config.Conf.DiffMinus {
|
||||
prevs, err := loadPrevious(rs, config.Conf.ResultsDir)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to load previous results. err: %w", err)
|
||||
}
|
||||
rs = diff(rs, prevs, config.Conf.DiffPlus, config.Conf.DiffMinus)
|
||||
}
|
||||
|
||||
for i, r := range rs {
|
||||
nFiltered := 0
|
||||
logging.Log.Infof("%s: total %d CVEs detected", r.FormatServerName(), len(r.ScannedCves))
|
||||
|
||||
if 0 < config.Conf.CvssScoreOver {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterByCvssOver(config.Conf.CvssScoreOver)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --cvss-over=%g", r.FormatServerName(), nFiltered, config.Conf.CvssScoreOver)
|
||||
}
|
||||
|
||||
if config.Conf.IgnoreUnfixed {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterUnfixed(config.Conf.IgnoreUnfixed)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --ignore-unfixed", r.FormatServerName(), nFiltered)
|
||||
}
|
||||
|
||||
if 0 < config.Conf.ConfidenceScoreOver {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterByConfidenceOver(config.Conf.ConfidenceScoreOver)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --confidence-over=%d", r.FormatServerName(), nFiltered, config.Conf.ConfidenceScoreOver)
|
||||
}
|
||||
|
||||
// IgnoreCves
|
||||
ignoreCves := []string{}
|
||||
if r.Container.Name == "" {
|
||||
ignoreCves = config.Conf.Servers[r.ServerName].IgnoreCves
|
||||
} else if con, ok := config.Conf.Servers[r.ServerName].Containers[r.Container.Name]; ok {
|
||||
ignoreCves = con.IgnoreCves
|
||||
}
|
||||
if 0 < len(ignoreCves) {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterIgnoreCves(ignoreCves)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by ignoreCves=%s", r.FormatServerName(), nFiltered, ignoreCves)
|
||||
}
|
||||
|
||||
// ignorePkgs
|
||||
ignorePkgsRegexps := []string{}
|
||||
if r.Container.Name == "" {
|
||||
ignorePkgsRegexps = config.Conf.Servers[r.ServerName].IgnorePkgsRegexp
|
||||
} else if s, ok := config.Conf.Servers[r.ServerName].Containers[r.Container.Name]; ok {
|
||||
ignorePkgsRegexps = s.IgnorePkgsRegexp
|
||||
}
|
||||
if 0 < len(ignorePkgsRegexps) {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterIgnorePkgs(ignorePkgsRegexps)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by ignorePkgsRegexp=%s", r.FormatServerName(), nFiltered, ignorePkgsRegexps)
|
||||
}
|
||||
|
||||
// IgnoreUnscored
|
||||
if config.Conf.IgnoreUnscoredCves {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FindScoredVulns()
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --ignore-unscored-cves", r.FormatServerName(), nFiltered)
|
||||
}
|
||||
|
||||
r.FilterInactiveWordPressLibs(config.Conf.WpScan.DetectInactive)
|
||||
rs[i] = r
|
||||
}
|
||||
return rs, nil
|
||||
}
|
||||
|
||||
// DetectPkgCves detects OS package CVEs.
// It takes two configs: OVAL and gost.
|
||||
func DetectPkgCves(r *models.ScanResult, ovalCnf config.GovalDictConf, gostCnf config.GostConf, logOpts logging.LogOpts) error {
|
||||
// Pkg Scan
|
||||
if isPkgCvesDetactable(r) {
|
||||
// OVAL and gost (Debian Security Tracker) do not support Raspbian packages, so skip them.
|
||||
if r.Family == constant.Raspbian {
|
||||
r = r.RemoveRaspbianPackFromResult()
|
||||
}
|
||||
|
||||
// OVAL
|
||||
if err := detectPkgsCvesWithOval(ovalCnf, r, logOpts); err != nil {
|
||||
return xerrors.Errorf("Failed to detect CVE with OVAL: %w", err)
|
||||
}
|
||||
|
||||
// gost
|
||||
if err := detectPkgsCvesWithGost(gostCnf, r, logOpts); err != nil {
|
||||
return xerrors.Errorf("Failed to detect CVE with gost: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
for i, v := range r.ScannedCves {
|
||||
for j, p := range v.AffectedPackages {
|
||||
if p.NotFixedYet && p.FixState == "" {
|
||||
p.FixState = "Not fixed yet"
|
||||
r.ScannedCves[i].AffectedPackages[j] = p
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// To keep backward compatibility:
// newer versions of Vuls use ListenPortStats, while older versions set ListenPorts.
// Copy ListenPorts into ListenPortStats so that newer Vuls can report old results.
|
||||
for i, pkg := range r.Packages {
|
||||
for j, proc := range pkg.AffectedProcs {
|
||||
for _, ipPort := range proc.ListenPorts {
|
||||
ps, err := models.NewPortStat(ipPort)
|
||||
if err != nil {
|
||||
logging.Log.Warnf("Failed to parse ip:port: %s, err:%+v", ipPort, err)
|
||||
continue
|
||||
}
|
||||
r.Packages[i].AffectedProcs[j].ListenPortStats = append(
|
||||
r.Packages[i].AffectedProcs[j].ListenPortStats, *ps)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// isPkgCvesDetactable checks whether CVEs are detectable with gost and OVAL from the scan result
|
||||
func isPkgCvesDetactable(r *models.ScanResult) bool {
|
||||
switch r.Family {
|
||||
case constant.FreeBSD, constant.ServerTypePseudo:
|
||||
logging.Log.Infof("%s type. Skip OVAL and gost detection", r.Family)
|
||||
return false
|
||||
case constant.Windows:
|
||||
return true
|
||||
default:
|
||||
if r.ScannedVia == "trivy" {
|
||||
logging.Log.Infof("r.ScannedVia is trivy. Skip OVAL and gost detection")
|
||||
return false
|
||||
}
|
||||
|
||||
if r.Release == "" {
|
||||
logging.Log.Infof("r.Release is empty. Skip OVAL and gost detection")
|
||||
return false
|
||||
}
|
||||
|
||||
if len(r.Packages)+len(r.SrcPackages) == 0 {
|
||||
logging.Log.Infof("Number of packages is 0. Skip OVAL and gost detection")
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// DetectGitHubCves fetches CVEs from GitHub Security Alerts
|
||||
func DetectGitHubCves(r *models.ScanResult, githubConfs map[string]config.GitHubConf) error {
|
||||
if len(githubConfs) == 0 {
|
||||
return nil
|
||||
}
|
||||
for ownerRepo, setting := range githubConfs {
|
||||
ss := strings.Split(ownerRepo, "/")
|
||||
if len(ss) != 2 {
|
||||
return xerrors.Errorf("Failed to parse GitHub owner/repo: %s", ownerRepo)
|
||||
}
|
||||
owner, repo := ss[0], ss[1]
|
||||
n, err := DetectGitHubSecurityAlerts(r, owner, repo, setting.Token, setting.IgnoreGitHubDismissed)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to access GitHub Security Alerts: %w", err)
|
||||
}
|
||||
logging.Log.Infof("%s: %d CVEs detected with GHSA %s/%s",
|
||||
r.FormatServerName(), n, owner, repo)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// DetectWordPressCves detects CVEs of WordPress
|
||||
func DetectWordPressCves(r *models.ScanResult, wpCnf config.WpScanConf) error {
|
||||
if len(r.WordPressPackages) == 0 {
|
||||
return nil
|
||||
}
|
||||
logging.Log.Infof("%s: Detect WordPress CVE. Number of pkgs: %d ", r.ServerInfo(), len(r.WordPressPackages))
|
||||
n, err := detectWordPressCves(r, wpCnf)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to detect WordPress CVE: %w", err)
|
||||
}
|
||||
logging.Log.Infof("%s: found %d WordPress CVEs", r.FormatServerName(), n)
|
||||
return nil
|
||||
}
|
||||
|
||||
// FillCvesWithNvdJvn fills CVE detail with NVD, JVN
|
||||
func FillCvesWithNvdJvn(r *models.ScanResult, cnf config.GoCveDictConf, logOpts logging.LogOpts) (err error) {
|
||||
cveIDs := []string{}
|
||||
for _, v := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, v.CveID)
|
||||
}
|
||||
|
||||
client, err := newGoCveDictClient(&cnf, logOpts)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to newGoCveDictClient. err: %w", err)
|
||||
}
|
||||
defer func() {
|
||||
if err := client.closeDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
ds, err := client.fetchCveDetails(cveIDs)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to fetchCveDetails. err: %w", err)
|
||||
}
|
||||
|
||||
for _, d := range ds {
|
||||
nvds, exploits, mitigations := models.ConvertNvdToModel(d.CveID, d.Nvds)
|
||||
jvns := models.ConvertJvnToModel(d.CveID, d.Jvns)
|
||||
|
||||
alerts := fillCertAlerts(&d)
|
||||
for cveID, vinfo := range r.ScannedCves {
|
||||
if vinfo.CveID == d.CveID {
|
||||
if vinfo.CveContents == nil {
|
||||
vinfo.CveContents = models.CveContents{}
|
||||
}
|
||||
for _, con := range nvds {
|
||||
if !con.Empty() {
|
||||
vinfo.CveContents[con.Type] = []models.CveContent{con}
|
||||
}
|
||||
}
|
||||
for _, con := range jvns {
|
||||
if !con.Empty() {
|
||||
found := false
|
||||
for _, cveCont := range vinfo.CveContents[con.Type] {
|
||||
if con.SourceLink == cveCont.SourceLink {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
vinfo.CveContents[con.Type] = append(vinfo.CveContents[con.Type], con)
|
||||
}
|
||||
}
|
||||
}
|
||||
vinfo.AlertDict = alerts
|
||||
vinfo.Exploits = append(vinfo.Exploits, exploits...)
|
||||
vinfo.Mitigations = append(vinfo.Mitigations, mitigations...)
|
||||
r.ScannedCves[cveID] = vinfo
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func fillCertAlerts(cvedetail *cvemodels.CveDetail) (dict models.AlertDict) {
|
||||
for _, nvd := range cvedetail.Nvds {
|
||||
for _, cert := range nvd.Certs {
|
||||
dict.USCERT = append(dict.USCERT, models.Alert{
|
||||
URL: cert.Link,
|
||||
Title: cert.Title,
|
||||
Team: "uscert",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, jvn := range cvedetail.Jvns {
|
||||
for _, cert := range jvn.Certs {
|
||||
dict.JPCERT = append(dict.JPCERT, models.Alert{
|
||||
URL: cert.Link,
|
||||
Title: cert.Title,
|
||||
Team: "jpcert",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return dict
|
||||
}
|
||||
|
||||
// detectPkgsCvesWithOval fetches OVAL database
|
||||
func detectPkgsCvesWithOval(cnf config.GovalDictConf, r *models.ScanResult, logOpts logging.LogOpts) error {
|
||||
client, err := oval.NewOVALClient(r.Family, cnf, logOpts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := client.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close the OVAL DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
logging.Log.Debugf("Check if oval fetched: %s %s", r.Family, r.Release)
|
||||
ok, err := client.CheckIfOvalFetched(r.Family, r.Release)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !ok {
|
||||
switch r.Family {
|
||||
case constant.Debian:
|
||||
logging.Log.Infof("Skip OVAL and Scan with gost alone.")
|
||||
logging.Log.Infof("%s: %d CVEs are detected with OVAL", r.FormatServerName(), 0)
|
||||
return nil
|
||||
case constant.Windows, constant.FreeBSD, constant.ServerTypePseudo:
|
||||
return nil
|
||||
default:
|
||||
return xerrors.Errorf("OVAL entries of %s %s are not found. Fetch OVAL before reporting. For details, see `https://github.com/vulsio/goval-dictionary#usage`", r.Family, r.Release)
|
||||
}
|
||||
}
|
||||
|
||||
logging.Log.Debugf("Check if oval fresh: %s %s", r.Family, r.Release)
|
||||
_, err = client.CheckIfOvalFresh(r.Family, r.Release)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
logging.Log.Debugf("Fill with oval: %s %s", r.Family, r.Release)
|
||||
nCVEs, err := client.FillWithOval(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
logging.Log.Infof("%s: %d CVEs are detected with OVAL", r.FormatServerName(), nCVEs)
|
||||
return nil
|
||||
}
|
||||
|
||||
func detectPkgsCvesWithGost(cnf config.GostConf, r *models.ScanResult, logOpts logging.LogOpts) error {
|
||||
client, err := gost.NewGostClient(cnf, r.Family, logOpts)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to new a gost client: %w", err)
|
||||
}
|
||||
defer func() {
|
||||
if err := client.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close the gost DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
nCVEs, err := client.DetectCVEs(r, true)
|
||||
if err != nil {
|
||||
if r.Family == constant.Debian {
|
||||
return xerrors.Errorf("Failed to detect CVEs with gost: %w", err)
|
||||
}
|
||||
return xerrors.Errorf("Failed to detect unfixed CVEs with gost: %w", err)
|
||||
}
|
||||
|
||||
if r.Family == constant.Debian {
|
||||
logging.Log.Infof("%s: %d CVEs are detected with gost",
|
||||
r.FormatServerName(), nCVEs)
|
||||
} else {
|
||||
logging.Log.Infof("%s: %d unfixed CVEs are detected with gost",
|
||||
r.FormatServerName(), nCVEs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// DetectCpeURIsCves detects CVEs of given CPE-URIs
|
||||
func DetectCpeURIsCves(r *models.ScanResult, cpes []Cpe, cnf config.GoCveDictConf, logOpts logging.LogOpts) error {
|
||||
client, err := newGoCveDictClient(&cnf, logOpts)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to newGoCveDictClient. err: %w", err)
|
||||
}
|
||||
defer func() {
|
||||
if err := client.closeDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
nCVEs := 0
|
||||
for _, cpe := range cpes {
|
||||
details, err := client.detectCveByCpeURI(cpe.CpeURI, cpe.UseJVN)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to detectCveByCpeURI. err: %w", err)
|
||||
}
|
||||
|
||||
for _, detail := range details {
|
||||
advisories := []models.DistroAdvisory{}
|
||||
if !detail.HasNvd() && detail.HasJvn() {
|
||||
for _, jvn := range detail.Jvns {
|
||||
advisories = append(advisories, models.DistroAdvisory{
|
||||
AdvisoryID: jvn.JvnID,
|
||||
})
|
||||
}
|
||||
}
|
||||
maxConfidence := getMaxConfidence(detail)
|
||||
|
||||
if val, ok := r.ScannedCves[detail.CveID]; ok {
|
||||
val.CpeURIs = util.AppendIfMissing(val.CpeURIs, cpe.CpeURI)
|
||||
val.Confidences.AppendIfMissing(maxConfidence)
|
||||
val.DistroAdvisories = advisories
|
||||
r.ScannedCves[detail.CveID] = val
|
||||
} else {
|
||||
v := models.VulnInfo{
|
||||
CveID: detail.CveID,
|
||||
CpeURIs: []string{cpe.CpeURI},
|
||||
Confidences: models.Confidences{maxConfidence},
|
||||
DistroAdvisories: advisories,
|
||||
}
|
||||
r.ScannedCves[detail.CveID] = v
|
||||
nCVEs++
|
||||
}
|
||||
}
|
||||
}
|
||||
logging.Log.Infof("%s: %d CVEs are detected with CPE", r.FormatServerName(), nCVEs)
|
||||
return nil
|
||||
}
|
||||
|
||||
func getMaxConfidence(detail cvemodels.CveDetail) (max models.Confidence) {
|
||||
if !detail.HasNvd() && detail.HasJvn() {
|
||||
return models.JvnVendorProductMatch
|
||||
} else if detail.HasNvd() {
|
||||
for _, nvd := range detail.Nvds {
|
||||
confidence := models.Confidence{}
|
||||
switch nvd.DetectionMethod {
|
||||
case cvemodels.NvdExactVersionMatch:
|
||||
confidence = models.NvdExactVersionMatch
|
||||
case cvemodels.NvdRoughVersionMatch:
|
||||
confidence = models.NvdRoughVersionMatch
|
||||
case cvemodels.NvdVendorProductMatch:
|
||||
confidence = models.NvdVendorProductMatch
|
||||
}
|
||||
if max.Score < confidence.Score {
|
||||
max = confidence
|
||||
}
|
||||
}
|
||||
}
|
||||
return max
|
||||
}
|
||||
|
||||
// FillCweDict fills CWE
|
||||
func FillCweDict(r *models.ScanResult) {
|
||||
uniqCweIDMap := map[string]bool{}
|
||||
for _, vinfo := range r.ScannedCves {
|
||||
for _, conts := range vinfo.CveContents {
|
||||
for _, cont := range conts {
|
||||
for _, id := range cont.CweIDs {
|
||||
if strings.HasPrefix(id, "CWE-") {
|
||||
id = strings.TrimPrefix(id, "CWE-")
|
||||
uniqCweIDMap[id] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dict := map[string]models.CweDictEntry{}
|
||||
for id := range uniqCweIDMap {
|
||||
entry := models.CweDictEntry{
|
||||
OwaspTopTens: map[string]string{},
|
||||
CweTopTwentyfives: map[string]string{},
|
||||
SansTopTwentyfives: map[string]string{},
|
||||
}
|
||||
if e, ok := cwe.CweDictEn[id]; ok {
|
||||
fillCweRank(&entry, id)
|
||||
entry.En = &e
|
||||
} else {
|
||||
logging.Log.Debugf("CWE-ID %s is not found in English CWE Dict", id)
|
||||
entry.En = &cwe.Cwe{CweID: id}
|
||||
}
|
||||
|
||||
if r.Lang == "ja" {
|
||||
if e, ok := cwe.CweDictJa[id]; ok {
|
||||
fillCweRank(&entry, id)
|
||||
entry.Ja = &e
|
||||
} else {
|
||||
logging.Log.Debugf("CWE-ID %s is not found in Japanese CWE Dict", id)
|
||||
entry.Ja = &cwe.Cwe{CweID: id}
|
||||
}
|
||||
}
|
||||
|
||||
dict[id] = entry
|
||||
}
|
||||
r.CweDict = dict
|
||||
return
|
||||
}
|
||||
|
||||
func fillCweRank(entry *models.CweDictEntry, id string) {
|
||||
for year, ranks := range cwe.OwaspTopTens {
|
||||
if rank, ok := ranks[id]; ok {
|
||||
entry.OwaspTopTens[year] = rank
|
||||
}
|
||||
}
|
||||
for year, ranks := range cwe.CweTopTwentyfives {
|
||||
if rank, ok := ranks[id]; ok {
|
||||
entry.CweTopTwentyfives[year] = rank
|
||||
}
|
||||
}
|
||||
for year, ranks := range cwe.SansTopTwentyfives {
|
||||
if rank, ok := ranks[id]; ok {
|
||||
entry.SansTopTwentyfives[year] = rank
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,90 +0,0 @@
//go:build !scanner
// +build !scanner

package detector

import (
"reflect"
"testing"

"github.com/future-architect/vuls/models"
cvemodels "github.com/vulsio/go-cve-dictionary/models"
)

func Test_getMaxConfidence(t *testing.T) {
type args struct {
detail cvemodels.CveDetail
}
tests := []struct {
name string
args args
wantMax models.Confidence
}{
{
name: "JvnVendorProductMatch",
args: args{
detail: cvemodels.CveDetail{
Nvds: []cvemodels.Nvd{},
Jvns: []cvemodels.Jvn{{}},
},
},
wantMax: models.JvnVendorProductMatch,
},
{
name: "NvdExactVersionMatch",
args: args{
detail: cvemodels.CveDetail{
Nvds: []cvemodels.Nvd{
{DetectionMethod: cvemodels.NvdRoughVersionMatch},
{DetectionMethod: cvemodels.NvdVendorProductMatch},
{DetectionMethod: cvemodels.NvdExactVersionMatch},
},
Jvns: []cvemodels.Jvn{{DetectionMethod: cvemodels.JvnVendorProductMatch}},
},
},
wantMax: models.NvdExactVersionMatch,
},
{
name: "NvdRoughVersionMatch",
args: args{
detail: cvemodels.CveDetail{
Nvds: []cvemodels.Nvd{
{DetectionMethod: cvemodels.NvdRoughVersionMatch},
{DetectionMethod: cvemodels.NvdVendorProductMatch},
},
Jvns: []cvemodels.Jvn{},
},
},
wantMax: models.NvdRoughVersionMatch,
},
{
name: "NvdVendorProductMatch",
args: args{
detail: cvemodels.CveDetail{
Nvds: []cvemodels.Nvd{
{DetectionMethod: cvemodels.NvdVendorProductMatch},
},
Jvns: []cvemodels.Jvn{{DetectionMethod: cvemodels.JvnVendorProductMatch}},
},
},
wantMax: models.NvdVendorProductMatch,
},
{
name: "empty",
args: args{
detail: cvemodels.CveDetail{
Nvds: []cvemodels.Nvd{},
Jvns: []cvemodels.Jvn{},
},
},
wantMax: models.Confidence{},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if gotMax := getMaxConfidence(tt.args.detail); !reflect.DeepEqual(gotMax, tt.wantMax) {
t.Errorf("getMaxConfidence() = %v, want %v", gotMax, tt.wantMax)
}
})
}
}
@@ -1,250 +0,0 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
exploitdb "github.com/vulsio/go-exploitdb/db"
|
||||
exploitmodels "github.com/vulsio/go-exploitdb/models"
|
||||
exploitlog "github.com/vulsio/go-exploitdb/util"
|
||||
)
|
||||
|
||||
// goExploitDBClient is a DB Driver
|
||||
type goExploitDBClient struct {
|
||||
driver exploitdb.DB
|
||||
baseURL string
|
||||
}
|
||||
|
||||
// closeDB close a DB connection
|
||||
func (client goExploitDBClient) closeDB() error {
|
||||
if client.driver == nil {
|
||||
return nil
|
||||
}
|
||||
return client.driver.CloseDB()
|
||||
}
|
||||
|
||||
func newGoExploitDBClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goExploitDBClient, error) {
|
||||
if err := exploitlog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to set go-exploitdb logger. err: %w", err)
|
||||
}
|
||||
|
||||
db, err := newExploitDB(cnf)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to newExploitDB. err: %w", err)
|
||||
}
|
||||
return &goExploitDBClient{driver: db, baseURL: cnf.GetURL()}, nil
|
||||
}
|
||||
|
||||
// FillWithExploit fills exploit information held in the exploit DB
|
||||
func FillWithExploit(r *models.ScanResult, cnf config.ExploitConf, logOpts logging.LogOpts) (nExploitCve int, err error) {
|
||||
client, err := newGoExploitDBClient(&cnf, logOpts)
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to newGoExploitDBClient. err: %w", err)
|
||||
}
|
||||
defer func() {
|
||||
if err := client.closeDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
if client.driver == nil {
|
||||
var cveIDs []string
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, err := util.URLPathJoin(client.baseURL, "cves")
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to join URLPath. err: %w", err)
|
||||
}
|
||||
responses, err := getExploitsViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to get Exploits via HTTP. err: %w", err)
|
||||
}
|
||||
for _, res := range responses {
|
||||
exps := []exploitmodels.Exploit{}
|
||||
if err := json.Unmarshal([]byte(res.json), &exps); err != nil {
|
||||
return 0, xerrors.Errorf("Failed to unmarshal json. err: %w", err)
|
||||
}
|
||||
exploits := ConvertToModelsExploit(exps)
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.Exploits = exploits
|
||||
}
|
||||
r.ScannedCves[res.request.cveID] = v
|
||||
nExploitCve++
|
||||
}
|
||||
} else {
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
es, err := client.driver.GetExploitByCveID(cveID)
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to get Exploits by CVE-ID. err: %w", err)
|
||||
}
|
||||
if len(es) == 0 {
|
||||
continue
|
||||
}
|
||||
exploits := ConvertToModelsExploit(es)
|
||||
vuln.Exploits = exploits
|
||||
r.ScannedCves[cveID] = vuln
|
||||
nExploitCve++
|
||||
}
|
||||
}
|
||||
return nExploitCve, nil
|
||||
}
|
||||
|
||||
// ConvertToModelsExploit converts exploit model to vuls model
|
||||
func ConvertToModelsExploit(es []exploitmodels.Exploit) (exploits []models.Exploit) {
|
||||
for _, e := range es {
|
||||
var documentURL, shellURL *string
|
||||
if e.OffensiveSecurity != nil {
|
||||
os := e.OffensiveSecurity
|
||||
if os.Document != nil {
|
||||
documentURL = &os.Document.DocumentURL
|
||||
}
|
||||
if os.ShellCode != nil {
|
||||
shellURL = &os.ShellCode.ShellCodeURL
|
||||
}
|
||||
}
|
||||
exploit := models.Exploit{
|
||||
ExploitType: e.ExploitType,
|
||||
ID: e.ExploitUniqueID,
|
||||
URL: e.URL,
|
||||
Description: e.Description,
|
||||
DocumentURL: documentURL,
|
||||
ShellCodeURL: shellURL,
|
||||
}
|
||||
exploits = append(exploits, exploit)
|
||||
}
|
||||
return exploits
|
||||
}
|
||||
|
||||
type exploitResponse struct {
|
||||
request exploitRequest
|
||||
json string
|
||||
}
|
||||
|
||||
func getExploitsViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []exploitResponse, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan exploitRequest, nReq)
|
||||
resChan := make(chan exploitResponse, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- exploitRequest{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
req := <-reqChan
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGetExploit(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching Exploit")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch Exploit. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type exploitRequest struct {
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGetExploit(url string, req exploitRequest, resChan chan<- exploitResponse, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- exploitResponse{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
|
||||
|
||||
func newExploitDB(cnf config.VulnDictInterface) (driver exploitdb.DB, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
return nil, nil
|
||||
}
|
||||
path := cnf.GetURL()
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
driver, locked, err := exploitdb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), exploitdb.Option{})
|
||||
if err != nil {
|
||||
if locked {
|
||||
return nil, xerrors.Errorf("Failed to init exploit DB. SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
|
||||
}
|
||||
return nil, xerrors.Errorf("Failed to init exploit DB. DB Path: %s, err: %w", path, err)
|
||||
}
|
||||
return driver, nil
|
||||
}
|
||||
@@ -1,201 +0,0 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/errof"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
// DetectGitHubSecurityAlerts accesses owner/repo on GitHub, fetches the repository's security alerts via the GitHub API v4 GraphQL endpoint, and sets them on the given ScanResult.
|
||||
// https://help.github.com/articles/about-security-alerts-for-vulnerable-dependencies/
|
||||
func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string, ignoreDismissed bool) (nCVEs int, err error) {
|
||||
src := oauth2.StaticTokenSource(
|
||||
&oauth2.Token{AccessToken: token},
|
||||
)
|
||||
//TODO Proxy
|
||||
httpClient := oauth2.NewClient(context.Background(), src)
|
||||
|
||||
// TODO Use `https://github.com/shurcooL/githubv4` if the tool supports vulnerabilityAlerts Endpoint
|
||||
// Memo : https://developer.github.com/v4/explorer/
|
||||
const jsonfmt = `{"query":
|
||||
"query { repository(owner:\"%s\", name:\"%s\") { url vulnerabilityAlerts(first: %d, states:[OPEN], %s) { pageInfo { endCursor hasNextPage startCursor } edges { node { id dismissReason dismissedAt securityVulnerability{ package { name ecosystem } severity vulnerableVersionRange firstPatchedVersion { identifier } } securityAdvisory { description ghsaId permalink publishedAt summary updatedAt withdrawnAt origin severity references { url } identifiers { type value } } } } } } } "}`
|
||||
after := ""
|
||||
|
||||
for {
|
||||
jsonStr := fmt.Sprintf(jsonfmt, owner, repo, 100, after)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost,
|
||||
"https://api.github.com/graphql",
|
||||
bytes.NewBuffer([]byte(jsonStr)),
|
||||
)
|
||||
defer cancel()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// https://developer.github.com/v4/previews/#repository-vulnerability-alerts
|
||||
// To toggle this preview and access the data, you need to provide a custom media type in the Accept header.
// MEMO: I tried to get the affected version via the GitHub API, but it seems difficult to determine the affected version when there are multiple dependency files such as package.json.
// TODO: remove this header once the endpoint is no longer in preview status.
|
||||
req.Header.Set("Accept", "application/vnd.github.package-deletes-preview+json")
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := httpClient.Do(req)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
alerts := SecurityAlerts{}
|
||||
if err := json.Unmarshal(body, &alerts); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// util.Log.Debugf("%s", pp.Sprint(alerts))
|
||||
// util.Log.Debugf("%s", string(body))
|
||||
if alerts.Data.Repository.URL == "" {
|
||||
return 0, errof.New(errof.ErrFailedToAccessGithubAPI,
|
||||
fmt.Sprintf("Failed to access to GitHub API. Response: %s", string(body)))
|
||||
}
|
||||
|
||||
for _, v := range alerts.Data.Repository.VulnerabilityAlerts.Edges {
|
||||
if ignoreDismissed && v.Node.DismissReason != "" {
|
||||
continue
|
||||
}
|
||||
|
||||
pkgName := fmt.Sprintf("%s %s",
|
||||
alerts.Data.Repository.URL, v.Node.SecurityVulnerability.Package.Name)
|
||||
|
||||
m := models.GitHubSecurityAlert{
|
||||
PackageName: pkgName,
|
||||
FixedIn: v.Node.SecurityVulnerability.FirstPatchedVersion.Identifier,
|
||||
AffectedRange: v.Node.SecurityVulnerability.VulnerableVersionRange,
|
||||
Dismissed: len(v.Node.DismissReason) != 0,
|
||||
DismissedAt: v.Node.DismissedAt,
|
||||
DismissReason: v.Node.DismissReason,
|
||||
}
|
||||
|
||||
cveIDs, other := []string{}, []string{}
|
||||
for _, identifier := range v.Node.SecurityAdvisory.Identifiers {
|
||||
if identifier.Type == "CVE" {
|
||||
cveIDs = append(cveIDs, identifier.Value)
|
||||
} else {
|
||||
other = append(other, identifier.Value)
|
||||
}
|
||||
}
|
||||
|
||||
// If no CVE-ID has been assigned, use the GHSA ID etc. as the ID.
|
||||
if len(cveIDs) == 0 {
|
||||
cveIDs = other
|
||||
}
|
||||
|
||||
refs := []models.Reference{}
|
||||
for _, r := range v.Node.SecurityAdvisory.References {
|
||||
refs = append(refs, models.Reference{Link: r.URL})
|
||||
}
|
||||
|
||||
for _, cveID := range cveIDs {
|
||||
cveContent := models.CveContent{
|
||||
Type: models.GitHub,
|
||||
CveID: cveID,
|
||||
Title: v.Node.SecurityAdvisory.Summary,
|
||||
Summary: v.Node.SecurityAdvisory.Description,
|
||||
Cvss2Severity: v.Node.SecurityVulnerability.Severity,
|
||||
Cvss3Severity: v.Node.SecurityVulnerability.Severity,
|
||||
SourceLink: v.Node.SecurityAdvisory.Permalink,
|
||||
References: refs,
|
||||
Published: v.Node.SecurityAdvisory.PublishedAt,
|
||||
LastModified: v.Node.SecurityAdvisory.UpdatedAt,
|
||||
}
|
||||
|
||||
if val, ok := r.ScannedCves[cveID]; ok {
|
||||
val.GitHubSecurityAlerts = val.GitHubSecurityAlerts.Add(m)
|
||||
val.CveContents[models.GitHub] = []models.CveContent{cveContent}
|
||||
r.ScannedCves[cveID] = val
|
||||
} else {
|
||||
v := models.VulnInfo{
|
||||
CveID: cveID,
|
||||
Confidences: models.Confidences{models.GitHubMatch},
|
||||
GitHubSecurityAlerts: models.GitHubSecurityAlerts{m},
|
||||
CveContents: models.NewCveContents(cveContent),
|
||||
}
|
||||
r.ScannedCves[cveID] = v
|
||||
}
|
||||
nCVEs++
|
||||
}
|
||||
}
|
||||
if !alerts.Data.Repository.VulnerabilityAlerts.PageInfo.HasNextPage {
|
||||
break
|
||||
}
|
||||
after = fmt.Sprintf(`after: \"%s\"`, alerts.Data.Repository.VulnerabilityAlerts.PageInfo.EndCursor)
|
||||
}
|
||||
return nCVEs, err
|
||||
}
|
||||
|
||||
// SecurityAlerts holds the detected CVE-IDs, package names, and references
|
||||
type SecurityAlerts struct {
|
||||
Data struct {
|
||||
Repository struct {
|
||||
URL string `json:"url"`
|
||||
VulnerabilityAlerts struct {
|
||||
PageInfo struct {
|
||||
EndCursor string `json:"endCursor"`
|
||||
HasNextPage bool `json:"hasNextPage"`
|
||||
StartCursor string `json:"startCursor"`
|
||||
} `json:"pageInfo"`
|
||||
Edges []struct {
|
||||
Node struct {
|
||||
ID string `json:"id"`
|
||||
DismissReason string `json:"dismissReason"`
|
||||
DismissedAt time.Time `json:"dismissedAt"`
|
||||
SecurityVulnerability struct {
|
||||
Package struct {
|
||||
Name string `json:"name"`
|
||||
Ecosystem string `json:"ecosystem"`
|
||||
} `json:"package"`
|
||||
Severity string `json:"severity"`
|
||||
VulnerableVersionRange string `json:"vulnerableVersionRange"`
|
||||
FirstPatchedVersion struct {
|
||||
Identifier string `json:"identifier"`
|
||||
} `json:"firstPatchedVersion"`
|
||||
} `json:"securityVulnerability"`
|
||||
SecurityAdvisory struct {
|
||||
Description string `json:"description"`
|
||||
GhsaID string `json:"ghsaId"`
|
||||
Permalink string `json:"permalink"`
|
||||
PublishedAt time.Time `json:"publishedAt"`
|
||||
Summary string `json:"summary"`
|
||||
UpdatedAt time.Time `json:"updatedAt"`
|
||||
WithdrawnAt time.Time `json:"withdrawnAt"`
|
||||
Origin string `json:"origin"`
|
||||
Severity string `json:"severity"`
|
||||
References []struct {
|
||||
URL string `json:"url"`
|
||||
} `json:"references"`
|
||||
Identifiers []struct {
|
||||
Type string `json:"type"`
|
||||
Value string `json:"value"`
|
||||
} `json:"identifiers"`
|
||||
} `json:"securityAdvisory"`
|
||||
} `json:"node"`
|
||||
} `json:"edges"`
|
||||
} `json:"vulnerabilityAlerts"`
|
||||
} `json:"repository"`
|
||||
} `json:"data"`
|
||||
}
|
||||
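Note: the pagination loop above injects the `after:` cursor into the GraphQL query by string formatting with escaped quotes. A minimal sketch of an alternative, assuming only the standard library: send the query with a GraphQL variables map so no manual escaping is needed. This is illustrative only; graphQLRequest and buildAlertsRequest are hypothetical names, not Vuls code.

package sketch

import (
	"bytes"
	"encoding/json"
)

type graphQLRequest struct {
	Query     string                 `json:"query"`
	Variables map[string]interface{} `json:"variables"`
}

// buildAlertsRequest returns the JSON body for one page of vulnerabilityAlerts.
// Pass after="" for the first page and the previous endCursor afterwards.
func buildAlertsRequest(owner, repo, after string) (*bytes.Buffer, error) {
	const query = `query($owner: String!, $repo: String!, $after: String) {
	  repository(owner: $owner, name: $repo) {
	    url
	    vulnerabilityAlerts(first: 100, states: [OPEN], after: $after) {
	      pageInfo { endCursor hasNextPage }
	    }
	  }
	}`
	vars := map[string]interface{}{"owner": owner, "repo": repo}
	if after != "" {
		vars["after"] = after
	}
	b, err := json.Marshal(graphQLRequest{Query: query, Variables: vars})
	if err != nil {
		return nil, err
	}
	return bytes.NewBuffer(b), nil
}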
@@ -1,245 +0,0 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
kevulndb "github.com/vulsio/go-kev/db"
|
||||
kevulnmodels "github.com/vulsio/go-kev/models"
|
||||
kevulnlog "github.com/vulsio/go-kev/utils"
|
||||
)
|
||||
|
||||
// goKEVulnDBClient is a DB Driver
|
||||
type goKEVulnDBClient struct {
|
||||
driver kevulndb.DB
|
||||
baseURL string
|
||||
}
|
||||
|
||||
// closeDB close a DB connection
|
||||
func (client goKEVulnDBClient) closeDB() error {
|
||||
if client.driver == nil {
|
||||
return nil
|
||||
}
|
||||
return client.driver.CloseDB()
|
||||
}
|
||||
|
||||
func newGoKEVulnDBClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goKEVulnDBClient, error) {
|
||||
if err := kevulnlog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to set go-kev logger. err: %w", err)
|
||||
}
|
||||
|
||||
db, err := newKEVulnDB(cnf)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to newKEVulnDB. err: %w", err)
|
||||
}
|
||||
return &goKEVulnDBClient{driver: db, baseURL: cnf.GetURL()}, nil
|
||||
}
|
||||
|
||||
// FillWithKEVuln :
|
||||
func FillWithKEVuln(r *models.ScanResult, cnf config.KEVulnConf, logOpts logging.LogOpts) error {
|
||||
client, err := newGoKEVulnDBClient(&cnf, logOpts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := client.closeDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
nKEV := 0
|
||||
if client.driver == nil {
|
||||
var cveIDs []string
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, err := util.URLPathJoin(client.baseURL, "cves")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
responses, err := getKEVulnsViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, res := range responses {
|
||||
kevulns := []kevulnmodels.KEVuln{}
|
||||
if err := json.Unmarshal([]byte(res.json), &kevulns); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
alerts := []models.Alert{}
|
||||
if len(kevulns) > 0 {
|
||||
alerts = append(alerts, models.Alert{
|
||||
Title: "Known Exploited Vulnerabilities Catalog",
|
||||
URL: "https://www.cisa.gov/known-exploited-vulnerabilities-catalog",
|
||||
Team: "cisa",
|
||||
})
|
||||
}
|
||||
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.AlertDict.CISA = alerts
|
||||
nKEV++
|
||||
}
|
||||
r.ScannedCves[res.request.cveID] = v
|
||||
}
|
||||
} else {
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
kevulns, err := client.driver.GetKEVulnByCveID(cveID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(kevulns) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
alerts := []models.Alert{}
|
||||
if len(kevulns) > 0 {
|
||||
alerts = append(alerts, models.Alert{
|
||||
Title: "Known Exploited Vulnerabilities Catalog",
|
||||
URL: "https://www.cisa.gov/known-exploited-vulnerabilities-catalog",
|
||||
Team: "cisa",
|
||||
})
|
||||
}
|
||||
|
||||
vuln.AlertDict.CISA = alerts
|
||||
nKEV++
|
||||
r.ScannedCves[cveID] = vuln
|
||||
}
|
||||
}
|
||||
|
||||
logging.Log.Infof("%s: Known Exploited Vulnerabilities are detected for %d CVEs", r.FormatServerName(), nKEV)
|
||||
return nil
|
||||
}
|
||||
|
||||
type kevulnResponse struct {
|
||||
request kevulnRequest
|
||||
json string
|
||||
}
|
||||
|
||||
func getKEVulnsViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []kevulnResponse, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan kevulnRequest, nReq)
|
||||
resChan := make(chan kevulnResponse, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- kevulnRequest{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
req := <-reqChan
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGetKEVuln(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching KEVuln")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch KEVuln. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type kevulnRequest struct {
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGetKEVuln(url string, req kevulnRequest, resChan chan<- kevulnResponse, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- kevulnResponse{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
|
||||
|
||||
func newKEVulnDB(cnf config.VulnDictInterface) (kevulndb.DB, error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
return nil, nil
|
||||
}
|
||||
path := cnf.GetURL()
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
driver, locked, err := kevulndb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), kevulndb.Option{})
|
||||
if err != nil {
|
||||
if locked {
|
||||
return nil, xerrors.Errorf("Failed to init kevuln DB. SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
|
||||
}
|
||||
return nil, xerrors.Errorf("Failed to init kevuln DB. DB Path: %s, err: %w", path, err)
|
||||
}
|
||||
return driver, nil
|
||||
}
|
||||
@@ -1,97 +0,0 @@
//go:build !scanner
// +build !scanner

package detector

import (
"context"

trivydb "github.com/aquasecurity/trivy-db/pkg/db"
"github.com/aquasecurity/trivy-db/pkg/metadata"
"github.com/aquasecurity/trivy/pkg/db"
"github.com/aquasecurity/trivy/pkg/log"
"golang.org/x/xerrors"

"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
)

// DetectLibsCves detects CVEs of libraries found by LibraryScanners and fills them into the ScanResult
func DetectLibsCves(r *models.ScanResult, cacheDir string, noProgress bool) (err error) {
totalCnt := 0
if len(r.LibraryScanners) == 0 {
return
}

// initialize trivy's logger and db
err = log.InitLogger(false, false)
if err != nil {
return err
}

logging.Log.Info("Updating library db...")
if err := downloadDB("", cacheDir, noProgress, false); err != nil {
return err
}

if err := trivydb.Init(cacheDir); err != nil {
return err
}
defer trivydb.Close()

for _, lib := range r.LibraryScanners {
vinfos, err := lib.Scan()
if err != nil {
return err
}
for _, vinfo := range vinfos {
vinfo.Confidences.AppendIfMissing(models.TrivyMatch)
if v, ok := r.ScannedCves[vinfo.CveID]; !ok {
r.ScannedCves[vinfo.CveID] = vinfo
} else {
v.LibraryFixedIns = append(v.LibraryFixedIns, vinfo.LibraryFixedIns...)
r.ScannedCves[vinfo.CveID] = v
}
}
totalCnt += len(vinfos)
}

logging.Log.Infof("%s: %d CVEs are detected with Library",
r.FormatServerName(), totalCnt)

return nil
}

func downloadDB(appVersion, cacheDir string, quiet, skipUpdate bool) error {
client := db.NewClient(cacheDir, quiet, false)
ctx := context.Background()
needsUpdate, err := client.NeedsUpdate(appVersion, skipUpdate)
if err != nil {
return xerrors.Errorf("database error: %w", err)
}

if needsUpdate {
logging.Log.Info("Need to update DB")
logging.Log.Info("Downloading DB...")
if err := client.Download(ctx, cacheDir); err != nil {
return xerrors.Errorf("failed to download vulnerability DB: %w", err)
}
}

// for debug
if err := showDBInfo(cacheDir); err != nil {
return xerrors.Errorf("failed to show database info: %w", err)
}
return nil
}

func showDBInfo(cacheDir string) error {
m := metadata.NewClient(cacheDir)
meta, err := m.Get()
if err != nil {
return xerrors.Errorf("something wrong with DB: %w", err)
}
log.Logger.Debugf("DB Schema: %d, UpdatedAt: %s, NextUpdate: %s, DownloadedAt: %s",
meta.Version, meta.UpdatedAt, meta.NextUpdate, meta.DownloadedAt)
return nil
}
244
detector/msf.go
@@ -1,244 +0,0 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
metasploitdb "github.com/vulsio/go-msfdb/db"
|
||||
metasploitmodels "github.com/vulsio/go-msfdb/models"
|
||||
metasploitlog "github.com/vulsio/go-msfdb/utils"
|
||||
)
|
||||
|
||||
// goMetasploitDBClient is a DB Driver
|
||||
type goMetasploitDBClient struct {
|
||||
driver metasploitdb.DB
|
||||
baseURL string
|
||||
}
|
||||
|
||||
// closeDB close a DB connection
|
||||
func (client goMetasploitDBClient) closeDB() error {
|
||||
if client.driver == nil {
|
||||
return nil
|
||||
}
|
||||
return client.driver.CloseDB()
|
||||
}
|
||||
|
||||
func newGoMetasploitDBClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goMetasploitDBClient, error) {
|
||||
if err := metasploitlog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to set go-msfdb logger. err: %w", err)
|
||||
}
|
||||
|
||||
db, err := newMetasploitDB(cnf)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to newMetasploitDB. err: %w", err)
|
||||
}
|
||||
return &goMetasploitDBClient{driver: db, baseURL: cnf.GetURL()}, nil
|
||||
}
|
||||
|
||||
// FillWithMetasploit fills in Metasploit module information for the detected CVEs
|
||||
func FillWithMetasploit(r *models.ScanResult, cnf config.MetasploitConf, logOpts logging.LogOpts) (nMetasploitCve int, err error) {
|
||||
client, err := newGoMetasploitDBClient(&cnf, logOpts)
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to newGoMetasploitDBClient. err: %w", err)
|
||||
}
|
||||
defer func() {
|
||||
if err := client.closeDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
if client.driver == nil {
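// A nil driver means go-msfdb is served over HTTP, so the CVE-IDs are resolved
// through its REST endpoint instead of a local DB.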
|
||||
var cveIDs []string
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, err := util.URLPathJoin(client.baseURL, "cves")
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to join URLPath. err: %w", err)
|
||||
}
|
||||
responses, err := getMetasploitsViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to get Metasploits via HTTP. err: %w", err)
|
||||
}
|
||||
for _, res := range responses {
|
||||
msfs := []metasploitmodels.Metasploit{}
|
||||
if err := json.Unmarshal([]byte(res.json), &msfs); err != nil {
|
||||
return 0, xerrors.Errorf("Failed to unmarshal json. err: %w", err)
|
||||
}
|
||||
metasploits := ConvertToModelsMsf(msfs)
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.Metasploits = metasploits
|
||||
}
|
||||
r.ScannedCves[res.request.cveID] = v
|
||||
nMetasploitCve++
|
||||
}
|
||||
} else {
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
ms, err := client.driver.GetModuleByCveID(cveID)
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to get Metasploits by CVE-ID. err: %w", err)
|
||||
}
|
||||
if len(ms) == 0 {
|
||||
continue
|
||||
}
|
||||
modules := ConvertToModelsMsf(ms)
|
||||
vuln.Metasploits = modules
|
||||
r.ScannedCves[cveID] = vuln
|
||||
nMetasploitCve++
|
||||
}
|
||||
}
|
||||
return nMetasploitCve, nil
|
||||
}
|
||||
|
||||
type metasploitResponse struct {
|
||||
request metasploitRequest
|
||||
json string
|
||||
}
|
||||
|
||||
func getMetasploitsViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []metasploitResponse, err error) {
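// Fan requests out to a pool of 10 workers over buffered channels and collect
// one response or error per CVE-ID, giving up after a two-minute timeout.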
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan metasploitRequest, nReq)
|
||||
resChan := make(chan metasploitResponse, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- metasploitRequest{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
req := <-reqChan
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGetMetasploit(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching Metasploit")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch Metasploit. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type metasploitRequest struct {
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGetMetasploit(url string, req metasploitRequest, resChan chan<- metasploitResponse, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
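// Note: returning nil once the retry cap is reached stops backoff.RetryNotify
// without an error; the caller detects the exhausted retries via the
// count == retryMax check after RetryNotify returns.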
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- metasploitResponse{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
|
||||
|
||||
// ConvertToModelsMsf converts metasploit model to vuls model
|
||||
func ConvertToModelsMsf(ms []metasploitmodels.Metasploit) (modules []models.Metasploit) {
|
||||
for _, m := range ms {
|
||||
var links []string
|
||||
if 0 < len(m.References) {
|
||||
for _, u := range m.References {
|
||||
links = append(links, u.Link)
|
||||
}
|
||||
}
|
||||
module := models.Metasploit{
|
||||
Name: m.Name,
|
||||
Title: m.Title,
|
||||
Description: m.Description,
|
||||
URLs: links,
|
||||
}
|
||||
modules = append(modules, module)
|
||||
}
|
||||
return modules
|
||||
}
|
||||
|
||||
func newMetasploitDB(cnf config.VulnDictInterface) (metasploitdb.DB, error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
return nil, nil
|
||||
}
|
||||
path := cnf.GetURL()
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
driver, locked, err := metasploitdb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), metasploitdb.Option{})
|
||||
if err != nil {
|
||||
if locked {
|
||||
return nil, xerrors.Errorf("Failed to init metasploit DB. SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
|
||||
}
|
||||
return nil, xerrors.Errorf("Failed to init metasploit DB. DB Path: %s, err: %w", path, err)
|
||||
}
|
||||
return driver, nil
|
||||
}
|
||||
269
detector/util.go
@@ -1,269 +0,0 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
func reuseScannedCves(r *models.ScanResult) bool {
|
||||
switch r.Family {
|
||||
case constant.FreeBSD, constant.Raspbian:
|
||||
return true
|
||||
}
|
||||
return r.ScannedBy == "trivy"
|
||||
}
|
||||
|
||||
func needToRefreshCve(r models.ScanResult) bool {
|
||||
for _, cve := range r.ScannedCves {
|
||||
if 0 < len(cve.CveContents) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func loadPrevious(currs models.ScanResults, resultsDir string) (prevs models.ScanResults, err error) {
|
||||
dirs, err := ListValidJSONDirs(resultsDir)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
for _, result := range currs {
|
||||
filename := result.ServerName + ".json"
|
||||
if result.Container.Name != "" {
|
||||
filename = fmt.Sprintf("%s@%s.json", result.Container.Name, result.ServerName)
|
||||
}
|
||||
for _, dir := range dirs[1:] {
|
||||
path := filepath.Join(dir, filename)
|
||||
r, err := loadOneServerScanResult(path)
|
||||
if err != nil {
|
||||
logging.Log.Debugf("%+v", err)
|
||||
continue
|
||||
}
|
||||
if r.Family == result.Family && r.Release == result.Release {
|
||||
prevs = append(prevs, *r)
|
||||
logging.Log.Infof("Previous json found: %s", path)
|
||||
break
|
||||
}
|
||||
logging.Log.Infof("Previous json is different family.Release: %s, pre: %s.%s cur: %s.%s",
|
||||
path, r.Family, r.Release, result.Family, result.Release)
|
||||
}
|
||||
}
|
||||
return prevs, nil
|
||||
}
|
||||
|
||||
func diff(curResults, preResults models.ScanResults, isPlus, isMinus bool) (diffed models.ScanResults) {
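// For each server, reduce the result to the CVEs that are new or updated since
// the previous scan (isPlus) and/or the CVEs that have disappeared (isMinus),
// and narrow Packages down to the packages those CVEs affect.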
|
||||
for _, current := range curResults {
|
||||
found := false
|
||||
var previous models.ScanResult
|
||||
for _, r := range preResults {
|
||||
if current.ServerName == r.ServerName && current.Container.Name == r.Container.Name {
|
||||
found = true
|
||||
previous = r
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !found {
|
||||
diffed = append(diffed, current)
|
||||
continue
|
||||
}
|
||||
|
||||
cves := models.VulnInfos{}
|
||||
if isPlus {
|
||||
cves = getPlusDiffCves(previous, current)
|
||||
}
|
||||
if isMinus {
|
||||
minus := getMinusDiffCves(previous, current)
|
||||
if len(cves) == 0 {
|
||||
cves = minus
|
||||
} else {
|
||||
for k, v := range minus {
|
||||
cves[k] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
packages := models.Packages{}
|
||||
for _, s := range cves {
|
||||
for _, affected := range s.AffectedPackages {
|
||||
var p models.Package
|
||||
if s.DiffStatus == models.DiffPlus {
|
||||
p = current.Packages[affected.Name]
|
||||
} else {
|
||||
p = previous.Packages[affected.Name]
|
||||
}
|
||||
packages[affected.Name] = p
|
||||
}
|
||||
}
|
||||
current.ScannedCves = cves
|
||||
current.Packages = packages
|
||||
diffed = append(diffed, current)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func getPlusDiffCves(previous, current models.ScanResult) models.VulnInfos {
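// Returns CVEs that are either newly detected in the current scan or whose
// content has been refreshed since the previous scan, both marked DiffPlus.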
|
||||
previousCveIDsSet := map[string]bool{}
|
||||
for _, previousVulnInfo := range previous.ScannedCves {
|
||||
previousCveIDsSet[previousVulnInfo.CveID] = true
|
||||
}
|
||||
|
||||
newer := models.VulnInfos{}
|
||||
updated := models.VulnInfos{}
|
||||
for _, v := range current.ScannedCves {
|
||||
if previousCveIDsSet[v.CveID] {
|
||||
if isCveInfoUpdated(v.CveID, previous, current) {
|
||||
v.DiffStatus = models.DiffPlus
|
||||
updated[v.CveID] = v
|
||||
logging.Log.Debugf("updated: %s", v.CveID)
|
||||
|
||||
// TODO commented out because a bug of diff logic when multiple oval defs found for a certain CVE-ID and same updated_at
|
||||
// if these OVAL defs have different affected packages, this logic detects as updated.
|
||||
// This logic will be uncomented after integration with gost https://github.com/vulsio/gost
|
||||
// } else if isCveFixed(v, previous) {
|
||||
// updated[v.CveID] = v
|
||||
// logging.Log.Debugf("fixed: %s", v.CveID)
|
||||
|
||||
} else {
|
||||
logging.Log.Debugf("same: %s", v.CveID)
|
||||
}
|
||||
} else {
|
||||
logging.Log.Debugf("newer: %s", v.CveID)
|
||||
v.DiffStatus = models.DiffPlus
|
||||
newer[v.CveID] = v
|
||||
}
|
||||
}
|
||||
|
||||
if len(updated) == 0 && len(newer) == 0 {
|
||||
logging.Log.Infof("%s: There are %d vulnerabilities, but no difference between current result and previous one.", current.FormatServerName(), len(current.ScannedCves))
|
||||
}
|
||||
|
||||
for cveID, vuln := range newer {
|
||||
updated[cveID] = vuln
|
||||
}
|
||||
return updated
|
||||
}
|
||||
|
||||
func getMinusDiffCves(previous, current models.ScanResult) models.VulnInfos {
|
||||
currentCveIDsSet := map[string]bool{}
|
||||
for _, currentVulnInfo := range current.ScannedCves {
|
||||
currentCveIDsSet[currentVulnInfo.CveID] = true
|
||||
}
|
||||
|
||||
clear := models.VulnInfos{}
|
||||
for _, v := range previous.ScannedCves {
|
||||
if !currentCveIDsSet[v.CveID] {
|
||||
v.DiffStatus = models.DiffMinus
|
||||
clear[v.CveID] = v
|
||||
logging.Log.Debugf("clear: %s", v.CveID)
|
||||
}
|
||||
}
|
||||
if len(clear) == 0 {
|
||||
logging.Log.Infof("%s: There are %d vulnerabilities, but no difference between current result and previous one.", current.FormatServerName(), len(current.ScannedCves))
|
||||
}
|
||||
|
||||
return clear
|
||||
}
|
||||
|
||||
func isCveInfoUpdated(cveID string, previous, current models.ScanResult) bool {
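// A CVE is treated as updated when the LastModified timestamps of its NVD, JVN,
// or distro-specific CveContents differ between the previous and current results.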
|
||||
cTypes := []models.CveContentType{
|
||||
models.Nvd,
|
||||
models.Jvn,
|
||||
models.NewCveContentType(current.Family),
|
||||
}
|
||||
|
||||
prevLastModified := map[models.CveContentType][]time.Time{}
|
||||
preVinfo, ok := previous.ScannedCves[cveID]
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
for _, cType := range cTypes {
|
||||
if conts, ok := preVinfo.CveContents[cType]; ok {
|
||||
for _, cont := range conts {
|
||||
prevLastModified[cType] = append(prevLastModified[cType], cont.LastModified)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
curLastModified := map[models.CveContentType][]time.Time{}
|
||||
curVinfo, ok := current.ScannedCves[cveID]
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
for _, cType := range cTypes {
|
||||
if conts, ok := curVinfo.CveContents[cType]; ok {
|
||||
for _, cont := range conts {
|
||||
curLastModified[cType] = append(curLastModified[cType], cont.LastModified)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, t := range cTypes {
|
||||
if !reflect.DeepEqual(curLastModified[t], prevLastModified[t]) {
|
||||
logging.Log.Debugf("%s LastModified not equal: \n%s\n%s",
|
||||
cveID, curLastModified[t], prevLastModified[t])
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// jsonDirPattern is file name pattern of JSON directory
|
||||
// 2016-11-16T10:43:28+09:00
|
||||
// 2016-11-16T10:43:28Z
|
||||
var jsonDirPattern = regexp.MustCompile(
|
||||
`^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:Z|[+-]\d{2}:\d{2})$`)
|
||||
|
||||
// ListValidJSONDirs returns valid json directory as array
|
||||
// Returned array is sorted so that recent directories are at the head
|
||||
func ListValidJSONDirs(resultsDir string) (dirs []string, err error) {
|
||||
var dirInfo []fs.DirEntry
|
||||
if dirInfo, err = os.ReadDir(resultsDir); err != nil {
|
||||
err = xerrors.Errorf("Failed to read %s: %w",
|
||||
config.Conf.ResultsDir, err)
|
||||
return
|
||||
}
|
||||
for _, d := range dirInfo {
|
||||
if d.IsDir() && jsonDirPattern.MatchString(d.Name()) {
|
||||
jsonDir := filepath.Join(resultsDir, d.Name())
|
||||
dirs = append(dirs, jsonDir)
|
||||
}
|
||||
}
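// Sort in descending lexicographic order; the directory names are RFC 3339
// timestamps, so the most recent scan results come first.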
|
||||
sort.Slice(dirs, func(i, j int) bool {
|
||||
return dirs[j] < dirs[i]
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// loadOneServerScanResult read JSON data of one server
|
||||
func loadOneServerScanResult(jsonFile string) (*models.ScanResult, error) {
|
||||
var (
|
||||
data []byte
|
||||
err error
|
||||
)
|
||||
if data, err = os.ReadFile(jsonFile); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to read %s: %w", jsonFile, err)
|
||||
}
|
||||
result := &models.ScanResult{}
|
||||
if err := json.Unmarshal(data, result); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to parse %s: %w", jsonFile, err)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
@@ -1,273 +0,0 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/errof"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
version "github.com/hashicorp/go-version"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
//WpCveInfos is for wpscan json
|
||||
type WpCveInfos struct {
|
||||
ReleaseDate string `json:"release_date"`
|
||||
ChangelogURL string `json:"changelog_url"`
|
||||
// Status string `json:"status"`
|
||||
LatestVersion string `json:"latest_version"`
|
||||
LastUpdated string `json:"last_updated"`
|
||||
// Popular bool `json:"popular"`
|
||||
Vulnerabilities []WpCveInfo `json:"vulnerabilities"`
|
||||
Error string `json:"error"`
|
||||
}
|
||||
|
||||
//WpCveInfo is for wpscan json
|
||||
type WpCveInfo struct {
|
||||
ID string `json:"id"`
|
||||
Title string `json:"title"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
VulnType string `json:"vuln_type"`
|
||||
References References `json:"references"`
|
||||
FixedIn string `json:"fixed_in"`
|
||||
}
|
||||
|
||||
//References is for wpscan json
|
||||
type References struct {
|
||||
URL []string `json:"url"`
|
||||
Cve []string `json:"cve"`
|
||||
Secunia []string `json:"secunia"`
|
||||
}
|
||||
|
||||
// detectWordPressCves accesses wpscan.com, fetches security alerts, and sets them on the given ScanResult.
|
||||
// https://wpscan.com/
|
||||
func detectWordPressCves(r *models.ScanResult, cnf config.WpScanConf) (int, error) {
|
||||
if len(r.WordPressPackages) == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
// Core
|
||||
ver := strings.Replace(r.WordPressPackages.CoreVersion(), ".", "", -1)
|
||||
if ver == "" {
|
||||
return 0, errof.New(errof.ErrFailedToAccessWpScan,
"Failed to get WordPress core version.")
|
||||
}
|
||||
url := fmt.Sprintf("https://wpscan.com/api/v3/wordpresses/%s", ver)
|
||||
wpVinfos, err := wpscan(url, ver, cnf.Token, true)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// Themes
|
||||
themes := r.WordPressPackages.Themes()
|
||||
if !cnf.DetectInactive {
|
||||
themes = removeInactives(themes)
|
||||
}
|
||||
for _, p := range themes {
|
||||
url := fmt.Sprintf("https://wpscan.com/api/v3/themes/%s", p.Name)
|
||||
candidates, err := wpscan(url, p.Name, cnf.Token, false)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
vulns := detect(p, candidates)
|
||||
wpVinfos = append(wpVinfos, vulns...)
|
||||
}
|
||||
|
||||
// Plugins
|
||||
plugins := r.WordPressPackages.Plugins()
|
||||
if !cnf.DetectInactive {
|
||||
plugins = removeInactives(plugins)
|
||||
}
|
||||
for _, p := range plugins {
|
||||
url := fmt.Sprintf("https://wpscan.com/api/v3/plugins/%s", p.Name)
|
||||
candidates, err := wpscan(url, p.Name, cnf.Token, false)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
vulns := detect(p, candidates)
|
||||
wpVinfos = append(wpVinfos, vulns...)
|
||||
}
|
||||
|
||||
for _, wpVinfo := range wpVinfos {
|
||||
if vinfo, ok := r.ScannedCves[wpVinfo.CveID]; ok {
|
||||
vinfo.CveContents[models.WpScan] = wpVinfo.CveContents[models.WpScan]
|
||||
vinfo.VulnType = wpVinfo.VulnType
|
||||
vinfo.Confidences = append(vinfo.Confidences, wpVinfo.Confidences...)
|
||||
vinfo.WpPackageFixStats = append(vinfo.WpPackageFixStats, wpVinfo.WpPackageFixStats...)
|
||||
r.ScannedCves[wpVinfo.CveID] = vinfo
|
||||
} else {
|
||||
r.ScannedCves[wpVinfo.CveID] = wpVinfo
|
||||
}
|
||||
}
|
||||
return len(wpVinfos), nil
|
||||
}
|
||||
|
||||
func wpscan(url, name, token string, isCore bool) (vinfos []models.VulnInfo, err error) {
|
||||
body, err := httpRequest(url, token)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if body == "" {
|
||||
logging.Log.Debugf("wpscan.com response body is empty. URL: %s", url)
|
||||
}
|
||||
if isCore {
|
||||
name = "core"
|
||||
}
|
||||
return convertToVinfos(name, body)
|
||||
}
|
||||
|
||||
func detect(installed models.WpPackage, candidates []models.VulnInfo) (vulns []models.VulnInfo) {
|
||||
for _, v := range candidates {
|
||||
for _, fixstat := range v.WpPackageFixStats {
|
||||
ok, err := match(installed.Version, fixstat.FixedIn)
|
||||
if err != nil {
|
||||
logging.Log.Warnf("Failed to compare versions %s installed: %s, fixedIn: %s, v: %+v",
|
||||
installed.Name, installed.Version, fixstat.FixedIn, v)
|
||||
// continue scanning
|
||||
continue
|
||||
}
|
||||
if ok {
|
||||
vulns = append(vulns, v)
|
||||
logging.Log.Debugf("Affected: %s installed: %s, fixedIn: %s",
|
||||
installed.Name, installed.Version, fixstat.FixedIn)
|
||||
} else {
|
||||
logging.Log.Debugf("Not affected: %s : %s, fixedIn: %s",
|
||||
installed.Name, installed.Version, fixstat.FixedIn)
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func match(installedVer, fixedIn string) (bool, error) {
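// The installed package is considered affected when its version is strictly
// older than the version the vulnerability was fixed in.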
|
||||
v1, err := version.NewVersion(installedVer)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
v2, err := version.NewVersion(fixedIn)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return v1.LessThan(v2), nil
|
||||
}
|
||||
|
||||
func convertToVinfos(pkgName, body string) (vinfos []models.VulnInfo, err error) {
|
||||
if body == "" {
|
||||
return
|
||||
}
|
||||
// "pkgName" : CVE Detailed data
|
||||
pkgnameCves := map[string]WpCveInfos{}
|
||||
if err = json.Unmarshal([]byte(body), &pkgnameCves); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to unmarshal %s. err: %w", body, err)
|
||||
}
|
||||
|
||||
for _, v := range pkgnameCves {
|
||||
vs := extractToVulnInfos(pkgName, v.Vulnerabilities)
|
||||
vinfos = append(vinfos, vs...)
|
||||
}
|
||||
return vinfos, nil
|
||||
}
|
||||
|
||||
func extractToVulnInfos(pkgName string, cves []WpCveInfo) (vinfos []models.VulnInfo) {
|
||||
for _, vulnerability := range cves {
|
||||
var cveIDs []string
|
||||
|
||||
if len(vulnerability.References.Cve) == 0 {
|
||||
cveIDs = append(cveIDs, fmt.Sprintf("WPVDBID-%s", vulnerability.ID))
|
||||
}
|
||||
for _, cveNumber := range vulnerability.References.Cve {
|
||||
cveIDs = append(cveIDs, "CVE-"+cveNumber)
|
||||
}
|
||||
|
||||
var refs []models.Reference
|
||||
for _, url := range vulnerability.References.URL {
|
||||
refs = append(refs, models.Reference{
|
||||
Link: url,
|
||||
})
|
||||
}
|
||||
|
||||
for _, cveID := range cveIDs {
|
||||
vinfos = append(vinfos, models.VulnInfo{
|
||||
CveID: cveID,
|
||||
CveContents: models.NewCveContents(
|
||||
models.CveContent{
|
||||
Type: models.WpScan,
|
||||
CveID: cveID,
|
||||
Title: vulnerability.Title,
|
||||
References: refs,
|
||||
Published: vulnerability.CreatedAt,
|
||||
LastModified: vulnerability.UpdatedAt,
|
||||
},
|
||||
),
|
||||
VulnType: vulnerability.VulnType,
|
||||
Confidences: []models.Confidence{
|
||||
models.WpScanMatch,
|
||||
},
|
||||
WpPackageFixStats: []models.WpPackageFixStatus{{
|
||||
Name: pkgName,
|
||||
FixedIn: vulnerability.FixedIn,
|
||||
}},
|
||||
})
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func httpRequest(url, token string) (string, error) {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
|
||||
defer cancel()
|
||||
if err != nil {
|
||||
return "", errof.New(errof.ErrFailedToAccessWpScan,
|
||||
fmt.Sprintf("Failed to access to wpscan.com. err: %s", err))
|
||||
}
|
||||
req.Header.Set("Authorization", fmt.Sprintf("Token token=%s", token))
|
||||
client, err := util.GetHTTPClient(config.Conf.HTTPProxy)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return "", errof.New(errof.ErrFailedToAccessWpScan,
|
||||
fmt.Sprintf("Failed to access to wpscan.com. err: %s", err))
|
||||
}
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return "", errof.New(errof.ErrFailedToAccessWpScan,
|
||||
fmt.Sprintf("Failed to access to wpscan.com. err: %s", err))
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode == 200 {
|
||||
return string(body), nil
|
||||
} else if resp.StatusCode == 404 {
|
||||
// This package is not in wpscan
|
||||
return "", nil
|
||||
} else if resp.StatusCode == 429 {
|
||||
return "", errof.New(errof.ErrWpScanAPILimitExceeded,
|
||||
fmt.Sprintf("wpscan.com API limit exceeded: %+v", resp.Status))
|
||||
} else {
|
||||
logging.Log.Warnf("wpscan.com unknown status code: %+v", resp.Status)
|
||||
return "", nil
|
||||
}
|
||||
}
|
||||
|
||||
func removeInactives(pkgs models.WordPressPackages) (removed models.WordPressPackages) {
|
||||
for _, p := range pkgs {
|
||||
if p.Status == "inactive" {
|
||||
continue
|
||||
}
|
||||
removed = append(removed, p)
|
||||
}
|
||||
return removed
|
||||
}
|
||||
@@ -1,84 +0,0 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
func TestRemoveInactive(t *testing.T) {
|
||||
var tests = []struct {
|
||||
in models.WordPressPackages
|
||||
expected models.WordPressPackages
|
||||
}{
|
||||
{
|
||||
in: models.WordPressPackages{
|
||||
{
|
||||
Name: "akismet",
|
||||
Status: "inactive",
|
||||
Update: "",
|
||||
Version: "",
|
||||
Type: "",
|
||||
},
|
||||
},
|
||||
expected: nil,
|
||||
},
|
||||
{
|
||||
in: models.WordPressPackages{
|
||||
{
|
||||
Name: "akismet",
|
||||
Status: "inactive",
|
||||
Update: "",
|
||||
Version: "",
|
||||
Type: "",
|
||||
},
|
||||
{
|
||||
Name: "BackWPup",
|
||||
Status: "inactive",
|
||||
Update: "",
|
||||
Version: "",
|
||||
Type: "",
|
||||
},
|
||||
},
|
||||
expected: nil,
|
||||
},
|
||||
{
|
||||
in: models.WordPressPackages{
|
||||
{
|
||||
Name: "akismet",
|
||||
Status: "active",
|
||||
Update: "",
|
||||
Version: "",
|
||||
Type: "",
|
||||
},
|
||||
{
|
||||
Name: "BackWPup",
|
||||
Status: "inactive",
|
||||
Update: "",
|
||||
Version: "",
|
||||
Type: "",
|
||||
},
|
||||
},
|
||||
expected: models.WordPressPackages{
|
||||
{
|
||||
Name: "akismet",
|
||||
Status: "active",
|
||||
Update: "",
|
||||
Version: "",
|
||||
Type: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
actual := removeInactives(tt.in)
|
||||
if !reflect.DeepEqual(actual, tt.expected) {
|
||||
t.Errorf("[%d] WordPressPackages error ", i)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,33 +0,0 @@
package errof

// ErrorCode is vuls error code
type ErrorCode string

// Error is vuls error
type Error struct {
	Code    ErrorCode
	Message string
}

func (e Error) Error() string {
	return e.Message
}

var (
	// ErrFailedToAccessGithubAPI is error of github alert's api access
	ErrFailedToAccessGithubAPI ErrorCode = "ErrFailedToAccessGithubAPI"

	// ErrFailedToAccessWpScan is error of wpscan.com api access
	ErrFailedToAccessWpScan ErrorCode = "ErrFailedToAccessWpScan"

	// ErrWpScanAPILimitExceeded is error of wpscan.com api limit exceeded
	ErrWpScanAPILimitExceeded ErrorCode = "ErrWpScanAPILimitExceeded"
)

// New creates a vuls Error with the given code and message
func New(code ErrorCode, msg string) Error {
	return Error{
		Code:    code,
		Message: msg,
	}
}
215
go.mod
@@ -1,206 +1,75 @@
|
||||
module github.com/future-architect/vuls
|
||||
|
||||
go 1.18
|
||||
go 1.19
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go v66.0.0+incompatible
|
||||
github.com/BurntSushi/toml v1.2.0
|
||||
github.com/CycloneDX/cyclonedx-go v0.7.0
|
||||
github.com/Ullaakut/nmap/v2 v2.1.2-0.20210406060955-59a52fe80a4f
|
||||
github.com/aquasecurity/go-dep-parser v0.0.0-20221024082335-60502daef4ba
|
||||
github.com/aquasecurity/trivy v0.33.0
|
||||
github.com/aquasecurity/trivy-db v0.0.0-20220627104749-930461748b63
|
||||
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d
|
||||
github.com/aws/aws-sdk-go v1.44.114
|
||||
github.com/c-robinson/iplib v1.0.3
|
||||
github.com/cenkalti/backoff v2.2.1+incompatible
|
||||
github.com/d4l3k/messagediff v1.2.2-0.20190829033028-7e0a312ae40b
|
||||
github.com/emersion/go-sasl v0.0.0-20200509203442-7bfe0ed36a21
|
||||
github.com/emersion/go-smtp v0.14.0
|
||||
github.com/google/subcommands v1.2.0
|
||||
github.com/MakeNowJust/heredoc v1.0.0
|
||||
github.com/go-redis/redis/v9 v9.0.0-rc.1
|
||||
github.com/google/uuid v1.3.0
|
||||
github.com/gosuri/uitable v0.0.4
|
||||
github.com/hashicorp/go-uuid v1.0.3
|
||||
github.com/hashicorp/go-version v1.6.0
|
||||
github.com/jesseduffield/gocui v0.3.0
|
||||
github.com/k0kubun/pp v3.0.1+incompatible
|
||||
github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f
|
||||
github.com/knqyf263/go-cpe v0.0.0-20201213041631-54f6ab28673f
|
||||
github.com/knqyf263/go-deb-version v0.0.0-20190517075300-09fca494f03d
|
||||
github.com/knqyf263/go-rpm-version v0.0.0-20220614171824-631e686d1075
|
||||
github.com/kotakanbe/go-pingscanner v0.1.0
|
||||
github.com/kotakanbe/logrus-prefixed-formatter v0.0.0-20180123152602-928f7356cb96
|
||||
github.com/mitchellh/go-homedir v1.1.0
|
||||
github.com/nlopes/slack v0.6.0
|
||||
github.com/labstack/echo/v4 v4.9.1
|
||||
github.com/olekukonko/tablewriter v0.0.5
|
||||
github.com/package-url/packageurl-go v0.1.1-0.20220203205134-d70459300c8a
|
||||
github.com/parnurzeal/gorequest v0.2.16
|
||||
github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5
|
||||
github.com/sirupsen/logrus v1.9.0
|
||||
github.com/spf13/cobra v1.6.0
|
||||
github.com/vulsio/go-cti v0.0.2-0.20220613013115-8c7e57a6aa86
|
||||
github.com/vulsio/go-cve-dictionary v0.8.2
|
||||
github.com/vulsio/go-exploitdb v0.4.2
|
||||
github.com/vulsio/go-kev v0.1.1-0.20220118062020-5f69b364106f
|
||||
github.com/vulsio/go-msfdb v0.2.1-0.20211028071756-4a9759bd9f14
|
||||
github.com/vulsio/gost v0.4.2-0.20220630181607-2ed593791ec3
|
||||
github.com/vulsio/goval-dictionary v0.8.0
|
||||
github.com/opencontainers/image-spec v1.1.0-rc2
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/spf13/cobra v1.6.1
|
||||
github.com/ulikunitz/xz v0.5.10
|
||||
go.etcd.io/bbolt v1.3.6
|
||||
golang.org/x/exp v0.0.0-20220823124025-807a23277127
|
||||
golang.org/x/oauth2 v0.1.0
|
||||
golang.org/x/sync v0.1.0
|
||||
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f
|
||||
go.uber.org/zap v1.13.0
|
||||
golang.org/x/exp v0.0.0-20221106115401-f9659909a136
|
||||
gorm.io/driver/mysql v1.4.3
|
||||
gorm.io/driver/postgres v1.4.5
|
||||
gorm.io/gorm v1.24.1
|
||||
modernc.org/sqlite v1.19.4
|
||||
oras.land/oras-go/v2 v2.0.0-rc.4
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.103.0 // indirect
|
||||
cloud.google.com/go/compute v1.10.0 // indirect
|
||||
cloud.google.com/go/iam v0.3.0 // indirect
|
||||
cloud.google.com/go/storage v1.23.0 // indirect
|
||||
github.com/Azure/go-autorest v14.2.0+incompatible // indirect
|
||||
github.com/Azure/go-autorest/autorest v0.11.28 // indirect
|
||||
github.com/Azure/go-autorest/autorest/adal v0.9.21 // indirect
|
||||
github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
|
||||
github.com/Azure/go-autorest/autorest/to v0.3.0 // indirect
|
||||
github.com/Azure/go-autorest/logger v0.2.1 // indirect
|
||||
github.com/Azure/go-autorest/tracing v0.6.0 // indirect
|
||||
github.com/Microsoft/go-winio v0.6.0 // indirect
|
||||
github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7 // indirect
|
||||
github.com/PuerkitoBio/goquery v1.6.1 // indirect
|
||||
github.com/VividCortex/ewma v1.2.0 // indirect
|
||||
github.com/acomagu/bufpipe v1.0.3 // indirect
|
||||
github.com/andybalholm/cascadia v1.2.0 // indirect
|
||||
github.com/aquasecurity/go-gem-version v0.0.0-20201115065557-8eed6fe000ce // indirect
|
||||
github.com/aquasecurity/go-npm-version v0.0.0-20201110091526-0b796d180798 // indirect
|
||||
github.com/aquasecurity/go-pep440-version v0.0.0-20210121094942-22b2f8951d46 // indirect
|
||||
github.com/aquasecurity/go-version v0.0.0-20210121072130-637058cfe492 // indirect
|
||||
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect
|
||||
github.com/briandowns/spinner v1.18.1 // indirect
|
||||
github.com/caarlos0/env/v6 v6.10.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.1.2 // indirect
|
||||
github.com/cheggaaa/pb/v3 v3.1.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/dgryski/go-minhash v0.0.0-20170608043002-7fe510aff544 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/dnaeon/go-vcr v1.2.0 // indirect
|
||||
github.com/docker/cli v20.10.20+incompatible // indirect
|
||||
github.com/docker/distribution v2.8.1+incompatible // indirect
|
||||
github.com/docker/docker v20.10.20+incompatible // indirect
|
||||
github.com/docker/docker-credential-helpers v0.7.0 // indirect
|
||||
github.com/ekzhu/minhash-lsh v0.0.0-20171225071031-5c06ee8586a1 // indirect
|
||||
github.com/emirpasic/gods v1.12.0 // indirect
|
||||
github.com/fatih/color v1.13.0 // indirect
|
||||
github.com/fsnotify/fsnotify v1.5.4 // indirect
|
||||
github.com/go-enry/go-license-detector/v4 v4.3.0 // indirect
|
||||
github.com/go-git/gcfg v1.5.0 // indirect
|
||||
github.com/go-git/go-billy/v5 v5.3.1 // indirect
|
||||
github.com/go-git/go-git/v5 v5.4.2 // indirect
|
||||
github.com/go-redis/redis/v8 v8.11.5 // indirect
|
||||
github.com/go-sql-driver/mysql v1.6.0 // indirect
|
||||
github.com/go-stack/stack v1.8.1 // indirect
|
||||
github.com/gofrs/uuid v4.0.0+incompatible // indirect
|
||||
github.com/golang-jwt/jwt/v4 v4.2.0 // indirect
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
|
||||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/google/go-cmp v0.5.9 // indirect
|
||||
github.com/google/go-containerregistry v0.12.0 // indirect
|
||||
github.com/google/licenseclassifier/v2 v2.0.0-pre6 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.1.0 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.5.1 // indirect
|
||||
github.com/googleapis/go-type-adapters v1.0.0 // indirect
|
||||
github.com/gorilla/websocket v1.4.2 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||
github.com/hashicorp/go-getter v1.6.2 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/go-retryablehttp v0.7.1 // indirect
|
||||
github.com/hashicorp/go-safetemp v1.0.0 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/hhatto/gorst v0.0.0-20181029133204-ca9f730cac5b // indirect
|
||||
github.com/imdario/mergo v0.3.13 // indirect
|
||||
github.com/inconshreveable/log15 v0.0.0-20201112154412-8562bdadbbac // indirect
|
||||
github.com/gofrs/uuid v4.2.0+incompatible // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.1 // indirect
|
||||
github.com/jackc/chunkreader/v2 v2.0.1 // indirect
|
||||
github.com/jackc/pgconn v1.12.1 // indirect
|
||||
github.com/jackc/pgconn v1.13.0 // indirect
|
||||
github.com/jackc/pgio v1.0.0 // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgproto3/v2 v2.3.0 // indirect
|
||||
github.com/jackc/pgproto3/v2 v2.3.1 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
|
||||
github.com/jackc/pgtype v1.11.0 // indirect
|
||||
github.com/jackc/pgx/v4 v4.16.1 // indirect
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
|
||||
github.com/jdkato/prose v1.1.0 // indirect
|
||||
github.com/jackc/pgtype v1.12.0 // indirect
|
||||
github.com/jackc/pgx/v4 v4.17.2 // indirect
|
||||
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||
github.com/jinzhu/now v1.1.5 // indirect
|
||||
github.com/jmespath/go-jmespath v0.4.0 // indirect
|
||||
github.com/kevinburke/ssh_config v1.1.0 // indirect
|
||||
github.com/klauspost/compress v1.15.11 // indirect
|
||||
github.com/liamg/jfather v0.0.7 // indirect
|
||||
github.com/magiconair/properties v1.8.6 // indirect
|
||||
github.com/masahiro331/go-mvn-version v0.0.0-20210429150710-d3157d602a08 // indirect
|
||||
github.com/mattn/go-colorable v0.1.12 // indirect
|
||||
github.com/mattn/go-isatty v0.0.14 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.13 // indirect
|
||||
github.com/mattn/go-sqlite3 v1.14.14 // indirect
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
|
||||
github.com/mitchellh/go-testing-interface v1.0.0 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect
|
||||
github.com/nsf/termbox-go v1.1.1 // indirect
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
|
||||
github.com/labstack/gommon v0.4.0 // indirect
|
||||
github.com/lib/pq v1.10.6 // indirect
|
||||
github.com/mattn/go-colorable v0.1.11 // indirect
|
||||
github.com/mattn/go-isatty v0.0.16 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.9 // indirect
|
||||
github.com/opencontainers/go-digest v1.0.0 // indirect
|
||||
github.com/opencontainers/image-spec v1.1.0-rc2 // indirect
|
||||
github.com/pelletier/go-toml v1.9.5 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.0.5 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/rivo/uniseg v0.3.1 // indirect
|
||||
github.com/rogpeppe/go-internal v1.8.1 // indirect
|
||||
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
||||
github.com/samber/lo v1.28.2 // indirect
|
||||
github.com/sergi/go-diff v1.2.0 // indirect
|
||||
github.com/shogo82148/go-shuffle v0.0.0-20170808115208-59829097ff3b // indirect
|
||||
github.com/spdx/tools-golang v0.3.0 // indirect
|
||||
github.com/spf13/afero v1.9.2 // indirect
|
||||
github.com/spf13/cast v1.5.0 // indirect
|
||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/spf13/viper v1.13.0 // indirect
|
||||
github.com/stretchr/objx v0.4.0 // indirect
|
||||
github.com/stretchr/testify v1.8.0 // indirect
|
||||
github.com/subosito/gotenv v1.4.1 // indirect
|
||||
github.com/ulikunitz/xz v0.5.10 // indirect
|
||||
github.com/xanzy/ssh-agent v0.3.0 // indirect
|
||||
go.opencensus.io v0.23.0 // indirect
|
||||
go.uber.org/atomic v1.9.0 // indirect
|
||||
go.uber.org/goleak v1.1.12 // indirect
|
||||
go.uber.org/multierr v1.8.0 // indirect
|
||||
go.uber.org/zap v1.23.0 // indirect
|
||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||
github.com/valyala/fasttemplate v1.2.1 // indirect
|
||||
go.uber.org/atomic v1.6.0 // indirect
|
||||
go.uber.org/multierr v1.5.0 // indirect
|
||||
golang.org/x/crypto v0.1.0 // indirect
|
||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de // indirect
|
||||
golang.org/x/mod v0.6.0 // indirect
|
||||
golang.org/x/net v0.1.0 // indirect
|
||||
golang.org/x/sync v0.1.0 // indirect
|
||||
golang.org/x/sys v0.1.0 // indirect
|
||||
golang.org/x/term v0.1.0 // indirect
|
||||
golang.org/x/text v0.4.0 // indirect
|
||||
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 // indirect
|
||||
golang.org/x/tools v0.2.0 // indirect
|
||||
gonum.org/v1/gonum v0.7.0 // indirect
|
||||
google.golang.org/api v0.98.0 // indirect
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
google.golang.org/genproto v0.0.0-20221018160656-63c7b68cfc55 // indirect
|
||||
google.golang.org/grpc v1.50.1 // indirect
|
||||
google.golang.org/protobuf v1.28.1 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
gopkg.in/neurosnap/sentences.v1 v1.0.6 // indirect
|
||||
gopkg.in/warnings.v0 v0.1.2 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
gorm.io/driver/mysql v1.3.5 // indirect
|
||||
gorm.io/driver/postgres v1.3.8 // indirect
|
||||
gorm.io/driver/sqlite v1.3.6 // indirect
|
||||
gorm.io/gorm v1.23.8 // indirect
|
||||
k8s.io/utils v0.0.0-20220728103510-ee6ede2d64ed // indirect
|
||||
moul.io/http2curl v1.0.0 // indirect
|
||||
lukechampine.com/uint128 v1.2.0 // indirect
|
||||
modernc.org/cc/v3 v3.40.0 // indirect
|
||||
modernc.org/ccgo/v3 v3.16.13 // indirect
|
||||
modernc.org/libc v1.21.4 // indirect
|
||||
modernc.org/mathutil v1.5.0 // indirect
|
||||
modernc.org/memory v1.4.0 // indirect
|
||||
modernc.org/opt v0.1.3 // indirect
|
||||
modernc.org/strutil v1.1.3 // indirect
|
||||
modernc.org/token v1.0.1 // indirect
|
||||
)
|
||||
|
||||
// See https://github.com/moby/moby/issues/42939#issuecomment-1114255529
|
||||
replace github.com/docker/docker => github.com/docker/docker v20.10.3-0.20220224222438-c78f6963a1c0+incompatible
|
||||
|
||||
312
gost/debian.go
@@ -1,312 +0,0 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
debver "github.com/knqyf263/go-deb-version"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
)
|
||||
|
||||
// Debian is Gost client for Debian GNU/Linux
|
||||
type Debian struct {
|
||||
Base
|
||||
}
|
||||
|
||||
type packCves struct {
|
||||
packName string
|
||||
isSrcPack bool
|
||||
cves []models.CveContent
|
||||
fixes models.PackageFixStatuses
|
||||
}
|
||||
|
||||
func (deb Debian) supported(major string) bool {
|
||||
_, ok := map[string]string{
|
||||
"8": "jessie",
|
||||
"9": "stretch",
|
||||
"10": "buster",
|
||||
"11": "bullseye",
|
||||
}[major]
|
||||
return ok
|
||||
}
|
||||
|
||||
// DetectCVEs fills in CVE information found in Gost
|
||||
func (deb Debian) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error) {
|
||||
if !deb.supported(major(r.Release)) {
|
||||
// only logging
|
||||
logging.Log.Warnf("Debian %s is not supported yet", r.Release)
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
// Add linux and set the version of running kernel to search Gost.
|
||||
if r.Container.ContainerID == "" {
|
||||
if r.RunningKernel.Version != "" {
|
||||
newVer := ""
|
||||
if p, ok := r.Packages["linux-image-"+r.RunningKernel.Release]; ok {
|
||||
newVer = p.NewVersion
|
||||
}
|
||||
r.Packages["linux"] = models.Package{
|
||||
Name: "linux",
|
||||
Version: r.RunningKernel.Version,
|
||||
NewVersion: newVer,
|
||||
}
|
||||
} else {
|
||||
logging.Log.Warnf("Since the exact kernel version is not available, the vulnerability in the linux package is not detected.")
|
||||
}
|
||||
}
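// detectCVEsWithFixState deletes the temporary "linux" package entry when it
// finishes, so stash it here and restore it before the second (unfixed) pass.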
|
||||
|
||||
var stashLinuxPackage models.Package
|
||||
if linux, ok := r.Packages["linux"]; ok {
|
||||
stashLinuxPackage = linux
|
||||
}
|
||||
nFixedCVEs, err := deb.detectCVEsWithFixState(r, "resolved")
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to detect fixed CVEs. err: %w", err)
|
||||
}
|
||||
|
||||
if stashLinuxPackage.Name != "" {
|
||||
r.Packages["linux"] = stashLinuxPackage
|
||||
}
|
||||
nUnfixedCVEs, err := deb.detectCVEsWithFixState(r, "open")
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to detect unfixed CVEs. err: %w", err)
|
||||
}
|
||||
|
||||
return (nFixedCVEs + nUnfixedCVEs), nil
|
||||
}
|
||||
|
||||
func (deb Debian) detectCVEsWithFixState(r *models.ScanResult, fixStatus string) (nCVEs int, err error) {
|
||||
if fixStatus != "resolved" && fixStatus != "open" {
|
||||
return 0, xerrors.Errorf(`Failed to detectCVEsWithFixState. fixStatus is not allowed except "open" and "resolved"(actual: fixStatus -> %s).`, fixStatus)
|
||||
}
|
||||
|
||||
packCvesList := []packCves{}
|
||||
if deb.driver == nil {
|
||||
url, err := util.URLPathJoin(deb.baseURL, "debian", major(r.Release), "pkgs")
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to join URLPath. err: %w", err)
|
||||
}
|
||||
|
||||
s := "unfixed-cves"
|
||||
if s == "resolved" {
|
||||
s = "fixed-cves"
|
||||
}
|
||||
responses, err := getCvesWithFixStateViaHTTP(r, url, s)
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to get CVEs via HTTP. err: %w", err)
|
||||
}
|
||||
|
||||
for _, res := range responses {
|
||||
debCves := map[string]gostmodels.DebianCVE{}
|
||||
if err := json.Unmarshal([]byte(res.json), &debCves); err != nil {
|
||||
return 0, xerrors.Errorf("Failed to unmarshal json. err: %w", err)
|
||||
}
|
||||
cves := []models.CveContent{}
|
||||
fixes := []models.PackageFixStatus{}
|
||||
for _, debcve := range debCves {
|
||||
cves = append(cves, *deb.ConvertToModel(&debcve))
|
||||
fixes = append(fixes, checkPackageFixStatus(&debcve)...)
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: res.request.packName,
|
||||
isSrcPack: res.request.isSrcPack,
|
||||
cves: cves,
|
||||
fixes: fixes,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
for _, pack := range r.Packages {
|
||||
cves, fixes, err := deb.getCvesDebianWithfixStatus(fixStatus, major(r.Release), pack.Name)
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to get CVEs for Package. err: %w", err)
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: false,
|
||||
cves: cves,
|
||||
fixes: fixes,
|
||||
})
|
||||
}
|
||||
|
||||
// SrcPack
|
||||
for _, pack := range r.SrcPackages {
|
||||
cves, fixes, err := deb.getCvesDebianWithfixStatus(fixStatus, major(r.Release), pack.Name)
|
||||
if err != nil {
|
||||
return 0, xerrors.Errorf("Failed to get CVEs for SrcPackage. err: %w", err)
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: true,
|
||||
cves: cves,
|
||||
fixes: fixes,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
delete(r.Packages, "linux")
|
||||
|
||||
for _, p := range packCvesList {
|
||||
for i, cve := range p.cves {
|
||||
v, ok := r.ScannedCves[cve.CveID]
|
||||
if ok {
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(cve)
|
||||
} else {
|
||||
v.CveContents[models.DebianSecurityTracker] = []models.CveContent{cve}
|
||||
v.Confidences = models.Confidences{models.DebianSecurityTrackerMatch}
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
CveID: cve.CveID,
|
||||
CveContents: models.NewCveContents(cve),
|
||||
Confidences: models.Confidences{models.DebianSecurityTrackerMatch},
|
||||
}
|
||||
|
||||
if fixStatus == "resolved" {
|
||||
versionRelease := ""
|
||||
if p.isSrcPack {
|
||||
versionRelease = r.SrcPackages[p.packName].Version
|
||||
} else {
|
||||
versionRelease = r.Packages[p.packName].FormatVer()
|
||||
}
|
||||
|
||||
if versionRelease == "" {
|
||||
break
|
||||
}
|
||||
|
||||
affected, err := isGostDefAffected(versionRelease, p.fixes[i].FixedIn)
|
||||
if err != nil {
|
||||
logging.Log.Debugf("Failed to parse versions: %s, Ver: %s, Gost: %s",
|
||||
err, versionRelease, p.fixes[i].FixedIn)
|
||||
continue
|
||||
}
|
||||
|
||||
if !affected {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
nCVEs++
|
||||
}
|
||||
|
||||
names := []string{}
|
||||
if p.isSrcPack {
|
||||
if srcPack, ok := r.SrcPackages[p.packName]; ok {
|
||||
for _, binName := range srcPack.BinaryNames {
|
||||
if _, ok := r.Packages[binName]; ok {
|
||||
names = append(names, binName)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if p.packName == "linux" {
|
||||
names = append(names, "linux-image-"+r.RunningKernel.Release)
|
||||
} else {
|
||||
names = append(names, p.packName)
|
||||
}
|
||||
}
|
||||
|
||||
if fixStatus == "resolved" {
|
||||
for _, name := range names {
|
||||
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
|
||||
Name: name,
|
||||
FixedIn: p.fixes[i].FixedIn,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
for _, name := range names {
|
||||
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
|
||||
Name: name,
|
||||
FixState: "open",
|
||||
NotFixedYet: true,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
r.ScannedCves[cve.CveID] = v
|
||||
}
|
||||
}
|
||||
|
||||
return nCVEs, nil
|
||||
}
|
||||
|
||||
func isGostDefAffected(versionRelease, gostVersion string) (affected bool, err error) {
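// Compare Debian package versions: the installed version is affected when it is
// older than the version gost reports as fixed.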
|
||||
vera, err := debver.NewVersion(versionRelease)
|
||||
if err != nil {
|
||||
return false, xerrors.Errorf("Failed to parse version. version: %s, err: %w", versionRelease, err)
|
||||
}
|
||||
verb, err := debver.NewVersion(gostVersion)
|
||||
if err != nil {
|
||||
return false, xerrors.Errorf("Failed to parse version. version: %s, err: %w", gostVersion, err)
|
||||
}
|
||||
return vera.LessThan(verb), nil
|
||||
}
|
||||
|
||||
func (deb Debian) getCvesDebianWithfixStatus(fixStatus, release, pkgName string) ([]models.CveContent, []models.PackageFixStatus, error) {
|
||||
var f func(string, string) (map[string]gostmodels.DebianCVE, error)
|
||||
if fixStatus == "resolved" {
|
||||
f = deb.driver.GetFixedCvesDebian
|
||||
} else {
|
||||
f = deb.driver.GetUnfixedCvesDebian
|
||||
}
|
||||
debCves, err := f(release, pkgName)
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("Failed to get CVEs. fixStatus: %s, release: %s, src package: %s, err: %w", fixStatus, release, pkgName, err)
|
||||
}
|
||||
|
||||
cves := []models.CveContent{}
|
||||
fixes := []models.PackageFixStatus{}
|
||||
for _, devbCve := range debCves {
|
||||
cves = append(cves, *deb.ConvertToModel(&devbCve))
|
||||
fixes = append(fixes, checkPackageFixStatus(&devbCve)...)
|
||||
}
|
||||
return cves, fixes, nil
|
||||
}
|
||||
|
||||
// ConvertToModel converts gost model to vuls model
|
||||
func (deb Debian) ConvertToModel(cve *gostmodels.DebianCVE) *models.CveContent {
|
||||
severity := ""
|
||||
for _, p := range cve.Package {
|
||||
for _, r := range p.Release {
|
||||
severity = r.Urgency
|
||||
break
|
||||
}
|
||||
}
|
||||
return &models.CveContent{
|
||||
Type: models.DebianSecurityTracker,
|
||||
CveID: cve.CveID,
|
||||
Summary: cve.Description,
|
||||
Cvss2Severity: severity,
|
||||
Cvss3Severity: severity,
|
||||
SourceLink: "https://security-tracker.debian.org/tracker/" + cve.CveID,
|
||||
Optional: map[string]string{
|
||||
"attack range": cve.Scope,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func checkPackageFixStatus(cve *gostmodels.DebianCVE) []models.PackageFixStatus {
|
||||
fixes := []models.PackageFixStatus{}
|
||||
for _, p := range cve.Package {
|
||||
for _, r := range p.Release {
|
||||
f := models.PackageFixStatus{Name: p.PackageName}
|
||||
|
||||
if r.Status == "open" {
|
||||
f.NotFixedYet = true
|
||||
} else {
|
||||
f.FixedIn = r.FixedVersion
|
||||
}
|
||||
|
||||
fixes = append(fixes, f)
|
||||
}
|
||||
}
|
||||
|
||||
return fixes
|
||||
}
|
||||
@@ -1,71 +0,0 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestDebian_Supported(t *testing.T) {
|
||||
type fields struct {
|
||||
Base Base
|
||||
}
|
||||
type args struct {
|
||||
major string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want bool
|
||||
}{
|
||||
{
|
||||
name: "8 is supported",
|
||||
args: args{
|
||||
major: "8",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "9 is supported",
|
||||
args: args{
|
||||
major: "9",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "10 is supported",
|
||||
args: args{
|
||||
major: "10",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "11 is supported",
|
||||
args: args{
|
||||
major: "11",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "12 is not supported yet",
|
||||
args: args{
|
||||
major: "12",
|
||||
},
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
name: "empty string is not supported yet",
|
||||
args: args{
|
||||
major: "",
|
||||
},
|
||||
want: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
deb := Debian{}
|
||||
if got := deb.supported(tt.args.major); got != tt.want {
|
||||
t.Errorf("Debian.Supported() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
100
gost/gost.go
@@ -1,100 +0,0 @@
//go:build !scanner
// +build !scanner

package gost

import (
	"golang.org/x/xerrors"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/constant"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
	gostdb "github.com/vulsio/gost/db"
	gostlog "github.com/vulsio/gost/util"
)

// Client is the interface of Gost client.
type Client interface {
	DetectCVEs(*models.ScanResult, bool) (int, error)
	CloseDB() error
}

// Base is a base struct
type Base struct {
	driver  gostdb.DB
	baseURL string
}

// CloseDB close a DB connection
func (b Base) CloseDB() error {
	if b.driver == nil {
		return nil
	}
	return b.driver.CloseDB()
}

// FillCVEsWithRedHat fills CVE detailed with Red Hat Security
func FillCVEsWithRedHat(r *models.ScanResult, cnf config.GostConf, o logging.LogOpts) error {
	if err := gostlog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
		return err
	}

	db, err := newGostDB(&cnf)
	if err != nil {
		return xerrors.Errorf("Failed to newGostDB. err: %w", err)
	}

	client := RedHat{Base{driver: db, baseURL: cnf.GetURL()}}
	defer func() {
		if err := client.CloseDB(); err != nil {
			logging.Log.Errorf("Failed to close DB. err: %+v", err)
		}
	}()
	return client.fillCvesWithRedHatAPI(r)
}

// NewGostClient make Client by family
func NewGostClient(cnf config.GostConf, family string, o logging.LogOpts) (Client, error) {
	if err := gostlog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
		return nil, xerrors.Errorf("Failed to set gost logger. err: %w", err)
	}

	db, err := newGostDB(&cnf)
	if err != nil {
		return nil, xerrors.Errorf("Failed to newGostDB. err: %w", err)
	}

	base := Base{driver: db, baseURL: cnf.GetURL()}
	switch family {
	case constant.RedHat, constant.CentOS, constant.Rocky, constant.Alma:
		return RedHat{base}, nil
	case constant.Debian, constant.Raspbian:
		return Debian{base}, nil
	case constant.Ubuntu:
		return Ubuntu{base}, nil
	case constant.Windows:
		return Microsoft{base}, nil
	default:
		return Pseudo{base}, nil
	}
}

// NewGostDB returns db client for Gost
func newGostDB(cnf config.VulnDictInterface) (gostdb.DB, error) {
	if cnf.IsFetchViaHTTP() {
		return nil, nil
	}
	path := cnf.GetURL()
	if cnf.GetType() == "sqlite3" {
		path = cnf.GetSQLite3Path()
	}
	driver, locked, err := gostdb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), gostdb.Option{})
	if err != nil {
		if locked {
			return nil, xerrors.Errorf("Failed to init gost DB. SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
		}
		return nil, xerrors.Errorf("Failed to init gost DB. DB Path: %s, err: %w", path, err)
	}
	return driver, nil
}
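The Client interface and NewGostClient constructor removed above are driven from the detector side roughly as in the following sketch; the wrapper function, its package name, and the choice of family are illustrative assumptions, not code from this commit:

package detector

import (
	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/constant"
	"github.com/future-architect/vuls/gost"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
)

// detectWithGost is a hypothetical caller: build a family-specific client,
// make sure the DB handle is closed, and return the number of CVEs gost adds
// to the scan result.
func detectWithGost(cnf config.GostConf, r *models.ScanResult) (int, error) {
	client, err := gost.NewGostClient(cnf, constant.Debian, logging.LogOpts{Debug: true})
	if err != nil {
		return 0, err
	}
	defer func() {
		if err := client.CloseDB(); err != nil {
			logging.Log.Errorf("Failed to close DB. err: %+v", err)
		}
	}()
	return client.DetectCVEs(r, true)
}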
@@ -1,132 +0,0 @@
//go:build !scanner
// +build !scanner

package gost

import (
	"reflect"
	"testing"

	"github.com/future-architect/vuls/models"
	gostmodels "github.com/vulsio/gost/models"
)

func TestSetPackageStates(t *testing.T) {
	var tests = []struct {
		pkgstats  []gostmodels.RedhatPackageState
		installed models.Packages
		release   string
		in        models.VulnInfo
		out       models.PackageFixStatuses
	}{

		//0 one
		{
			pkgstats: []gostmodels.RedhatPackageState{
				{
					FixState:    "Will not fix",
					PackageName: "bouncycastle",
					Cpe:         "cpe:/o:redhat:enterprise_linux:7",
				},
			},
			installed: models.Packages{
				"bouncycastle": models.Package{},
			},
			release: "7",
			in:      models.VulnInfo{},
			out: []models.PackageFixStatus{
				{
					Name:        "bouncycastle",
					FixState:    "Will not fix",
					NotFixedYet: true,
				},
			},
		},

		//1 two
		{
			pkgstats: []gostmodels.RedhatPackageState{
				{
					FixState:    "Will not fix",
					PackageName: "bouncycastle",
					Cpe:         "cpe:/o:redhat:enterprise_linux:7",
				},
				{
					FixState:    "Fix deferred",
					PackageName: "pack_a",
					Cpe:         "cpe:/o:redhat:enterprise_linux:7",
				},
				// ignore not-installed-package
				{
					FixState:    "Fix deferred",
					PackageName: "pack_b",
					Cpe:         "cpe:/o:redhat:enterprise_linux:7",
				},
			},
			installed: models.Packages{
				"bouncycastle": models.Package{},
				"pack_a":       models.Package{},
			},
			release: "7",
			in:      models.VulnInfo{},
			out: []models.PackageFixStatus{
				{
					Name:        "bouncycastle",
					FixState:    "Will not fix",
					NotFixedYet: true,
				},
				{
					Name:        "pack_a",
					FixState:    "Fix deferred",
					NotFixedYet: true,
				},
			},
		},

		//2 ignore affected
		{
			pkgstats: []gostmodels.RedhatPackageState{
				{
					FixState:    "affected",
					PackageName: "bouncycastle",
					Cpe:         "cpe:/o:redhat:enterprise_linux:7",
				},
			},
			installed: models.Packages{
				"bouncycastle": models.Package{},
			},
			release: "7",
			in: models.VulnInfo{
				AffectedPackages: models.PackageFixStatuses{},
			},
			out: models.PackageFixStatuses{},
		},

		//3 look only the same os release.
		{
			pkgstats: []gostmodels.RedhatPackageState{
				{
					FixState:    "Will not fix",
					PackageName: "bouncycastle",
					Cpe:         "cpe:/o:redhat:enterprise_linux:6",
				},
			},
			installed: models.Packages{
				"bouncycastle": models.Package{},
			},
			release: "7",
			in: models.VulnInfo{
				AffectedPackages: models.PackageFixStatuses{},
			},
			out: models.PackageFixStatuses{},
		},
	}

	r := RedHat{}
	for i, tt := range tests {
		out := r.mergePackageStates(tt.in, tt.pkgstats, tt.installed, tt.release)
		if ok := reflect.DeepEqual(tt.out, out); !ok {
			t.Errorf("[%d]\nexpected: %v:%T\n  actual: %v:%T\n", i, tt.out, tt.out, out, out)
		}
	}
}
@@ -1,222 +0,0 @@
//go:build !scanner
// +build !scanner

package gost

import (
	"fmt"
	"regexp"
	"strconv"
	"strings"

	"golang.org/x/exp/maps"
	"golang.org/x/exp/slices"
	"golang.org/x/xerrors"

	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
	gostmodels "github.com/vulsio/gost/models"
)

// Microsoft is Gost client for windows
type Microsoft struct {
	Base
}

var kbIDPattern = regexp.MustCompile(`KB(\d{6,7})`)

// DetectCVEs fills cve information that has in Gost
func (ms Microsoft) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error) {
	if ms.driver == nil {
		return 0, nil
	}

	var osName string
	osName, ok := r.Optional["OSName"].(string)
	if !ok {
		logging.Log.Warnf("This Windows has wrong type option(OSName). UUID: %s", r.ServerUUID)
	}

	var products []string
	if _, ok := r.Optional["InstalledProducts"]; ok {
		switch ps := r.Optional["InstalledProducts"].(type) {
		case []interface{}:
			for _, p := range ps {
				pname, ok := p.(string)
				if !ok {
					logging.Log.Warnf("skip products: %v", p)
					continue
				}
				products = append(products, pname)
			}
		case []string:
			for _, p := range ps {
				products = append(products, p)
			}
		case nil:
			logging.Log.Warnf("This Windows has no option(InstalledProducts). UUID: %s", r.ServerUUID)
		}
	}

	applied, unapplied := map[string]struct{}{}, map[string]struct{}{}
	if _, ok := r.Optional["KBID"]; ok {
		switch kbIDs := r.Optional["KBID"].(type) {
		case []interface{}:
			for _, kbID := range kbIDs {
				s, ok := kbID.(string)
				if !ok {
					logging.Log.Warnf("skip KBID: %v", kbID)
					continue
				}
				unapplied[strings.TrimPrefix(s, "KB")] = struct{}{}
			}
		case []string:
			for _, kbID := range kbIDs {
				unapplied[strings.TrimPrefix(kbID, "KB")] = struct{}{}
			}
		case nil:
			logging.Log.Warnf("This Windows has no option(KBID). UUID: %s", r.ServerUUID)
		}

		for _, pkg := range r.Packages {
			matches := kbIDPattern.FindAllStringSubmatch(pkg.Name, -1)
			for _, match := range matches {
				applied[match[1]] = struct{}{}
			}
		}
	} else {
		switch kbIDs := r.Optional["AppliedKBID"].(type) {
		case []interface{}:
			for _, kbID := range kbIDs {
				s, ok := kbID.(string)
				if !ok {
					logging.Log.Warnf("skip KBID: %v", kbID)
					continue
				}
				applied[strings.TrimPrefix(s, "KB")] = struct{}{}
			}
		case []string:
			for _, kbID := range kbIDs {
				applied[strings.TrimPrefix(kbID, "KB")] = struct{}{}
			}
		case nil:
			logging.Log.Warnf("This Windows has no option(AppliedKBID). UUID: %s", r.ServerUUID)
		}

		switch kbIDs := r.Optional["UnappliedKBID"].(type) {
		case []interface{}:
			for _, kbID := range kbIDs {
				s, ok := kbID.(string)
				if !ok {
					logging.Log.Warnf("skip KBID: %v", kbID)
					continue
				}
				unapplied[strings.TrimPrefix(s, "KB")] = struct{}{}
			}
		case []string:
			for _, kbID := range kbIDs {
				unapplied[strings.TrimPrefix(kbID, "KB")] = struct{}{}
			}
		case nil:
			logging.Log.Warnf("This Windows has no option(UnappliedKBID). UUID: %s", r.ServerUUID)
		}
	}

	logging.Log.Debugf(`GetCvesByMicrosoftKBID query body {"osName": %s, "installedProducts": %q, "applied": %q, "unapplied: %q"}`, osName, products, maps.Keys(applied), maps.Keys(unapplied))
	cves, err := ms.driver.GetCvesByMicrosoftKBID(osName, products, maps.Keys(applied), maps.Keys(unapplied))
	if err != nil {
		return 0, xerrors.Errorf("Failed to detect CVEs. err: %w", err)
	}

	for cveID, cve := range cves {
		cveCont, mitigations := ms.ConvertToModel(&cve)
		uniqKB := map[string]struct{}{}
		for _, p := range cve.Products {
			for _, kb := range p.KBs {
				if _, err := strconv.Atoi(kb.Article); err == nil {
					uniqKB[fmt.Sprintf("KB%s", kb.Article)] = struct{}{}
				} else {
					uniqKB[kb.Article] = struct{}{}
				}
			}
		}
		advisories := []models.DistroAdvisory{}
		for kb := range uniqKB {
			advisories = append(advisories, models.DistroAdvisory{
				AdvisoryID:  kb,
				Description: "Microsoft Knowledge Base",
			})
		}

		r.ScannedCves[cveID] = models.VulnInfo{
			CveID:            cveID,
			Confidences:      models.Confidences{models.WindowsUpdateSearch},
			DistroAdvisories: advisories,
			CveContents:      models.NewCveContents(*cveCont),
			Mitigations:      mitigations,
		}
	}
	return len(cves), nil
}

// ConvertToModel converts gost model to vuls model
func (ms Microsoft) ConvertToModel(cve *gostmodels.MicrosoftCVE) (*models.CveContent, []models.Mitigation) {
	slices.SortFunc(cve.Products, func(i, j gostmodels.MicrosoftProduct) bool {
		return i.ScoreSet.Vector < j.ScoreSet.Vector
	})

	v3score := 0.0
	var v3Vector string
	for _, p := range cve.Products {
		v, err := strconv.ParseFloat(p.ScoreSet.BaseScore, 64)
		if err != nil {
			continue
		}
		if v3score < v {
			v3score = v
			v3Vector = p.ScoreSet.Vector
		}
	}

	var v3Severity string
	for _, p := range cve.Products {
		v3Severity = p.Severity
	}

	option := map[string]string{}
	if 0 < len(cve.ExploitStatus) {
		// TODO: CVE-2020-0739
		// "exploit_status": "Publicly Disclosed:No;Exploited:No;Latest Software Release:Exploitation Less Likely;Older Software Release:Exploitation Less Likely;DOS:N/A",
		option["exploit"] = cve.ExploitStatus
	}

	mitigations := []models.Mitigation{}
	if cve.Mitigation != "" {
		mitigations = append(mitigations, models.Mitigation{
			CveContentType: models.Microsoft,
			Mitigation:     cve.Mitigation,
			URL:            cve.URL,
		})
	}
	if cve.Workaround != "" {
		mitigations = append(mitigations, models.Mitigation{
			CveContentType: models.Microsoft,
			Mitigation:     cve.Workaround,
			URL:            cve.URL,
		})
	}

	return &models.CveContent{
		Type:          models.Microsoft,
		CveID:         cve.CveID,
		Title:         cve.Title,
		Summary:       cve.Description,
		Cvss3Score:    v3score,
		Cvss3Vector:   v3Vector,
		Cvss3Severity: v3Severity,
		Published:     cve.PublishDate,
		LastModified:  cve.LastUpdateDate,
		SourceLink:    cve.URL,
		Optional:      option,
	}, mitigations
}
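The KB bookkeeping in DetectCVEs above can be hard to follow in one pass: KB numbers embedded in installed package names count as applied, while KBID entries are normalised by stripping the "KB" prefix before querying gost. A standalone sketch of that normalisation, with invented package names and KB numbers:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	kbIDPattern := regexp.MustCompile(`KB(\d{6,7})`)

	// Applied KBs are harvested from package names reported by the scanner
	// (the names here are made up for the example).
	applied := map[string]struct{}{}
	for _, pkgName := range []string{"Update for Windows (KB5012170)", "Some Product"} {
		for _, match := range kbIDPattern.FindAllStringSubmatch(pkgName, -1) {
			applied[match[1]] = struct{}{}
		}
	}

	// Unapplied KBs arrive as strings that may or may not carry the prefix.
	unapplied := map[string]struct{}{}
	for _, kbID := range []string{"KB5013943", "5013942"} {
		unapplied[strings.TrimPrefix(kbID, "KB")] = struct{}{}
	}

	fmt.Println(applied)   // map[5012170:{}]
	fmt.Println(unapplied) // map[5013942:{} 5013943:{}]
}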
@@ -1,18 +0,0 @@
//go:build !scanner
// +build !scanner

package gost

import (
	"github.com/future-architect/vuls/models"
)

// Pseudo is Gost client except for RedHat family, Debian, Ubuntu and Windows
type Pseudo struct {
	Base
}

// DetectCVEs fills cve information that has in Gost
func (pse Pseudo) DetectCVEs(_ *models.ScanResult, _ bool) (int, error) {
	return 0, nil
}
270 gost/redhat.go
@@ -1,270 +0,0 @@
//go:build !scanner
// +build !scanner

package gost

import (
	"encoding/json"
	"strconv"
	"strings"

	"golang.org/x/xerrors"

	"github.com/future-architect/vuls/constant"
	"github.com/future-architect/vuls/models"
	"github.com/future-architect/vuls/util"
	gostmodels "github.com/vulsio/gost/models"
)

// RedHat is Gost client for RedHat family linux
type RedHat struct {
	Base
}

// DetectCVEs fills cve information that has in Gost
func (red RedHat) DetectCVEs(r *models.ScanResult, ignoreWillNotFix bool) (nCVEs int, err error) {
	gostRelease := r.Release
	if r.Family == constant.CentOS {
		gostRelease = strings.TrimPrefix(r.Release, "stream")
	}
	if red.driver == nil {
		prefix, err := util.URLPathJoin(red.baseURL, "redhat", major(gostRelease), "pkgs")
		if err != nil {
			return 0, xerrors.Errorf("Failed to join URLPath. err: %w", err)
		}
		responses, err := getAllUnfixedCvesViaHTTP(r, prefix)
		if err != nil {
			return 0, xerrors.Errorf("Failed to get Unfixed CVEs via HTTP. err: %w", err)
		}
		for _, res := range responses {
			// CVE-ID: RedhatCVE
			cves := map[string]gostmodels.RedhatCVE{}
			if err := json.Unmarshal([]byte(res.json), &cves); err != nil {
				return 0, xerrors.Errorf("Failed to unmarshal json. err: %w", err)
			}
			for _, cve := range cves {
				if newly := red.setUnfixedCveToScanResult(&cve, r); newly {
					nCVEs++
				}
			}
		}
	} else {
		for _, pack := range r.Packages {
			// CVE-ID: RedhatCVE
			cves, err := red.driver.GetUnfixedCvesRedhat(major(gostRelease), pack.Name, ignoreWillNotFix)
			if err != nil {
				return 0, xerrors.Errorf("Failed to get Unfixed CVEs. err: %w", err)
			}
			for _, cve := range cves {
				if newly := red.setUnfixedCveToScanResult(&cve, r); newly {
					nCVEs++
				}
			}
		}
	}
	return nCVEs, nil
}

func (red RedHat) fillCvesWithRedHatAPI(r *models.ScanResult) error {
	cveIDs := []string{}
	for cveID, vuln := range r.ScannedCves {
		if _, ok := vuln.CveContents[models.RedHatAPI]; ok {
			continue
		}
		cveIDs = append(cveIDs, cveID)
	}

	if red.driver == nil {
		prefix, err := util.URLPathJoin(red.baseURL, "redhat", "cves")
		if err != nil {
			return err
		}
		responses, err := getCvesViaHTTP(cveIDs, prefix)
		if err != nil {
			return err
		}
		for _, res := range responses {
			redCve := gostmodels.RedhatCVE{}
			if err := json.Unmarshal([]byte(res.json), &redCve); err != nil {
				return err
			}
			if redCve.ID == 0 {
				continue
			}
			red.setFixedCveToScanResult(&redCve, r)
		}
	} else {
		redCves, err := red.driver.GetRedhatMulti(cveIDs)
		if err != nil {
			return err
		}
		for _, redCve := range redCves {
			if len(redCve.Name) == 0 {
				continue
			}
			red.setFixedCveToScanResult(&redCve, r)
		}
	}

	return nil
}

func (red RedHat) setFixedCveToScanResult(cve *gostmodels.RedhatCVE, r *models.ScanResult) {
	cveCont, mitigations := red.ConvertToModel(cve)
	v, ok := r.ScannedCves[cveCont.CveID]
	if ok {
		if v.CveContents == nil {
			v.CveContents = models.NewCveContents(*cveCont)
		} else {
			v.CveContents[models.RedHatAPI] = []models.CveContent{*cveCont}
		}
	} else {
		v = models.VulnInfo{
			CveID:       cveCont.CveID,
			CveContents: models.NewCveContents(*cveCont),
			Confidences: models.Confidences{models.RedHatAPIMatch},
		}
	}
	v.Mitigations = append(v.Mitigations, mitigations...)
	r.ScannedCves[cveCont.CveID] = v
}

func (red RedHat) setUnfixedCveToScanResult(cve *gostmodels.RedhatCVE, r *models.ScanResult) (newly bool) {
	cveCont, mitigations := red.ConvertToModel(cve)
	v, ok := r.ScannedCves[cve.Name]
	if ok {
		if v.CveContents == nil {
			v.CveContents = models.NewCveContents(*cveCont)
		} else {
			v.CveContents[models.RedHatAPI] = []models.CveContent{*cveCont}
		}
	} else {
		v = models.VulnInfo{
			CveID:       cveCont.CveID,
			CveContents: models.NewCveContents(*cveCont),
			Confidences: models.Confidences{models.RedHatAPIMatch},
		}
		newly = true
	}
	v.Mitigations = append(v.Mitigations, mitigations...)

	gostRelease := r.Release
	if r.Family == constant.CentOS {
		gostRelease = strings.TrimPrefix(r.Release, "stream")
	}
	pkgStats := red.mergePackageStates(v, cve.PackageState, r.Packages, gostRelease)
	if 0 < len(pkgStats) {
		v.AffectedPackages = pkgStats
		r.ScannedCves[cve.Name] = v
	}
	return
}

func (red RedHat) mergePackageStates(v models.VulnInfo, ps []gostmodels.RedhatPackageState, installed models.Packages, release string) (pkgStats models.PackageFixStatuses) {
	pkgStats = v.AffectedPackages
	for _, pstate := range ps {
		if pstate.Cpe !=
			"cpe:/o:redhat:enterprise_linux:"+major(release) {
			return
		}

		if !(pstate.FixState == "Will not fix" ||
			pstate.FixState == "Fix deferred" ||
			pstate.FixState == "Affected") {
			return
		}

		if _, ok := installed[pstate.PackageName]; !ok {
			return
		}

		notFixedYet := false
		switch pstate.FixState {
		case "Will not fix", "Fix deferred", "Affected":
			notFixedYet = true
		}

		pkgStats = pkgStats.Store(models.PackageFixStatus{
			Name:        pstate.PackageName,
			FixState:    pstate.FixState,
			NotFixedYet: notFixedYet,
		})
	}
	return
}

func (red RedHat) parseCwe(str string) (cwes []string) {
	if str != "" {
		s := strings.Replace(str, "(", "|", -1)
		s = strings.Replace(s, ")", "|", -1)
		s = strings.Replace(s, "->", "|", -1)
		for _, s := range strings.Split(s, "|") {
			if s != "" {
				cwes = append(cwes, s)
			}
		}
	}
	return
}

// ConvertToModel converts gost model to vuls model
func (red RedHat) ConvertToModel(cve *gostmodels.RedhatCVE) (*models.CveContent, []models.Mitigation) {
	cwes := red.parseCwe(cve.Cwe)

	details := []string{}
	for _, detail := range cve.Details {
		details = append(details, detail.Detail)
	}

	v2score := 0.0
	if cve.Cvss.CvssBaseScore != "" {
		v2score, _ = strconv.ParseFloat(cve.Cvss.CvssBaseScore, 64)
	}
	v2severity := ""
	if v2score != 0 {
		v2severity = cve.ThreatSeverity
	}

	v3score := 0.0
	if cve.Cvss3.Cvss3BaseScore != "" {
		v3score, _ = strconv.ParseFloat(cve.Cvss3.Cvss3BaseScore, 64)
	}
	v3severity := ""
	if v3score != 0 {
		v3severity = cve.ThreatSeverity
	}

	refs := []models.Reference{}
	for _, r := range cve.References {
		refs = append(refs, models.Reference{Link: r.Reference})
	}

	vendorURL := "https://access.redhat.com/security/cve/" + cve.Name
	mitigations := []models.Mitigation{}
	if cve.Mitigation != "" {
		mitigations = []models.Mitigation{
			{
				CveContentType: models.RedHatAPI,
				Mitigation:     cve.Mitigation,
				URL:            vendorURL,
			},
		}
	}

	return &models.CveContent{
		Type:          models.RedHatAPI,
		CveID:         cve.Name,
		Title:         cve.Bugzilla.Description,
		Summary:       strings.Join(details, "\n"),
		Cvss2Score:    v2score,
		Cvss2Vector:   cve.Cvss.CvssScoringVector,
		Cvss2Severity: v2severity,
		Cvss3Score:    v3score,
		Cvss3Vector:   cve.Cvss3.Cvss3ScoringVector,
		Cvss3Severity: v3severity,
		References:    refs,
		CweIDs:        cwes,
		Published:     cve.PublicDate,
		SourceLink:    vendorURL,
	}, mitigations
}
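parseCwe above flattens Red Hat's CWE chains by rewriting "(", ")" and "->" to a single separator and splitting on it. The same transformation in isolation, using the first input from the test that follows:

package main

import (
	"fmt"
	"strings"
)

func main() {
	in := "CWE-665->(CWE-200|CWE-89)" // first case from TestParseCwe below

	// Same three rewrites as parseCwe, then split and drop empty fragments.
	s := strings.ReplaceAll(in, "(", "|")
	s = strings.ReplaceAll(s, ")", "|")
	s = strings.ReplaceAll(s, "->", "|")

	cwes := []string{}
	for _, c := range strings.Split(s, "|") {
		if c != "" {
			cwes = append(cwes, c)
		}
	}
	fmt.Println(cwes) // [CWE-665 CWE-200 CWE-89]
}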
@@ -1,40 +0,0 @@
//go:build !scanner
// +build !scanner

package gost

import (
	"reflect"
	"sort"
	"testing"
)

func TestParseCwe(t *testing.T) {
	var tests = []struct {
		in  string
		out []string
	}{
		{
			in:  "CWE-665->(CWE-200|CWE-89)",
			out: []string{"CWE-665", "CWE-200", "CWE-89"},
		},
		{
			in:  "CWE-841->CWE-770->CWE-454",
			out: []string{"CWE-841", "CWE-770", "CWE-454"},
		},
		{
			in:  "(CWE-122|CWE-125)",
			out: []string{"CWE-122", "CWE-125"},
		},
	}

	r := RedHat{}
	for i, tt := range tests {
		out := r.parseCwe(tt.in)
		sort.Strings(out)
		sort.Strings(tt.out)
		if !reflect.DeepEqual(tt.out, out) {
			t.Errorf("[%d]expected: %s, actual: %s", i, tt.out, out)
		}
	}
}
202 gost/ubuntu.go
@@ -1,202 +0,0 @@
//go:build !scanner
// +build !scanner

package gost

import (
	"encoding/json"
	"strings"

	"golang.org/x/xerrors"

	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
	"github.com/future-architect/vuls/util"
	gostmodels "github.com/vulsio/gost/models"
)

// Ubuntu is Gost client for Ubuntu
type Ubuntu struct {
	Base
}

func (ubu Ubuntu) supported(version string) bool {
	_, ok := map[string]string{
		"1404": "trusty",
		"1604": "xenial",
		"1804": "bionic",
		"1910": "eoan",
		"2004": "focal",
		"2010": "groovy",
		"2104": "hirsute",
		"2110": "impish",
		"2204": "jammy",
	}[version]
	return ok
}

// DetectCVEs fills cve information that has in Gost
func (ubu Ubuntu) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error) {
	ubuReleaseVer := strings.Replace(r.Release, ".", "", 1)
	if !ubu.supported(ubuReleaseVer) {
		logging.Log.Warnf("Ubuntu %s is not supported yet", r.Release)
		return 0, nil
	}

	linuxImage := "linux-image-" + r.RunningKernel.Release
	// Add linux and set the version of running kernel to search Gost.
	if r.Container.ContainerID == "" {
		newVer := ""
		if p, ok := r.Packages[linuxImage]; ok {
			newVer = p.NewVersion
		}
		r.Packages["linux"] = models.Package{
			Name:       "linux",
			Version:    r.RunningKernel.Version,
			NewVersion: newVer,
		}
	}

	packCvesList := []packCves{}
	if ubu.driver == nil {
		url, err := util.URLPathJoin(ubu.baseURL, "ubuntu", ubuReleaseVer, "pkgs")
		if err != nil {
			return 0, xerrors.Errorf("Failed to join URLPath. err: %w", err)
		}
		responses, err := getAllUnfixedCvesViaHTTP(r, url)
		if err != nil {
			return 0, xerrors.Errorf("Failed to get Unfixed CVEs via HTTP. err: %w", err)
		}

		for _, res := range responses {
			ubuCves := map[string]gostmodels.UbuntuCVE{}
			if err := json.Unmarshal([]byte(res.json), &ubuCves); err != nil {
				return 0, xerrors.Errorf("Failed to unmarshal json. err: %w", err)
			}
			cves := []models.CveContent{}
			for _, ubucve := range ubuCves {
				cves = append(cves, *ubu.ConvertToModel(&ubucve))
			}
			packCvesList = append(packCvesList, packCves{
				packName:  res.request.packName,
				isSrcPack: res.request.isSrcPack,
				cves:      cves,
			})
		}
	} else {
		for _, pack := range r.Packages {
			ubuCves, err := ubu.driver.GetUnfixedCvesUbuntu(ubuReleaseVer, pack.Name)
			if err != nil {
				return 0, xerrors.Errorf("Failed to get Unfixed CVEs For Package. err: %w", err)
			}
			cves := []models.CveContent{}
			for _, ubucve := range ubuCves {
				cves = append(cves, *ubu.ConvertToModel(&ubucve))
			}
			packCvesList = append(packCvesList, packCves{
				packName:  pack.Name,
				isSrcPack: false,
				cves:      cves,
			})
		}

		// SrcPack
		for _, pack := range r.SrcPackages {
			ubuCves, err := ubu.driver.GetUnfixedCvesUbuntu(ubuReleaseVer, pack.Name)
			if err != nil {
				return 0, xerrors.Errorf("Failed to get Unfixed CVEs For SrcPackage. err: %w", err)
			}
			cves := []models.CveContent{}
			for _, ubucve := range ubuCves {
				cves = append(cves, *ubu.ConvertToModel(&ubucve))
			}
			packCvesList = append(packCvesList, packCves{
				packName:  pack.Name,
				isSrcPack: true,
				cves:      cves,
			})
		}
	}

	delete(r.Packages, "linux")

	for _, p := range packCvesList {
		for _, cve := range p.cves {
			v, ok := r.ScannedCves[cve.CveID]
			if ok {
				if v.CveContents == nil {
					v.CveContents = models.NewCveContents(cve)
				} else {
					v.CveContents[models.UbuntuAPI] = []models.CveContent{cve}
				}
			} else {
				v = models.VulnInfo{
					CveID:       cve.CveID,
					CveContents: models.NewCveContents(cve),
					Confidences: models.Confidences{models.UbuntuAPIMatch},
				}
				nCVEs++
			}

			names := []string{}
			if p.isSrcPack {
				if srcPack, ok := r.SrcPackages[p.packName]; ok {
					for _, binName := range srcPack.BinaryNames {
						if _, ok := r.Packages[binName]; ok {
							names = append(names, binName)
						}
					}
				}
			} else {
				if p.packName == "linux" {
					names = append(names, linuxImage)
				} else {
					names = append(names, p.packName)
				}
			}

			for _, name := range names {
				v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
					Name:        name,
					FixState:    "open",
					NotFixedYet: true,
				})
			}
			r.ScannedCves[cve.CveID] = v
		}
	}
	return nCVEs, nil
}

// ConvertToModel converts gost model to vuls model
func (ubu Ubuntu) ConvertToModel(cve *gostmodels.UbuntuCVE) *models.CveContent {
	references := []models.Reference{}
	for _, r := range cve.References {
		if strings.Contains(r.Reference, "https://cve.mitre.org/cgi-bin/cvename.cgi?name=") {
			references = append(references, models.Reference{Source: "CVE", Link: r.Reference})
		} else {
			references = append(references, models.Reference{Link: r.Reference})
		}
	}

	for _, b := range cve.Bugs {
		references = append(references, models.Reference{Source: "Bug", Link: b.Bug})
	}

	for _, u := range cve.Upstreams {
		for _, upstreamLink := range u.UpstreamLinks {
			references = append(references, models.Reference{Source: "UPSTREAM", Link: upstreamLink.Link})
		}
	}

	return &models.CveContent{
		Type:          models.UbuntuAPI,
		CveID:         cve.Candidate,
		Summary:       cve.Description,
		Cvss2Severity: cve.Priority,
		Cvss3Severity: cve.Priority,
		SourceLink:    "https://ubuntu.com/security/" + cve.Candidate,
		References:    references,
		Published:     cve.PublicDate,
	}
}
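The release handling in Ubuntu.DetectCVEs normalises r.Release by dropping the first dot ("20.04" becomes "2004") before the supported lookup. A standalone sketch of that gate with a few sample values; the helper below simply repeats the map from Ubuntu.supported for the purpose of the example:

package main

import (
	"fmt"
	"strings"
)

// supportedUbuntu mirrors the lookup in Ubuntu.supported above.
func supportedUbuntu(version string) bool {
	_, ok := map[string]string{
		"1404": "trusty", "1604": "xenial", "1804": "bionic",
		"1910": "eoan", "2004": "focal", "2010": "groovy",
		"2104": "hirsute", "2110": "impish", "2204": "jammy",
	}[version]
	return ok
}

func main() {
	for _, release := range []string{"20.04", "22.04", "23.04"} {
		ver := strings.Replace(release, ".", "", 1) // "20.04" -> "2004"
		fmt.Printf("%s supported: %v\n", release, supportedUbuntu(ver))
	}
}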
@@ -1,137 +0,0 @@
package gost

import (
	"reflect"
	"testing"
	"time"

	"github.com/future-architect/vuls/models"
	gostmodels "github.com/vulsio/gost/models"
)

func TestUbuntu_Supported(t *testing.T) {
	type args struct {
		ubuReleaseVer string
	}
	tests := []struct {
		name string
		args args
		want bool
	}{
		{
			name: "14.04 is supported",
			args: args{
				ubuReleaseVer: "1404",
			},
			want: true,
		},
		{
			name: "16.04 is supported",
			args: args{
				ubuReleaseVer: "1604",
			},
			want: true,
		},
		{
			name: "18.04 is supported",
			args: args{
				ubuReleaseVer: "1804",
			},
			want: true,
		},
		{
			name: "20.04 is supported",
			args: args{
				ubuReleaseVer: "2004",
			},
			want: true,
		},
		{
			name: "20.10 is supported",
			args: args{
				ubuReleaseVer: "2010",
			},
			want: true,
		},
		{
			name: "21.04 is supported",
			args: args{
				ubuReleaseVer: "2104",
			},
			want: true,
		},
		{
			name: "empty string is not supported yet",
			args: args{
				ubuReleaseVer: "",
			},
			want: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ubu := Ubuntu{}
			if got := ubu.supported(tt.args.ubuReleaseVer); got != tt.want {
				t.Errorf("Ubuntu.Supported() = %v, want %v", got, tt.want)
			}
		})
	}
}

func TestUbuntuConvertToModel(t *testing.T) {
	tests := []struct {
		name     string
		input    gostmodels.UbuntuCVE
		expected models.CveContent
	}{
		{
			name: "gost Ubuntu.ConvertToModel",
			input: gostmodels.UbuntuCVE{
				Candidate:  "CVE-2021-3517",
				PublicDate: time.Date(2021, 5, 19, 14, 15, 0, 0, time.UTC),
				References: []gostmodels.UbuntuReference{
					{Reference: "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-3517"},
					{Reference: "https://gitlab.gnome.org/GNOME/libxml2/-/issues/235"},
					{Reference: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/bf22713507fe1fc3a2c4b525cf0a88c2dc87a3a2"}},
				Description: "description.",
				Notes:       []gostmodels.UbuntuNote{},
				Bugs:        []gostmodels.UbuntuBug{{Bug: "http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=987738"}},
				Priority:    "medium",
				Patches: []gostmodels.UbuntuPatch{
					{PackageName: "libxml2", ReleasePatches: []gostmodels.UbuntuReleasePatch{
						{ReleaseName: "focal", Status: "needed", Note: ""},
					}},
				},
				Upstreams: []gostmodels.UbuntuUpstream{{
					PackageName: "libxml2", UpstreamLinks: []gostmodels.UbuntuUpstreamLink{
						{Link: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/50f06b3efb638efb0abd95dc62dca05ae67882c2"},
					},
				}},
			},
			expected: models.CveContent{
				Type:          models.UbuntuAPI,
				CveID:         "CVE-2021-3517",
				Summary:       "description.",
				Cvss2Severity: "medium",
				Cvss3Severity: "medium",
				SourceLink:    "https://ubuntu.com/security/CVE-2021-3517",
				References: []models.Reference{
					{Source: "CVE", Link: "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-3517"},
					{Link: "https://gitlab.gnome.org/GNOME/libxml2/-/issues/235"},
					{Link: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/bf22713507fe1fc3a2c4b525cf0a88c2dc87a3a2"},
					{Source: "Bug", Link: "http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=987738"},
					{Source: "UPSTREAM", Link: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/50f06b3efb638efb0abd95dc62dca05ae67882c2"}},
				Published: time.Date(2021, 5, 19, 14, 15, 0, 0, time.UTC),
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ubu := Ubuntu{}
			got := ubu.ConvertToModel(&tt.input)
			if !reflect.DeepEqual(got, &tt.expected) {
				t.Errorf("Ubuntu.ConvertToModel() = %#v, want %#v", got, &tt.expected)
			}
		})
	}
}
195 gost/util.go
@@ -1,195 +0,0 @@
//go:build !scanner
// +build !scanner

package gost

import (
	"net/http"
	"strings"
	"time"

	"github.com/cenkalti/backoff"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
	"github.com/future-architect/vuls/util"
	"github.com/parnurzeal/gorequest"
	"golang.org/x/xerrors"
)

type response struct {
	request request
	json    string
}

func getCvesViaHTTP(cveIDs []string, urlPrefix string) (
	responses []response, err error) {
	nReq := len(cveIDs)
	reqChan := make(chan request, nReq)
	resChan := make(chan response, nReq)
	errChan := make(chan error, nReq)
	defer close(reqChan)
	defer close(resChan)
	defer close(errChan)

	go func() {
		for _, cveID := range cveIDs {
			reqChan <- request{
				cveID: cveID,
			}
		}
	}()

	concurrency := 10
	tasks := util.GenWorkers(concurrency)
	for i := 0; i < nReq; i++ {
		tasks <- func() {
			select {
			case req := <-reqChan:
				url, err := util.URLPathJoin(
					urlPrefix,
					req.cveID,
				)
				if err != nil {
					errChan <- err
				} else {
					logging.Log.Debugf("HTTP Request to %s", url)
					httpGet(url, req, resChan, errChan)
				}
			}
		}
	}

	timeout := time.After(2 * 60 * time.Second)
	var errs []error
	for i := 0; i < nReq; i++ {
		select {
		case res := <-resChan:
			responses = append(responses, res)
		case err := <-errChan:
			errs = append(errs, err)
		case <-timeout:
			return nil, xerrors.New("Timeout Fetching OVAL")
		}
	}
	if len(errs) != 0 {
		return nil, xerrors.Errorf("Failed to fetch OVAL. err: %w", errs)
	}
	return
}

type request struct {
	osMajorVersion string
	packName       string
	isSrcPack      bool
	cveID          string
}

func getAllUnfixedCvesViaHTTP(r *models.ScanResult, urlPrefix string) (
	responses []response, err error) {
	return getCvesWithFixStateViaHTTP(r, urlPrefix, "unfixed-cves")
}

func getCvesWithFixStateViaHTTP(r *models.ScanResult, urlPrefix, fixState string) (responses []response, err error) {
	nReq := len(r.Packages) + len(r.SrcPackages)
	reqChan := make(chan request, nReq)
	resChan := make(chan response, nReq)
	errChan := make(chan error, nReq)
	defer close(reqChan)
	defer close(resChan)
	defer close(errChan)

	go func() {
		for _, pack := range r.Packages {
			reqChan <- request{
				osMajorVersion: major(r.Release),
				packName:       pack.Name,
				isSrcPack:      false,
			}
		}
		for _, pack := range r.SrcPackages {
			reqChan <- request{
				osMajorVersion: major(r.Release),
				packName:       pack.Name,
				isSrcPack:      true,
			}
		}
	}()

	concurrency := 10
	tasks := util.GenWorkers(concurrency)
	for i := 0; i < nReq; i++ {
		tasks <- func() {
			select {
			case req := <-reqChan:
				url, err := util.URLPathJoin(
					urlPrefix,
					req.packName,
					fixState,
				)
				if err != nil {
					errChan <- err
				} else {
					logging.Log.Debugf("HTTP Request to %s", url)
					httpGet(url, req, resChan, errChan)
				}
			}
		}
	}

	timeout := time.After(2 * 60 * time.Second)
	var errs []error
	for i := 0; i < nReq; i++ {
		select {
		case res := <-resChan:
			responses = append(responses, res)
		case err := <-errChan:
			errs = append(errs, err)
		case <-timeout:
			return nil, xerrors.New("Timeout Fetching OVAL")
		}
	}
	if len(errs) != 0 {
		return nil, xerrors.Errorf("Failed to fetch OVAL. err: %w", errs)
	}
	return
}

func httpGet(url string, req request, resChan chan<- response, errChan chan<- error) {
	var body string
	var errs []error
	var resp *http.Response
	count, retryMax := 0, 3
	f := func() (err error) {
		// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
		resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
		if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
			count++
			if count == retryMax {
				return nil
			}
			return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
		}
		return nil
	}
	notify := func(err error, t time.Duration) {
		logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
	}
	err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
	if err != nil {
		errChan <- xerrors.Errorf("HTTP Error %w", err)
		return
	}
	if count == retryMax {
		errChan <- xerrors.New("Retry count exceeded")
		return
	}

	resChan <- response{
		request: req,
		json:    body,
	}
}

func major(osVer string) (majorVersion string) {
	return strings.Split(osVer, ".")[0]
}
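getCvesViaHTTP and getCvesWithFixStateViaHTTP share a fan-out/fan-in shape: requests are queued on a channel, a bounded set of workers performs the HTTP GETs, and the caller collects one response or error per request under a global timeout. A dependency-free sketch of that pattern; util.GenWorkers and the real httpGet call are replaced with stand-ins, and the CVE IDs are placeholders:

package main

import (
	"fmt"
	"time"
)

func main() {
	cveIDs := []string{"CVE-2021-0001", "CVE-2021-0002", "CVE-2021-0003"}
	reqChan := make(chan string, len(cveIDs))
	resChan := make(chan string, len(cveIDs))
	for _, id := range cveIDs {
		reqChan <- id
	}
	close(reqChan) // all requests are queued up front, as in the code above

	// Bounded concurrency: two workers drain the request channel
	// (a stand-in for util.GenWorkers plus httpGet).
	for i := 0; i < 2; i++ {
		go func() {
			for id := range reqChan {
				resChan <- "fetched " + id
			}
		}()
	}

	// Fan-in with a global timeout, mirroring the select loop above.
	timeout := time.After(2 * time.Second)
	for i := 0; i < len(cveIDs); i++ {
		select {
		case res := <-resChan:
			fmt.Println(res)
		case <-timeout:
			fmt.Println("Timeout fetching CVEs")
			return
		}
	}
}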
Binary file not shown. Before: Size: 34 KiB
Binary file not shown. Before: Size: 297 KiB
Binary file not shown. Before: Size: 36 KiB
Some files were not shown because too many files have changed in this diff.