Compare commits
233 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3605645ff6 | ||
|
|
1d97e91341 | ||
|
|
96333f38c9 | ||
|
|
8b5d1c8e92 | ||
|
|
dea80f860c | ||
|
|
6eb4c5a5fe | ||
|
|
b219a8495e | ||
|
|
eb87d5d4e1 | ||
|
|
6963442a5e | ||
|
|
f7299b9dba | ||
|
|
379fc8a1a1 | ||
|
|
947fbbb29e | ||
|
|
06d2032c9c | ||
|
|
d055c48827 | ||
|
|
2a00339da1 | ||
|
|
2d959b3af8 | ||
|
|
595e26db41 | ||
|
|
1e457320c5 | ||
|
|
a06e689502 | ||
|
|
ca3f6b1dbf | ||
|
|
f1c78e42a2 | ||
|
|
2f3b8bf3cc | ||
|
|
ab54266f9e | ||
|
|
d79d138440 | ||
|
|
139f3a81b6 | ||
|
|
d1a617cfff | ||
|
|
48f7597bcf | ||
|
|
93731311a1 | ||
|
|
999529a05b | ||
|
|
847d820af7 | ||
|
|
5234306ded | ||
|
|
86b60e1478 | ||
|
|
42fdc08933 | ||
|
|
38b1d622f6 | ||
|
|
2477f9a8f8 | ||
|
|
ec6e90acd3 | ||
|
|
2aca2e4352 | ||
|
|
14518d925e | ||
|
|
948f8c0751 | ||
|
|
1c1e40058e | ||
|
|
2158fc6cb1 | ||
|
|
91ed318c5d | ||
|
|
bfc3828ce1 | ||
|
|
c7eac4e7fe | ||
|
|
cc63a0eccf | ||
|
|
fd18df1dd4 | ||
|
|
8775b5efdf | ||
|
|
a9f29a6c5d | ||
|
|
05fdde48f9 | ||
|
|
3dfbd6b616 | ||
|
|
04f246cf8b | ||
|
|
7500f41655 | ||
|
|
a1cc152e81 | ||
|
|
1c77bc1ba3 | ||
|
|
ec31c54caf | ||
|
|
2f05864813 | ||
|
|
2fbc0a001e | ||
|
|
7d8a24ee1a | ||
|
|
7750347010 | ||
|
|
9bcffcd721 | ||
|
|
787604de6a | ||
|
|
5164fb1423 | ||
|
|
07335617d3 | ||
|
|
e5855922c1 | ||
|
|
671be3f2f7 | ||
|
|
fe8d252c51 | ||
|
|
0cdc7a3af5 | ||
|
|
1cfe155a3a | ||
|
|
2923cbc645 | ||
|
|
7c209cc9dc | ||
|
|
84fa4ce432 | ||
|
|
f2e9cd9668 | ||
|
|
77049d6cbb | ||
|
|
b4c23c158b | ||
|
|
964b4aa389 | ||
|
|
dc5aa35db7 | ||
|
|
43c05d06fc | ||
|
|
a3f7d1d7e7 | ||
|
|
bb4a1ca6c2 | ||
|
|
57cce640e1 | ||
|
|
1eb5d36668 | ||
|
|
6bc4850596 | ||
|
|
24005ae7ae | ||
|
|
7aa296bb57 | ||
|
|
3829ed2f8e | ||
|
|
2b7294a504 | ||
|
|
0c6a892893 | ||
|
|
89d94ad85a | ||
|
|
ffdb78962f | ||
|
|
321dae37ce | ||
|
|
a31797af0b | ||
|
|
32999cf432 | ||
|
|
88218f5d92 | ||
|
|
15761933ac | ||
|
|
0b62842f0e | ||
|
|
6bceddeeda | ||
|
|
2dcbff8cd5 | ||
|
|
8659668177 | ||
|
|
e07b6a9160 | ||
|
|
aac5ef1438 | ||
|
|
d780a73297 | ||
|
|
9ef8cee36e | ||
|
|
77808a2c05 | ||
|
|
177e553d12 | ||
|
|
40f8272a28 | ||
|
|
a7eb1141ae | ||
|
|
c73ed7f32f | ||
|
|
f047a6fe0c | ||
|
|
7f15a86d6a | ||
|
|
da1e515253 | ||
|
|
591786fde6 | ||
|
|
47e6ea249d | ||
|
|
4a72295de7 | ||
|
|
9ed5f2cac5 | ||
|
|
3e67f04fe4 | ||
|
|
b9416ae062 | ||
|
|
b4e49e093e | ||
|
|
020f6ac609 | ||
|
|
7e71cbdd46 | ||
|
|
1003f62212 | ||
|
|
9b18e1f9f0 | ||
|
|
24f790f474 | ||
|
|
fb8749fc5e | ||
|
|
96c3592db1 | ||
|
|
d65421cf46 | ||
|
|
c52ba448cd | ||
|
|
21adce463b | ||
|
|
f24240bf90 | ||
|
|
ff83cadd6e | ||
|
|
e8c09282d9 | ||
|
|
5f4d68cde4 | ||
|
|
9077a83ea8 | ||
|
|
543dc99ecd | ||
|
|
f0b3a8b1db | ||
|
|
0b9ec05181 | ||
|
|
0bf12412d6 | ||
|
|
0ea4d58c63 | ||
|
|
5755b00576 | ||
|
|
1c8e074c9d | ||
|
|
0e0e5ce4be | ||
|
|
23dfe53885 | ||
|
|
8e6351a9e4 | ||
|
|
3086e2760f | ||
|
|
b8db2e0b74 | ||
|
|
43b46cb324 | ||
|
|
d0559c7719 | ||
|
|
231c63cf62 | ||
|
|
2a9aebe059 | ||
|
|
4e535d792f | ||
|
|
4b487503d4 | ||
|
|
0095c40e69 | ||
|
|
82c1abfd3a | ||
|
|
40988401bd | ||
|
|
e8e3f4d138 | ||
|
|
7eb77f5b51 | ||
|
|
e115235299 | ||
|
|
151d4b2d30 | ||
|
|
e553f8b4c5 | ||
|
|
47652ef0fb | ||
|
|
ab0e950800 | ||
|
|
a7b0ce1c85 | ||
|
|
dc9c0edece | ||
|
|
17ae386d1e | ||
|
|
2d369d0cfe | ||
|
|
c36e645d9b | ||
|
|
40039c07e2 | ||
|
|
a692cec0ef | ||
|
|
e7ca491a94 | ||
|
|
23f3e2fc11 | ||
|
|
27b3e17b79 | ||
|
|
740781af56 | ||
|
|
36c9c229b8 | ||
|
|
183fdcbdef | ||
|
|
a2a697900a | ||
|
|
6fef4db8a0 | ||
|
|
e879ff1e9e | ||
|
|
9bfe0627ae | ||
|
|
0179f4299a | ||
|
|
56017e57a0 | ||
|
|
cda91e0906 | ||
|
|
5d47adb5c9 | ||
|
|
54e73c2f54 | ||
|
|
2d075079f1 | ||
|
|
2a8ee4b22b | ||
|
|
1ec31d7be9 | ||
|
|
02286b0c59 | ||
|
|
1d0c5dea9f | ||
|
|
1c4a12c4b7 | ||
|
|
3f2ac45d71 | ||
|
|
518f4dc039 | ||
|
|
2cdeef4ffe | ||
|
|
03579126fd | ||
|
|
e3c27e1817 | ||
|
|
aeaf308679 | ||
|
|
f5e47bea40 | ||
|
|
50cf13a7f2 | ||
|
|
abd8041772 | ||
|
|
847c6438e7 | ||
|
|
ef8309df27 | ||
|
|
0dff6cf983 | ||
|
|
4c04acbd9e | ||
|
|
1c4f231572 | ||
|
|
51b8e169d2 | ||
|
|
b4611ae9b7 | ||
|
|
cd6722017b | ||
|
|
290edffccf | ||
|
|
64a6222bf9 | ||
|
|
adb686b7c9 | ||
|
|
d4af341b0f | ||
|
|
fea7e93c8d | ||
|
|
8b6b8d0f2e | ||
|
|
4dcbd865cc | ||
|
|
39b19444fe | ||
|
|
644d5a5462 | ||
|
|
8e18451e3f | ||
|
|
3dbdd01f97 | ||
|
|
a89079c005 | ||
|
|
a8c0926b4f | ||
|
|
dd2959a31b | ||
|
|
51099f42c3 | ||
|
|
63f170cc7a | ||
|
|
3c1489e588 | ||
|
|
e4f1e03f62 | ||
|
|
83d48ec990 | ||
|
|
b20d2b2684 | ||
|
|
2b918c70ae | ||
|
|
1100c133ba | ||
|
|
88899f0e89 | ||
|
|
59dc0059bc | ||
|
|
986fb304c0 | ||
|
|
d6435d2885 | ||
|
|
affb456499 | ||
|
|
705ed0a0ac |
@@ -1,7 +0,0 @@
|
||||
.dockerignore
|
||||
Dockerfile
|
||||
vendor/
|
||||
cve.sqlite3*
|
||||
oval.sqlite3*
|
||||
setup/
|
||||
img/
|
||||
3
.github/FUNDING.yml
vendored
3
.github/FUNDING.yml
vendored
@@ -1,3 +0,0 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: kotakanbe
|
||||
43
.github/ISSUE_TEMPLATE/BUG_REPORT.md
vendored
43
.github/ISSUE_TEMPLATE/BUG_REPORT.md
vendored
@@ -1,43 +0,0 @@
|
||||
---
|
||||
name: Bug Report
|
||||
labels: bug
|
||||
about: If something isn't working as expected.
|
||||
---
|
||||
|
||||
# What did you do? (required. The issue will be **closed** when not provided.)
|
||||
|
||||
|
||||
# What did you expect to happen?
|
||||
|
||||
|
||||
# What happened instead?
|
||||
|
||||
* Current Output
|
||||
|
||||
Please re-run the command using ```-debug``` and provide the output below.
|
||||
|
||||
# Steps to reproduce the behaviour
|
||||
|
||||
|
||||
# Configuration (**MUST** fill this out):
|
||||
|
||||
* Go version (`go version`):
|
||||
|
||||
* Go environment (`go env`):
|
||||
|
||||
* Vuls environment:
|
||||
|
||||
Hash : ____
|
||||
|
||||
To check the commit hash of HEAD
|
||||
$ vuls -v
|
||||
|
||||
or
|
||||
|
||||
$ cd $GOPATH/src/github.com/future-architect/vuls
|
||||
$ git rev-parse --short HEAD
|
||||
|
||||
* config.toml:
|
||||
|
||||
* command:
|
||||
|
||||
9
.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
vendored
9
.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
vendored
@@ -1,9 +0,0 @@
|
||||
---
|
||||
name: Feature Request
|
||||
labels: enhancement
|
||||
about: I have a suggestion (and might want to implement myself)!
|
||||
---
|
||||
|
||||
<!--
|
||||
If this is a FEATURE REQUEST, request format does not matter!
|
||||
-->
|
||||
10
.github/ISSUE_TEMPLATE/SUPPORT_QUESTION.md
vendored
10
.github/ISSUE_TEMPLATE/SUPPORT_QUESTION.md
vendored
@@ -1,10 +0,0 @@
|
||||
---
|
||||
name: Support Question
|
||||
labels: question
|
||||
about: If you have a question about Vuls.
|
||||
---
|
||||
|
||||
<!--
|
||||
If you have a trouble, feel free to ask.
|
||||
Make sure you're not asking duplicate question by searching on the issues lists.
|
||||
-->
|
||||
7
.github/ISSUE_TEMPLATE/VULSREPO.md
vendored
7
.github/ISSUE_TEMPLATE/VULSREPO.md
vendored
@@ -1,7 +0,0 @@
|
||||
---
|
||||
name: Vuls Repo
|
||||
labels: vulsrepo
|
||||
about: If something isn't working as expected.
|
||||
---
|
||||
|
||||
|
||||
40
.github/PULL_REQUEST_TEMPLATE.md
vendored
40
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,40 +0,0 @@
|
||||
|
||||
If this Pull Request is work in progress, Add a prefix of “[WIP]” in the title.
|
||||
|
||||
# What did you implement:
|
||||
|
||||
Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context.
|
||||
|
||||
Fixes # (issue)
|
||||
|
||||
## Type of change
|
||||
|
||||
Please delete options that are not relevant.
|
||||
|
||||
- [ ] Bug fix (non-breaking change which fixes an issue)
|
||||
- [ ] New feature (non-breaking change which adds functionality)
|
||||
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
|
||||
- [ ] This change requires a documentation update
|
||||
|
||||
# How Has This Been Tested?
|
||||
|
||||
Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce.
|
||||
|
||||
# Checklist:
|
||||
You don't have to satisfy all of the following.
|
||||
|
||||
- [ ] Write tests
|
||||
- [ ] Write documentation
|
||||
- [ ] Check that there aren't other open pull requests for the same issue/feature
|
||||
- [ ] Format your source code by `make fmt`
|
||||
- [ ] Pass the test by `make test`
|
||||
- [ ] Provide verification config / commands
|
||||
- [ ] Enable "Allow edits from maintainers" for this PR
|
||||
- [ ] Update the messages below
|
||||
|
||||
***Is this ready for review?:*** NO
|
||||
|
||||
# Reference
|
||||
|
||||
* https://blog.github.com/2015-01-21-how-to-write-the-perfect-pull-request/
|
||||
|
||||
29
.github/workflows/golangci.yml
vendored
29
.github/workflows/golangci.yml
vendored
@@ -1,29 +0,0 @@
|
||||
name: golangci-lint
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
jobs:
|
||||
golangci:
|
||||
name: lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
# Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
|
||||
version: v1.32
|
||||
args: --timeout=10m
|
||||
|
||||
# Optional: working directory, useful for monorepos
|
||||
# working-directory: somedir
|
||||
|
||||
# Optional: golangci-lint command line arguments.
|
||||
# args: --issues-exit-code=0
|
||||
|
||||
# Optional: show only new issues if it's a pull request. The default value is `false`.
|
||||
# only-new-issues: true
|
||||
31
.github/workflows/goreleaser.yml
vendored
31
.github/workflows/goreleaser.yml
vendored
@@ -1,31 +0,0 @@
|
||||
name: goreleaser
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
goreleaser:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
-
|
||||
name: Unshallow
|
||||
run: git fetch --prune --unshallow
|
||||
-
|
||||
name: Set up Go
|
||||
uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: 1.15
|
||||
-
|
||||
name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v2
|
||||
with:
|
||||
version: latest
|
||||
args: release --rm-dist
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
27
.github/workflows/test.yml
vendored
27
.github/workflows/test.yml
vendored
@@ -1,21 +1,26 @@
|
||||
name: Test
|
||||
|
||||
on: [pull_request]
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build
|
||||
test:
|
||||
name: Test
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set up Go 1.x
|
||||
uses: actions/setup-go@v2
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.15.x
|
||||
id: go
|
||||
go-version-file: 'go.mod'
|
||||
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Test
|
||||
run: make test
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v3
|
||||
with:
|
||||
version: latest
|
||||
args: --timeout=10m
|
||||
|
||||
- name: testing
|
||||
run: go test -race ./...
|
||||
22
.github/workflows/tidy.yml
vendored
22
.github/workflows/tidy.yml
vendored
@@ -1,22 +0,0 @@
|
||||
name: go-mod-tidy-pr
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 * * 1" # Weekly build
|
||||
|
||||
jobs:
|
||||
go-mod-tidy-pr:
|
||||
name: go-mod-tidy-pr
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Run go-mod-tidy-pr
|
||||
uses: sue445/go-mod-tidy-pr@master
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
git_user_name: kotakanbe
|
||||
git_user_email: kotakanbe@gmail.com
|
||||
go_version: 1.15.6
|
||||
39
.gitignore
vendored
39
.gitignore
vendored
@@ -1,17 +1,24 @@
|
||||
# Binaries for programs and plugins
|
||||
*.exe
|
||||
*.exe~
|
||||
*.dll
|
||||
*.so
|
||||
*.dylib
|
||||
|
||||
# Test binary, built with `go test -c`
|
||||
*.test
|
||||
|
||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||
*.out
|
||||
|
||||
# Dependency directories (remove the comment below to include it)
|
||||
# vendor/
|
||||
|
||||
.vscode
|
||||
*.txt
|
||||
*.json
|
||||
*.sqlite3*
|
||||
*.db
|
||||
tags
|
||||
.gitmodules
|
||||
coverage.out
|
||||
issues/
|
||||
vendor/
|
||||
log/
|
||||
results/
|
||||
*config.toml
|
||||
!setup/docker/*
|
||||
.DS_Store
|
||||
dist/
|
||||
.idea
|
||||
|
||||
# Vuls
|
||||
vuls
|
||||
!cmd/vuls
|
||||
vuls.db
|
||||
config.json
|
||||
results
|
||||
@@ -1,17 +0,0 @@
|
||||
name: golang-ci
|
||||
|
||||
linters-settings:
|
||||
errcheck:
|
||||
#exclude: /path/to/file.txt
|
||||
|
||||
linters:
|
||||
disable-all: true
|
||||
enable:
|
||||
- goimports
|
||||
- golint
|
||||
- govet
|
||||
- misspell
|
||||
- errcheck
|
||||
- staticcheck
|
||||
- prealloc
|
||||
- ineffassign
|
||||
114
.goreleaser.yml
114
.goreleaser.yml
@@ -1,114 +0,0 @@
|
||||
project_name: vuls
|
||||
env:
|
||||
- GO111MODULE=on
|
||||
release:
|
||||
github:
|
||||
owner: future-architect
|
||||
name: vuls
|
||||
builds:
|
||||
- id: vuls
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- amd64
|
||||
main: ./cmd/vuls/main.go
|
||||
flags:
|
||||
- -a
|
||||
ldflags:
|
||||
- -s -w -X github.com/future-architect/vuls/config.Version={{.Version}} -X github.com/future-architect/vuls/config.Revision={{.Commit}}-{{ .CommitDate }}
|
||||
binary: vuls
|
||||
|
||||
- id: vuls-scanner
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- 386
|
||||
- amd64
|
||||
- arm
|
||||
- arm64
|
||||
main: ./cmd/scanner/main.go
|
||||
flags:
|
||||
- -a
|
||||
- -tags=scanner
|
||||
ldflags:
|
||||
- -s -w -X github.com/future-architect/vuls/config.Version={{.Version}} -X github.com/future-architect/vuls/config.Revision={{.Commit}}-{{ .CommitDate }}
|
||||
binary: vuls-scanner
|
||||
|
||||
- id: trivy-to-vuls
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- 386
|
||||
- amd64
|
||||
- arm
|
||||
- arm64
|
||||
main: ./contrib/trivy/cmd/main.go
|
||||
binary: trivy-to-vuls
|
||||
|
||||
- id: future-vuls
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- 386
|
||||
- amd64
|
||||
- arm
|
||||
- arm64
|
||||
flags:
|
||||
- -a
|
||||
- -tags=scanner
|
||||
main: ./contrib/future-vuls/cmd/main.go
|
||||
binary: future-vuls
|
||||
|
||||
archives:
|
||||
|
||||
- id: vuls
|
||||
name_template: '{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'
|
||||
builds:
|
||||
- vuls
|
||||
format: tar.gz
|
||||
files:
|
||||
- LICENSE
|
||||
- NOTICE
|
||||
- README*
|
||||
- CHANGELOG.md
|
||||
|
||||
- id: vuls-scanner
|
||||
name_template: '{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'
|
||||
builds:
|
||||
- vuls-scanner
|
||||
format: tar.gz
|
||||
files:
|
||||
- LICENSE
|
||||
- NOTICE
|
||||
- README*
|
||||
- CHANGELOG.md
|
||||
|
||||
- id: trivy-to-vuls
|
||||
name_template: '{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'
|
||||
builds:
|
||||
- trivy-to-vuls
|
||||
format: tar.gz
|
||||
files:
|
||||
- LICENSE
|
||||
- NOTICE
|
||||
- README*
|
||||
- CHANGELOG.md
|
||||
|
||||
- id: future-vuls
|
||||
name_template: '{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'
|
||||
builds:
|
||||
- future-vuls
|
||||
format: tar.gz
|
||||
files:
|
||||
- LICENSE
|
||||
- NOTICE
|
||||
- README*
|
||||
- CHANGELOG.md
|
||||
snapshot:
|
||||
name_template: SNAPSHOT-{{ .Commit }}
|
||||
514
CHANGELOG.md
514
CHANGELOG.md
@@ -1,514 +0,0 @@
|
||||
# Change Log
|
||||
|
||||
## v0.4.1 and later, see [GitHub release](https://github.com/future-architect/vuls/releases)
|
||||
|
||||
## [v0.4.0](https://github.com/future-architect/vuls/tree/v0.4.0) (2017-08-25)
|
||||
[Full Changelog](https://github.com/future-architect/vuls/compare/v0.3.0...v0.4.0)
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- Output changelog in report, TUI and JSON for RHEL [\#367](https://github.com/future-architect/vuls/issues/367)
|
||||
- Output changelog in report, TUI and JSON for Amazon Linux [\#366](https://github.com/future-architect/vuls/issues/366)
|
||||
- Improve scanning accuracy by checking package versions [\#256](https://github.com/future-architect/vuls/issues/256)
|
||||
- Improve SSH [\#415](https://github.com/future-architect/vuls/issues/415)
|
||||
- Enable to scan even if target server can not connect to the Internet [\#258](https://github.com/future-architect/vuls/issues/258)
|
||||
- SSH Hostkey check [\#417](https://github.com/future-architect/vuls/pull/417) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- v0.4.0 [\#449](https://github.com/future-architect/vuls/pull/449) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Change default ssh method from go library to external command [\#416](https://github.com/future-architect/vuls/pull/416) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add containers-only option to configtest [\#411](https://github.com/future-architect/vuls/pull/411) ([knqyf263](https://github.com/knqyf263))
|
||||
|
||||
**Fixed bugs:**
|
||||
|
||||
- Running Vuls tui before vuls report does not show vulnerabilities checked by CPE [\#396](https://github.com/future-architect/vuls/issues/396)
|
||||
- With a long package name, Local shell mode \(stty dont' work\) [\#444](https://github.com/future-architect/vuls/issues/444)
|
||||
- Improve SSH [\#415](https://github.com/future-architect/vuls/issues/415)
|
||||
- Report that a vulnerability exists in the wrong package [\#408](https://github.com/future-architect/vuls/issues/408)
|
||||
- With a long package name, a parse error occurs. [\#391](https://github.com/future-architect/vuls/issues/391)
|
||||
- Ubuntu failed to scan vulnerable packages [\#205](https://github.com/future-architect/vuls/issues/205)
|
||||
- CVE-ID in changelog can't be picked up. [\#154](https://github.com/future-architect/vuls/issues/154)
|
||||
- v0.4.0 [\#449](https://github.com/future-architect/vuls/pull/449) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix SSH dial error [\#413](https://github.com/future-architect/vuls/pull/413) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Update deps, Change deps tool from glide to dep [\#412](https://github.com/future-architect/vuls/pull/412) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- fix report option Loaded error-info [\#406](https://github.com/future-architect/vuls/pull/406) ([hogehogehugahuga](https://github.com/hogehogehugahuga))
|
||||
- Add --user root to docker exec command [\#389](https://github.com/future-architect/vuls/pull/389) ([PaulFurtado](https://github.com/PaulFurtado))
|
||||
|
||||
**Closed issues:**
|
||||
|
||||
- README.md.ja not include "Oracle Linux, FreeBSD" [\#465](https://github.com/future-architect/vuls/issues/465)
|
||||
- Can't scan remote server - \(centos 7 - updated\) [\#451](https://github.com/future-architect/vuls/issues/451)
|
||||
- An abnormality in the result of vuls tui [\#439](https://github.com/future-architect/vuls/issues/439)
|
||||
- compile faild [\#436](https://github.com/future-architect/vuls/issues/436)
|
||||
- Can't install vuls on CentOS 7 [\#432](https://github.com/future-architect/vuls/issues/432)
|
||||
- Vuls scan doesn't show severity score in any of the vulnerable packages [\#430](https://github.com/future-architect/vuls/issues/430)
|
||||
- Load config failedtoml: cannot load TOML value of type string into a Go slice [\#429](https://github.com/future-architect/vuls/issues/429)
|
||||
- vuls scan not running check-update with sudo for Centos 7 [\#428](https://github.com/future-architect/vuls/issues/428)
|
||||
- options for configtest not being activated [\#422](https://github.com/future-architect/vuls/issues/422)
|
||||
- "could not find project Gopkg.toml, use dep init to initiate a manifest" when installing vuls [\#420](https://github.com/future-architect/vuls/issues/420)
|
||||
- go get not get [\#407](https://github.com/future-architect/vuls/issues/407)
|
||||
- Failed to scan via docker. err: Unknown format [\#404](https://github.com/future-architect/vuls/issues/404)
|
||||
- Failed to scan - kernel-xxx is an installed security update [\#403](https://github.com/future-architect/vuls/issues/403)
|
||||
- 169.254.169.254 port 80: Connection refused [\#402](https://github.com/future-architect/vuls/issues/402)
|
||||
- vuls scan --debug cause `invalid memory address` error [\#397](https://github.com/future-architect/vuls/issues/397)
|
||||
- Provide a command line flag that will automatically install aptitude on debian? [\#390](https://github.com/future-architect/vuls/issues/390)
|
||||
|
||||
**Merged pull requests:**
|
||||
|
||||
- export fill cve info [\#467](https://github.com/future-architect/vuls/pull/467) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- add oval docker [\#466](https://github.com/future-architect/vuls/pull/466) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- fix typos in commands. [\#464](https://github.com/future-architect/vuls/pull/464) ([ymomoi](https://github.com/ymomoi))
|
||||
- Update README [\#463](https://github.com/future-architect/vuls/pull/463) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- export FillWithOval [\#462](https://github.com/future-architect/vuls/pull/462) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- add serveruuid field [\#458](https://github.com/future-architect/vuls/pull/458) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- add s3 dirctory option [\#457](https://github.com/future-architect/vuls/pull/457) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Extract Advisory.Description on RHEL, Amazon, Oracle [\#450](https://github.com/future-architect/vuls/pull/450) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- nosudo on CentOS and Fetch Changelogs on Amazon, RHEL [\#448](https://github.com/future-architect/vuls/pull/448) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- change logrus package to lowercase and update other packages [\#446](https://github.com/future-architect/vuls/pull/446) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- add db backend redis [\#445](https://github.com/future-architect/vuls/pull/445) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- fast test [\#435](https://github.com/future-architect/vuls/pull/435) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- fix typo [\#433](https://github.com/future-architect/vuls/pull/433) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Add support for PostgreSQL as a DB storage back-end [\#431](https://github.com/future-architect/vuls/pull/431) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- typo README.js.md [\#426](https://github.com/future-architect/vuls/pull/426) ([ryurock](https://github.com/ryurock))
|
||||
- Add TOC to README [\#425](https://github.com/future-architect/vuls/pull/425) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fixing \#420 where lock and manifest have moved to TOML [\#421](https://github.com/future-architect/vuls/pull/421) ([elfgoh](https://github.com/elfgoh))
|
||||
- Define timeout for vulnerabilities scan and platform detection [\#414](https://github.com/future-architect/vuls/pull/414) ([s7anley](https://github.com/s7anley))
|
||||
- Enable -timeout option when detecting OS [\#410](https://github.com/future-architect/vuls/pull/410) ([knqyf263](https://github.com/knqyf263))
|
||||
- Remove duplicate command in README [\#401](https://github.com/future-architect/vuls/pull/401) ([knqyf263](https://github.com/knqyf263))
|
||||
- Fix to read config.toml at tui [\#441](https://github.com/future-architect/vuls/pull/441) ([usiusi360](https://github.com/usiusi360))
|
||||
- Change NVD URL to new one [\#419](https://github.com/future-architect/vuls/pull/419) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add some testcases [\#418](https://github.com/future-architect/vuls/pull/418) ([kotakanbe](https://github.com/kotakanbe))
|
||||
|
||||
## [v0.3.0](https://github.com/future-architect/vuls/tree/v0.3.0) (2017-03-24)
|
||||
[Full Changelog](https://github.com/future-architect/vuls/compare/v0.2.0...v0.3.0)
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- Changelog parsing fails when package maintainers aren't consistent regarding versions [\#327](https://github.com/future-architect/vuls/issues/327)
|
||||
- Docker scan doesn't report image name [\#325](https://github.com/future-architect/vuls/issues/325)
|
||||
- vuls report -to-email only one E-Mail [\#295](https://github.com/future-architect/vuls/issues/295)
|
||||
- Support RHEL5 [\#286](https://github.com/future-architect/vuls/issues/286)
|
||||
- Continue scanning even when some hosts have tech issues? [\#264](https://github.com/future-architect/vuls/issues/264)
|
||||
- Normalization of JSON output [\#259](https://github.com/future-architect/vuls/issues/259)
|
||||
- Add report subcommand, change scan subcommand options [\#239](https://github.com/future-architect/vuls/issues/239)
|
||||
- scan localhost? [\#210](https://github.com/future-architect/vuls/issues/210)
|
||||
- Can Vuls show details about updateable packages [\#341](https://github.com/future-architect/vuls/issues/341)
|
||||
- Scan all containers except [\#285](https://github.com/future-architect/vuls/issues/285)
|
||||
- Notify the difference from the previous scan result [\#255](https://github.com/future-architect/vuls/issues/255)
|
||||
- EC2RoleCreds support? [\#250](https://github.com/future-architect/vuls/issues/250)
|
||||
- Output confidence score of detection accuracy and detection method to JSON or Reporting [\#350](https://github.com/future-architect/vuls/pull/350) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Avoid null slice being null in JSON [\#345](https://github.com/future-architect/vuls/pull/345) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add -format-one-email option [\#331](https://github.com/future-architect/vuls/pull/331) ([knqyf263](https://github.com/knqyf263))
|
||||
- Support Raspbian [\#330](https://github.com/future-architect/vuls/pull/330) ([knqyf263](https://github.com/knqyf263))
|
||||
- Add leniancy to the version matching for debian to account for versio… [\#328](https://github.com/future-architect/vuls/pull/328) ([jsulinski](https://github.com/jsulinski))
|
||||
- Add image information for docker containers [\#326](https://github.com/future-architect/vuls/pull/326) ([jsulinski](https://github.com/jsulinski))
|
||||
- Continue scanning even when some hosts have tech issues [\#309](https://github.com/future-architect/vuls/pull/309) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add -log-dir option [\#301](https://github.com/future-architect/vuls/pull/301) ([knqyf263](https://github.com/knqyf263))
|
||||
- Use --assumeno option [\#300](https://github.com/future-architect/vuls/pull/300) ([knqyf263](https://github.com/knqyf263))
|
||||
- Add local scan mode\(Scan without SSH when target server is localhost\) [\#291](https://github.com/future-architect/vuls/pull/291) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Support RHEL5 [\#289](https://github.com/future-architect/vuls/pull/289) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add LXD support [\#288](https://github.com/future-architect/vuls/pull/288) ([jiazio](https://github.com/jiazio))
|
||||
- Add timeout option to configtest [\#400](https://github.com/future-architect/vuls/pull/400) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Notify the difference from the previous scan result [\#392](https://github.com/future-architect/vuls/pull/392) ([knqyf263](https://github.com/knqyf263))
|
||||
- Add Oracle Linux support [\#386](https://github.com/future-architect/vuls/pull/386) ([Djelibeybi](https://github.com/Djelibeybi))
|
||||
- Change container scan format in config.toml [\#381](https://github.com/future-architect/vuls/pull/381) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Obsolete CentOS5 support [\#378](https://github.com/future-architect/vuls/pull/378) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Deprecate prepare subcommand to minimize the root authority defined by /etc/sudoers [\#375](https://github.com/future-architect/vuls/pull/375) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Support IAM role for report to S3. [\#370](https://github.com/future-architect/vuls/pull/370) ([ohsawa0515](https://github.com/ohsawa0515))
|
||||
- Add .travis.yml [\#363](https://github.com/future-architect/vuls/pull/363) ([knqyf263](https://github.com/knqyf263))
|
||||
- Output changelog in report, TUI and JSON for Ubuntu/Debian/CentOS [\#356](https://github.com/future-architect/vuls/pull/356) ([kotakanbe](https://github.com/kotakanbe))
|
||||
|
||||
**Fixed bugs:**
|
||||
|
||||
- Debian scans failing in docker [\#323](https://github.com/future-architect/vuls/issues/323)
|
||||
- Local CVE DB is still checked, even if a CVE Dictionary URL is defined [\#316](https://github.com/future-architect/vuls/issues/316)
|
||||
- vuls needs gmake. [\#313](https://github.com/future-architect/vuls/issues/313)
|
||||
- patch request for FreeBSD [\#312](https://github.com/future-architect/vuls/issues/312)
|
||||
- Report: failed to read from json \(Docker\) [\#294](https://github.com/future-architect/vuls/issues/294)
|
||||
- -report-mail option does not output required mail header [\#282](https://github.com/future-architect/vuls/issues/282)
|
||||
- PackInfo not found error when vuls scan. [\#281](https://github.com/future-architect/vuls/issues/281)
|
||||
- Normalize character set [\#279](https://github.com/future-architect/vuls/issues/279)
|
||||
- The number of Updatable Packages is different from the number of yum check-update [\#373](https://github.com/future-architect/vuls/issues/373)
|
||||
- sudo is needed when exec yum check-update on RHEL7 [\#371](https://github.com/future-architect/vuls/issues/371)
|
||||
- `123-3ubuntu4` should be marked as ChangelogLenientMatch [\#362](https://github.com/future-architect/vuls/issues/362)
|
||||
- CentOS multi package invalid result [\#360](https://github.com/future-architect/vuls/issues/360)
|
||||
- Parse error after check-update. \(Unknown format\) [\#359](https://github.com/future-architect/vuls/issues/359)
|
||||
- Fix candidate to confidence. [\#354](https://github.com/future-architect/vuls/pull/354) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Bug fix: not send e-mail to cc address [\#346](https://github.com/future-architect/vuls/pull/346) ([knqyf263](https://github.com/knqyf263))
|
||||
- Change the command used for os detection from uname to freebsd-version [\#340](https://github.com/future-architect/vuls/pull/340) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix error handling of detectOS [\#337](https://github.com/future-architect/vuls/pull/337) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix infinite retry at size overrun error in Slack report [\#329](https://github.com/future-architect/vuls/pull/329) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- aptitude changelog defaults to using more, which is not interactive a… [\#324](https://github.com/future-architect/vuls/pull/324) ([jsulinski](https://github.com/jsulinski))
|
||||
- Do not use sudo when echo [\#322](https://github.com/future-architect/vuls/pull/322) ([knqyf263](https://github.com/knqyf263))
|
||||
- Reduce privilege requirements for commands that don't need sudo on Ubuntu/Debian [\#319](https://github.com/future-architect/vuls/pull/319) ([jsulinski](https://github.com/jsulinski))
|
||||
- Don't check for a CVE DB when CVE Dictionary URL is defined [\#317](https://github.com/future-architect/vuls/pull/317) ([jsulinski](https://github.com/jsulinski))
|
||||
- Fix typo contianer -\> container [\#314](https://github.com/future-architect/vuls/pull/314) ([justyns](https://github.com/justyns))
|
||||
- Fix the changelog cache logic for ubuntu/debian [\#305](https://github.com/future-architect/vuls/pull/305) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix yum updateinfo options [\#304](https://github.com/future-architect/vuls/pull/304) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Update glide.lock to fix create-log-dir error. [\#303](https://github.com/future-architect/vuls/pull/303) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix a bug in logging \(file output\) at scan command [\#302](https://github.com/future-architect/vuls/pull/302) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add -pipe flag \#294 [\#299](https://github.com/future-architect/vuls/pull/299) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix RHEL5 scan stopped halfway [\#293](https://github.com/future-architect/vuls/pull/293) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix amazon linux scan stopped halfway [\#292](https://github.com/future-architect/vuls/pull/292) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix nil-ponter in TUI [\#388](https://github.com/future-architect/vuls/pull/388) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix Bug of Mysql Backend [\#384](https://github.com/future-architect/vuls/pull/384) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix scan confidence on Ubuntu/Debian/Raspbian \#362 [\#379](https://github.com/future-architect/vuls/pull/379) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix updatalbe packages count \#373 [\#374](https://github.com/future-architect/vuls/pull/374) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- sudo yum check-update on RHEL [\#372](https://github.com/future-architect/vuls/pull/372) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Change ssh option from -t to -tt [\#369](https://github.com/future-architect/vuls/pull/369) ([knqyf263](https://github.com/knqyf263))
|
||||
- Increase the width of RequestPty [\#364](https://github.com/future-architect/vuls/pull/364) ([knqyf263](https://github.com/knqyf263))
|
||||
|
||||
**Closed issues:**
|
||||
|
||||
- vuls configtest --debugがsudoのチェックで止まってしまう [\#395](https://github.com/future-architect/vuls/issues/395)
|
||||
- Add support for Oracle Linux [\#385](https://github.com/future-architect/vuls/issues/385)
|
||||
- error on install - Ubuntu 16.04 [\#376](https://github.com/future-architect/vuls/issues/376)
|
||||
- Unknown OS Type [\#335](https://github.com/future-architect/vuls/issues/335)
|
||||
- mac os 10.12.3 make install error [\#334](https://github.com/future-architect/vuls/issues/334)
|
||||
- assumeYes doesn't work because there is no else condition [\#320](https://github.com/future-architect/vuls/issues/320)
|
||||
- Debian scan uses sudo where unnecessary [\#318](https://github.com/future-architect/vuls/issues/318)
|
||||
- Add FreeBSD 11 to supported OS on documents. [\#311](https://github.com/future-architect/vuls/issues/311)
|
||||
- docker fetchnvd failing [\#274](https://github.com/future-architect/vuls/issues/274)
|
||||
- Latest version of labstack echo breaks installation [\#268](https://github.com/future-architect/vuls/issues/268)
|
||||
- fetchnvd Fails using example loop [\#267](https://github.com/future-architect/vuls/issues/267)
|
||||
|
||||
**Merged pull requests:**
|
||||
|
||||
- fix typo in README.ja.md [\#394](https://github.com/future-architect/vuls/pull/394) ([lv7777](https://github.com/lv7777))
|
||||
- Update Tutorial in README [\#387](https://github.com/future-architect/vuls/pull/387) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix README [\#383](https://github.com/future-architect/vuls/pull/383) ([usiusi360](https://github.com/usiusi360))
|
||||
- s/dictinary/dictionary typo [\#382](https://github.com/future-architect/vuls/pull/382) ([beuno](https://github.com/beuno))
|
||||
- Fix Japanese typo [\#377](https://github.com/future-architect/vuls/pull/377) ([IMAI-Yuji](https://github.com/IMAI-Yuji))
|
||||
- Improve kanji character [\#351](https://github.com/future-architect/vuls/pull/351) ([hasegawa-tomoki](https://github.com/hasegawa-tomoki))
|
||||
- Add PULL\_REQUEST\_TEMPLATE.md [\#348](https://github.com/future-architect/vuls/pull/348) ([knqyf263](https://github.com/knqyf263))
|
||||
- Update README [\#347](https://github.com/future-architect/vuls/pull/347) ([knqyf263](https://github.com/knqyf263))
|
||||
- Fix test case [\#344](https://github.com/future-architect/vuls/pull/344) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix typo [\#343](https://github.com/future-architect/vuls/pull/343) ([knqyf263](https://github.com/knqyf263))
|
||||
- Rename Makefile to GNUmakefile \#313 [\#339](https://github.com/future-architect/vuls/pull/339) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Update README [\#338](https://github.com/future-architect/vuls/pull/338) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- add error handling [\#332](https://github.com/future-architect/vuls/pull/332) ([kazuminn](https://github.com/kazuminn))
|
||||
- Update readme [\#308](https://github.com/future-architect/vuls/pull/308) ([lapthorn](https://github.com/lapthorn))
|
||||
- Update glide.lock to fix import error [\#306](https://github.com/future-architect/vuls/pull/306) ([knqyf263](https://github.com/knqyf263))
|
||||
- Check whether echo is executable with nopasswd [\#298](https://github.com/future-architect/vuls/pull/298) ([knqyf263](https://github.com/knqyf263))
|
||||
- Update docker README [\#297](https://github.com/future-architect/vuls/pull/297) ([knqyf263](https://github.com/knqyf263))
|
||||
- update readme [\#296](https://github.com/future-architect/vuls/pull/296) ([galigalikun](https://github.com/galigalikun))
|
||||
- remove unused import line. [\#358](https://github.com/future-architect/vuls/pull/358) ([ymomoi](https://github.com/ymomoi))
|
||||
|
||||
## [v0.2.0](https://github.com/future-architect/vuls/tree/v0.2.0) (2017-01-10)
|
||||
[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.7...v0.2.0)
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- Add report subcommand, change scan options. \#239 [\#270](https://github.com/future-architect/vuls/pull/270) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add --assume-yes to prepare \#260 [\#266](https://github.com/future-architect/vuls/pull/266) ([Code0x58](https://github.com/Code0x58))
|
||||
- Use RFC3339 timestamps in the results [\#265](https://github.com/future-architect/vuls/pull/265) ([Code0x58](https://github.com/Code0x58))
|
||||
|
||||
**Fixed bugs:**
|
||||
|
||||
- vuls prepare failed to centos7 [\#275](https://github.com/future-architect/vuls/issues/275)
|
||||
- Failed to scan on RHEL5 [\#94](https://github.com/future-architect/vuls/issues/94)
|
||||
- Fix container os detection [\#287](https://github.com/future-architect/vuls/pull/287) ([jiazio](https://github.com/jiazio))
|
||||
- Add date header to report mail. [\#283](https://github.com/future-architect/vuls/pull/283) ([ymomoi](https://github.com/ymomoi))
|
||||
- Add Content-Type header to report/mail.go . [\#280](https://github.com/future-architect/vuls/pull/280) ([hogehogehugahuga](https://github.com/hogehogehugahuga))
|
||||
- Keep output of "vuls scan -report-\*" to be same every times [\#272](https://github.com/future-architect/vuls/pull/272) ([yoheimuta](https://github.com/yoheimuta))
|
||||
- Fix JSON-dir regex pattern \#265 [\#271](https://github.com/future-architect/vuls/pull/271) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Stop quietly ignoring `--ssh-external` on Windows [\#263](https://github.com/future-architect/vuls/pull/263) ([Code0x58](https://github.com/Code0x58))
|
||||
- Fix non-interactive `apt-get install` \#251 [\#253](https://github.com/future-architect/vuls/pull/253) ([Code0x58](https://github.com/Code0x58))
|
||||
|
||||
**Closed issues:**
|
||||
|
||||
- gocui.NewGui now takes a parameter [\#261](https://github.com/future-architect/vuls/issues/261)
|
||||
- Add a `--yes` flag to bypass interactive prompt for `vuls prepare` [\#260](https://github.com/future-architect/vuls/issues/260)
|
||||
- `vuls prepare` doesn't work on Debian host due to apt-get confirmation prompt [\#251](https://github.com/future-architect/vuls/issues/251)
|
||||
|
||||
**Merged pull requests:**
|
||||
|
||||
- Fix gocui.NewGui after signature change \#261 [\#262](https://github.com/future-architect/vuls/pull/262) ([Code0x58](https://github.com/Code0x58))
|
||||
- Replace inconsistent tabs with spaces [\#254](https://github.com/future-architect/vuls/pull/254) ([Code0x58](https://github.com/Code0x58))
|
||||
- Fix README [\#249](https://github.com/future-architect/vuls/pull/249) ([usiusi360](https://github.com/usiusi360))
|
||||
|
||||
## [v0.1.7](https://github.com/future-architect/vuls/tree/v0.1.7) (2016-11-08)
|
||||
[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.6...v0.1.7)
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- Enable to scan only docker container, without docker host [\#122](https://github.com/future-architect/vuls/issues/122)
|
||||
- Add -skip-broken option \[CentOS only\] \#245 [\#248](https://github.com/future-architect/vuls/pull/248) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Display unknown CVEs to TUI [\#244](https://github.com/future-architect/vuls/pull/244) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add the XML output [\#240](https://github.com/future-architect/vuls/pull/240) ([gleentea](https://github.com/gleentea))
|
||||
- add '-ssh-external' option to prepare subcommand [\#234](https://github.com/future-architect/vuls/pull/234) ([mykstmhr](https://github.com/mykstmhr))
|
||||
- Integrate OWASP Dependency Check [\#232](https://github.com/future-architect/vuls/pull/232) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add support for reading CVE data from MySQL. [\#225](https://github.com/future-architect/vuls/pull/225) ([oswell](https://github.com/oswell))
|
||||
- Remove base docker image, -v shows commit hash [\#223](https://github.com/future-architect/vuls/pull/223) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Support ignore CveIDs in config [\#222](https://github.com/future-architect/vuls/pull/222) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Confirm before installing dependencies on prepare [\#219](https://github.com/future-architect/vuls/pull/219) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Remove all.json [\#218](https://github.com/future-architect/vuls/pull/218) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add GitHub issue template [\#217](https://github.com/future-architect/vuls/pull/217) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Improve makefile, -version shows git hash, fix README [\#216](https://github.com/future-architect/vuls/pull/216) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- change e-mail package from gomail to net/smtp [\#211](https://github.com/future-architect/vuls/pull/211) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Add only-containers option to scan subcommand \#122 [\#190](https://github.com/future-architect/vuls/pull/190) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix -results-dir option of scan subcommand [\#185](https://github.com/future-architect/vuls/pull/185) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Show error when no scannable servers are detected. [\#177](https://github.com/future-architect/vuls/pull/177) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Add sudo check to prepare subcommand [\#176](https://github.com/future-architect/vuls/pull/176) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Supports yum --enablerepo option \(supports only base,updates for now\) [\#147](https://github.com/future-architect/vuls/pull/147) ([kotakanbe](https://github.com/kotakanbe))
|
||||
|
||||
**Fixed bugs:**
|
||||
|
||||
- Debian 8.6 \(jessie\) scan does not show vulnerable packages [\#235](https://github.com/future-architect/vuls/issues/235)
|
||||
- panic: runtime error: index out of range - ubuntu 16.04 + vuls history [\#180](https://github.com/future-architect/vuls/issues/180)
|
||||
- Moved golang.org/x/net/context to context [\#243](https://github.com/future-architect/vuls/pull/243) ([yoheimuta](https://github.com/yoheimuta))
|
||||
- Fix changelog cache bug on Ubuntu and Debian \#235 [\#238](https://github.com/future-architect/vuls/pull/238) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- add '-ssh-external' option to prepare subcommand [\#234](https://github.com/future-architect/vuls/pull/234) ([mykstmhr](https://github.com/mykstmhr))
|
||||
- Fixed error for the latest version of gocui [\#231](https://github.com/future-architect/vuls/pull/231) ([ymd38](https://github.com/ymd38))
|
||||
- Handle the refactored gocui SetCurrentView method. [\#229](https://github.com/future-architect/vuls/pull/229) ([oswell](https://github.com/oswell))
|
||||
- Fix locale env var LANG to LANGUAGE [\#215](https://github.com/future-architect/vuls/pull/215) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fixed bug with parsing update line on CentOS/RHEL [\#206](https://github.com/future-architect/vuls/pull/206) ([andyone](https://github.com/andyone))
|
||||
- Fix defer cache.DB.close [\#201](https://github.com/future-architect/vuls/pull/201) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix a help message of -report-azure-blob option [\#195](https://github.com/future-architect/vuls/pull/195) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix error handling in tui [\#193](https://github.com/future-architect/vuls/pull/193) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix not working changelog cache on Container [\#189](https://github.com/future-architect/vuls/pull/189) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix release version detection on FreeBSD [\#184](https://github.com/future-architect/vuls/pull/184) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix defer cahce.DB.close\(\) [\#183](https://github.com/future-architect/vuls/pull/183) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix a mode of files/dir \(report, log\) [\#182](https://github.com/future-architect/vuls/pull/182) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix a error when no json dirs are found under results \#180 [\#181](https://github.com/future-architect/vuls/pull/181) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- ssh-external option of configtest is not working \#178 [\#179](https://github.com/future-architect/vuls/pull/179) ([kotakanbe](https://github.com/kotakanbe))
|
||||
|
||||
**Closed issues:**
|
||||
|
||||
- --enable-repos of yum option [\#246](https://github.com/future-architect/vuls/issues/246)
|
||||
- --skip-broken at yum option [\#245](https://github.com/future-architect/vuls/issues/245)
|
||||
- Recent changes to gobui cause build failures [\#228](https://github.com/future-architect/vuls/issues/228)
|
||||
- https://hub.docker.com/r/vuls/go-cve-dictionary/ is empty [\#208](https://github.com/future-architect/vuls/issues/208)
|
||||
- Not able to install gomail fails [\#202](https://github.com/future-architect/vuls/issues/202)
|
||||
- No results file created - vuls tui failed [\#199](https://github.com/future-architect/vuls/issues/199)
|
||||
- Wrong file permissions for results/\*.json in official Docker container [\#197](https://github.com/future-architect/vuls/issues/197)
|
||||
- Failed: Unknown OS Type [\#196](https://github.com/future-architect/vuls/issues/196)
|
||||
- Segmentation fault with configtest [\#192](https://github.com/future-architect/vuls/issues/192)
|
||||
- Failed to scan. err: No server defined. Check the configuration [\#187](https://github.com/future-architect/vuls/issues/187)
|
||||
- vuls configtest -ssh-external doesnt work [\#178](https://github.com/future-architect/vuls/issues/178)
|
||||
- apt-get update: time out [\#175](https://github.com/future-architect/vuls/issues/175)
|
||||
- scanning on Centos6, but vuls recognizes debian. [\#174](https://github.com/future-architect/vuls/issues/174)
|
||||
- Fix READMEja \#164 [\#173](https://github.com/future-architect/vuls/issues/173)
|
||||
|
||||
**Merged pull requests:**
|
||||
|
||||
- Update README \#225 [\#242](https://github.com/future-architect/vuls/pull/242) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- fix readme [\#241](https://github.com/future-architect/vuls/pull/241) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Fix README \#234 [\#237](https://github.com/future-architect/vuls/pull/237) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Update glide files [\#236](https://github.com/future-architect/vuls/pull/236) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- fix README [\#226](https://github.com/future-architect/vuls/pull/226) ([usiusi360](https://github.com/usiusi360))
|
||||
- fix some misspelling. [\#221](https://github.com/future-architect/vuls/pull/221) ([ymomoi](https://github.com/ymomoi))
|
||||
- fix docker readme [\#214](https://github.com/future-architect/vuls/pull/214) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Fix ja document about typo [\#213](https://github.com/future-architect/vuls/pull/213) ([shokohara](https://github.com/shokohara))
|
||||
- fix readme [\#212](https://github.com/future-architect/vuls/pull/212) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- fix README [\#207](https://github.com/future-architect/vuls/pull/207) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- fix typo [\#204](https://github.com/future-architect/vuls/pull/204) ([usiusi360](https://github.com/usiusi360))
|
||||
- fix gitignore [\#191](https://github.com/future-architect/vuls/pull/191) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
- Update glide.lock [\#188](https://github.com/future-architect/vuls/pull/188) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix path in setup/docker/README [\#186](https://github.com/future-architect/vuls/pull/186) ([dladuke](https://github.com/dladuke))
|
||||
- Vuls and vulsrepo are now separated [\#163](https://github.com/future-architect/vuls/pull/163) ([hikachan](https://github.com/hikachan))
|
||||
|
||||
## [v0.1.6](https://github.com/future-architect/vuls/tree/v0.1.6) (2016-09-12)
|
||||
[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.5...v0.1.6)
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- High speed scan on Ubuntu/Debian [\#172](https://github.com/future-architect/vuls/pull/172) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Support CWE\(Common Weakness Enumeration\) [\#169](https://github.com/future-architect/vuls/pull/169) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Enable to scan without sudo on amazon linux [\#167](https://github.com/future-architect/vuls/pull/167) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Remove deprecated options -use-unattended-upgrades,-use-yum-plugin-security [\#161](https://github.com/future-architect/vuls/pull/161) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- delete sqlite3 [\#152](https://github.com/future-architect/vuls/pull/152) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
|
||||
|
||||
**Fixed bugs:**
|
||||
|
||||
- Failed to setup vuls docker [\#170](https://github.com/future-architect/vuls/issues/170)
|
||||
- yum check-update error occurred when no reboot after kernel updating [\#165](https://github.com/future-architect/vuls/issues/165)
|
||||
- error thrown from 'docker build .' [\#157](https://github.com/future-architect/vuls/issues/157)
|
||||
- CVE-ID is truncated to 4 digits [\#153](https://github.com/future-architect/vuls/issues/153)
|
||||
- 'yum update --changelog' stalled in 'vuls scan'. if ssh user is not 'root'. [\#150](https://github.com/future-architect/vuls/issues/150)
|
||||
- Panic on packet scan [\#131](https://github.com/future-architect/vuls/issues/131)
|
||||
- Update glide.lock \#170 [\#171](https://github.com/future-architect/vuls/pull/171) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix detecting a platform on Azure [\#168](https://github.com/future-architect/vuls/pull/168) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix parse error for yum check-update \#165 [\#166](https://github.com/future-architect/vuls/pull/166) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix bug: Vuls on Docker [\#159](https://github.com/future-architect/vuls/pull/159) ([tjinjin](https://github.com/tjinjin))
|
||||
- Fix CVE-ID is truncated to 4 digits [\#155](https://github.com/future-architect/vuls/pull/155) ([usiusi360](https://github.com/usiusi360))
|
||||
- Fix yum update --changelog stalled when non-root ssh user on CentOS \#150 [\#151](https://github.com/future-architect/vuls/pull/151) ([kotakanbe](https://github.com/kotakanbe))
|
||||
|
||||
**Closed issues:**
|
||||
|
||||
- Support su for root privilege escalation [\#44](https://github.com/future-architect/vuls/issues/44)
|
||||
- Support FreeBSD [\#34](https://github.com/future-architect/vuls/issues/34)
|
||||
|
||||
**Merged pull requests:**
|
||||
|
||||
- Change scripts for data fetching from jvn [\#164](https://github.com/future-architect/vuls/pull/164) ([kotakanbe](https://github.com/kotakanbe))
|
||||
- Fix: setup vulsrepo [\#162](https://github.com/future-architect/vuls/pull/162) ([tjinjin](https://github.com/tjinjin))
|
||||
- Fix-docker-vulsrepo-install [\#160](https://github.com/future-architect/vuls/pull/160) ([usiusi360](https://github.com/usiusi360))
|
||||
- Reduce regular expression compilation [\#158](https://github.com/future-architect/vuls/pull/158) ([itchyny](https://github.com/itchyny))
|
||||
- Add testcases for \#153 [\#156](https://github.com/future-architect/vuls/pull/156) ([kotakanbe](https://github.com/kotakanbe))
|
||||
|
||||
## [v0.1.5](https://github.com/future-architect/vuls/tree/v0.1.5) (2016-08-16)

[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.4...v0.1.5)

**Implemented enhancements:**

- Enable to scan without running go-cve-dictionary as server mode [\#84](https://github.com/future-architect/vuls/issues/84)
- Support high-speed scanning for CentOS [\#138](https://github.com/future-architect/vuls/pull/138) ([tai-ga](https://github.com/tai-ga))
- Add configtest subcommand. skip un-ssh-able servers. [\#134](https://github.com/future-architect/vuls/pull/134) ([kotakanbe](https://github.com/kotakanbe))
- Support -report-azure-blob option [\#130](https://github.com/future-architect/vuls/pull/130) ([kotakanbe](https://github.com/kotakanbe))
- Add optional key-values that will be outputted to JSON in config [\#117](https://github.com/future-architect/vuls/pull/117) ([kotakanbe](https://github.com/kotakanbe))
- Change dir structure [\#115](https://github.com/future-architect/vuls/pull/115) ([kotakanbe](https://github.com/kotakanbe))
- Add some validation of loading config. user, host and port [\#113](https://github.com/future-architect/vuls/pull/113) ([kotakanbe](https://github.com/kotakanbe))
- Support scanning with external ssh command [\#101](https://github.com/future-architect/vuls/pull/101) ([kotakanbe](https://github.com/kotakanbe))
- Detect Platform and get instance-id of amazon ec2 [\#95](https://github.com/future-architect/vuls/pull/95) ([kotakanbe](https://github.com/kotakanbe))
- Add -report-s3 option [\#92](https://github.com/future-architect/vuls/pull/92) ([kotakanbe](https://github.com/kotakanbe))
- Added FreeBSD support. [\#90](https://github.com/future-architect/vuls/pull/90) ([justyntemme](https://github.com/justyntemme))
- Add glide files for vendoring [\#89](https://github.com/future-architect/vuls/pull/89) ([kotakanbe](https://github.com/kotakanbe))
- Fix README, change -cvedbpath to -cve-dictionary-dbpath \#84 [\#85](https://github.com/future-architect/vuls/pull/85) ([kotakanbe](https://github.com/kotakanbe))
- Add option for it get cve detail from cve.sqlite3. [\#81](https://github.com/future-architect/vuls/pull/81) ([ymd38](https://github.com/ymd38))
- Add -report-text option, Fix small bug of report in japanese [\#78](https://github.com/future-architect/vuls/pull/78) ([kotakanbe](https://github.com/kotakanbe))
- Add JSONWriter, Fix CVE sort order of report [\#77](https://github.com/future-architect/vuls/pull/77) ([kotakanbe](https://github.com/kotakanbe))

**Fixed bugs:**

- Docker: Panic [\#76](https://github.com/future-architect/vuls/issues/76)
- Fix apt command to scan correctly when system locale is not english [\#149](https://github.com/future-architect/vuls/pull/149) ([kit494way](https://github.com/kit494way))
- Disable -ask-sudo-password for security reasons [\#148](https://github.com/future-architect/vuls/pull/148) ([kotakanbe](https://github.com/kotakanbe))
- Fix no tty error while executing with -external-ssh option [\#143](https://github.com/future-architect/vuls/pull/143) ([kotakanbe](https://github.com/kotakanbe))
- wrong log packages [\#141](https://github.com/future-architect/vuls/pull/141) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
- Fix platform detection. [\#137](https://github.com/future-architect/vuls/pull/137) ([Rompei](https://github.com/Rompei))
- Fix nil pointer when scan with -cve-dictionary-dbpath and cpeNames [\#111](https://github.com/future-architect/vuls/pull/111) ([kotakanbe](https://github.com/kotakanbe))
- Remove vulndb file before pkg audit [\#110](https://github.com/future-architect/vuls/pull/110) ([kotakanbe](https://github.com/kotakanbe))
- Add error handling when unable to connect via ssh. status code: 255 [\#108](https://github.com/future-architect/vuls/pull/108) ([kotakanbe](https://github.com/kotakanbe))
- Enable to detect vulnerabilities on FreeBSD [\#98](https://github.com/future-architect/vuls/pull/98) ([kotakanbe](https://github.com/kotakanbe))
- Fix unknown format err while check-update on RHEL6.5 [\#93](https://github.com/future-architect/vuls/pull/93) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
- Fix type of SMTP Port of discovery command's output [\#88](https://github.com/future-architect/vuls/pull/88) ([kotakanbe](https://github.com/kotakanbe))
- Fix error msg when go-cve-dictionary is unavailable \#84 [\#86](https://github.com/future-architect/vuls/pull/86) ([kotakanbe](https://github.com/kotakanbe))
- Fix error handling to avoid nil pointer err on debian [\#83](https://github.com/future-architect/vuls/pull/83) ([kotakanbe](https://github.com/kotakanbe))
- Fix nil pointer while doing apt-cache policy on ubuntu \#76 [\#82](https://github.com/future-architect/vuls/pull/82) ([kotakanbe](https://github.com/kotakanbe))
- fix log import url [\#79](https://github.com/future-architect/vuls/pull/79) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))
- Fix error handling of gorequest [\#75](https://github.com/future-architect/vuls/pull/75) ([kotakanbe](https://github.com/kotakanbe))
- Fix freezing forever when no args specified in TUI mode [\#73](https://github.com/future-architect/vuls/pull/73) ([kotakanbe](https://github.com/kotakanbe))
- mv version.go version/version.go to run main.go without compile [\#71](https://github.com/future-architect/vuls/pull/71) ([sadayuki-matsuno](https://github.com/sadayuki-matsuno))

**Closed issues:**

- SSh password authentication failed on FreeBSD [\#99](https://github.com/future-architect/vuls/issues/99)
- BUG: -o pipefail is not work on FreeBSD's /bin/sh. because it isn't bash [\#91](https://github.com/future-architect/vuls/issues/91)
- Use ~/.ssh/config [\#62](https://github.com/future-architect/vuls/issues/62)
- SSH ciphers [\#37](https://github.com/future-architect/vuls/issues/37)

**Merged pull requests:**

- Update README \#138 [\#144](https://github.com/future-architect/vuls/pull/144) ([kotakanbe](https://github.com/kotakanbe))
- Fix a typo [\#142](https://github.com/future-architect/vuls/pull/142) ([dtan4](https://github.com/dtan4))
- Remove unnecessary step in readme of docker setup [\#140](https://github.com/future-architect/vuls/pull/140) ([mikkame](https://github.com/mikkame))
- Update logo [\#139](https://github.com/future-architect/vuls/pull/139) ([chanomaru](https://github.com/chanomaru))
- Update README.ja.md to fix wrong tips. [\#135](https://github.com/future-architect/vuls/pull/135) ([a2atsu](https://github.com/a2atsu))
- add tips about NVD JVN issue [\#133](https://github.com/future-architect/vuls/pull/133) ([a2atsu](https://github.com/a2atsu))
- Fix README wrong links [\#129](https://github.com/future-architect/vuls/pull/129) ([aomoriringo](https://github.com/aomoriringo))
- Add logo [\#126](https://github.com/future-architect/vuls/pull/126) ([chanomaru](https://github.com/chanomaru))
- Improve setup/docker [\#125](https://github.com/future-architect/vuls/pull/125) ([kotakanbe](https://github.com/kotakanbe))
- Fix scan command help [\#124](https://github.com/future-architect/vuls/pull/124) ([aomoriringo](https://github.com/aomoriringo))
- added dockernized-vuls with vulsrepo [\#121](https://github.com/future-architect/vuls/pull/121) ([hikachan](https://github.com/hikachan))
- Fix detect platform on azure and degital ocean [\#119](https://github.com/future-architect/vuls/pull/119) ([kotakanbe](https://github.com/kotakanbe))
- Remove json marshall-indent [\#118](https://github.com/future-architect/vuls/pull/118) ([kotakanbe](https://github.com/kotakanbe))
- Improve Readme.ja [\#116](https://github.com/future-architect/vuls/pull/116) ([kotakanbe](https://github.com/kotakanbe))
- Add architecture diag to README.md [\#114](https://github.com/future-architect/vuls/pull/114) ([kotakanbe](https://github.com/kotakanbe))
- Rename linux.go to base.go [\#100](https://github.com/future-architect/vuls/pull/100) ([kotakanbe](https://github.com/kotakanbe))
- Update README.md [\#74](https://github.com/future-architect/vuls/pull/74) ([yoshi-taka](https://github.com/yoshi-taka))
- Refactoring debian.go [\#72](https://github.com/future-architect/vuls/pull/72) ([kotakanbe](https://github.com/kotakanbe))

## [v0.1.4](https://github.com/future-architect/vuls/tree/v0.1.4) (2016-05-24)

[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.3...v0.1.4)

**Implemented enhancements:**

- Initial fetch from NVD is too heavy \(2.3 GB of memory consumed\) [\#27](https://github.com/future-architect/vuls/issues/27)
- Enable to show previous scan result [\#69](https://github.com/future-architect/vuls/pull/69) ([kotakanbe](https://github.com/kotakanbe))
- Add ignore-unscored-cves option [\#68](https://github.com/future-architect/vuls/pull/68) ([kotakanbe](https://github.com/kotakanbe))
- Support dynamic scanning docker container [\#67](https://github.com/future-architect/vuls/pull/67) ([kotakanbe](https://github.com/kotakanbe))
- Add version flag [\#65](https://github.com/future-architect/vuls/pull/65) ([kotakanbe](https://github.com/kotakanbe))
- Update Dockerfile [\#57](https://github.com/future-architect/vuls/pull/57) ([theonlydoo](https://github.com/theonlydoo))
- Update run.sh [\#56](https://github.com/future-architect/vuls/pull/56) ([theonlydoo](https://github.com/theonlydoo))
- Support Windows [\#33](https://github.com/future-architect/vuls/pull/33) ([mattn](https://github.com/mattn))

**Fixed bugs:**

- vuls scan -cvss-over does not work. [\#59](https://github.com/future-architect/vuls/issues/59)
- `panic: runtime error: invalid memory address or nil pointer dereference` when scan CentOS5.5 [\#58](https://github.com/future-architect/vuls/issues/58)
- It rans out of memory. [\#47](https://github.com/future-architect/vuls/issues/47)
- BUG: vuls scan on CentOS with Japanese environment. [\#43](https://github.com/future-architect/vuls/issues/43)
- yum --color=never [\#36](https://github.com/future-architect/vuls/issues/36)
- Failed to parse yum check-update [\#32](https://github.com/future-architect/vuls/issues/32)
- Pointless sudo [\#29](https://github.com/future-architect/vuls/issues/29)
- Can't init database in a path having blanks [\#26](https://github.com/future-architect/vuls/issues/26)
- Fix pointless sudo in debian.go \#29 [\#66](https://github.com/future-architect/vuls/pull/66) ([kotakanbe](https://github.com/kotakanbe))
- Fix error handling of httpGet in cve-client \#58 [\#64](https://github.com/future-architect/vuls/pull/64) ([kotakanbe](https://github.com/kotakanbe))
- Fix nil pointer at error handling of cve\_client \#58 [\#63](https://github.com/future-architect/vuls/pull/63) ([kotakanbe](https://github.com/kotakanbe))
- Set language en\_US. [\#61](https://github.com/future-architect/vuls/pull/61) ([pabroff](https://github.com/pabroff))
- Fix -cvss-over flag \#59 [\#60](https://github.com/future-architect/vuls/pull/60) ([kotakanbe](https://github.com/kotakanbe))
- Fix scan on Japanese environment. [\#55](https://github.com/future-architect/vuls/pull/55) ([pabroff](https://github.com/pabroff))
- Fix a typo: replace Depricated by Deprecated. [\#54](https://github.com/future-architect/vuls/pull/54) ([jody-frankowski](https://github.com/jody-frankowski))
- Fix yes no infinite loop while doing yum update --changelog on root@CentOS \#47 [\#50](https://github.com/future-architect/vuls/pull/50) ([pabroff](https://github.com/pabroff))
- Fix $servername in output of discover command [\#45](https://github.com/future-architect/vuls/pull/45) ([kotakanbe](https://github.com/kotakanbe))

## [v0.1.3](https://github.com/future-architect/vuls/tree/v0.1.3) (2016-04-21)

[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.2...v0.1.3)

**Implemented enhancements:**

- Add sudo support for prepare [\#11](https://github.com/future-architect/vuls/issues/11)
- Dockerfile? [\#10](https://github.com/future-architect/vuls/issues/10)
- Update README [\#41](https://github.com/future-architect/vuls/pull/41) ([theonlydoo](https://github.com/theonlydoo))
- Sparse dockerization [\#38](https://github.com/future-architect/vuls/pull/38) ([theonlydoo](https://github.com/theonlydoo))
- No password in config [\#35](https://github.com/future-architect/vuls/pull/35) ([kotakanbe](https://github.com/kotakanbe))
- Fr readme translation [\#23](https://github.com/future-architect/vuls/pull/23) ([novakin](https://github.com/novakin))

**Fixed bugs:**

- Issues updating CVE database behind https proxy [\#39](https://github.com/future-architect/vuls/issues/39)
- Vuls failed to parse yum check-update [\#24](https://github.com/future-architect/vuls/issues/24)
- Fix yum to yum --color=never \#36 [\#42](https://github.com/future-architect/vuls/pull/42) ([kotakanbe](https://github.com/kotakanbe))
- Fix parse yum check update [\#40](https://github.com/future-architect/vuls/pull/40) ([kotakanbe](https://github.com/kotakanbe))
- fix typo [\#31](https://github.com/future-architect/vuls/pull/31) ([blue119](https://github.com/blue119))
- Fix error while parsing yum check-update \#24 [\#30](https://github.com/future-architect/vuls/pull/30) ([kotakanbe](https://github.com/kotakanbe))

**Closed issues:**

- Unable to scan on ubuntu because changelog.ubuntu.com is down... [\#21](https://github.com/future-architect/vuls/issues/21)
- err: Not initialize\(d\) yet.. [\#16](https://github.com/future-architect/vuls/issues/16)
- Errors when using fish shell [\#8](https://github.com/future-architect/vuls/issues/8)

## [v0.1.2](https://github.com/future-architect/vuls/tree/v0.1.2) (2016-04-12)

[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.1...v0.1.2)

**Fixed bugs:**

- Maximum 6 nodes available to scan [\#12](https://github.com/future-architect/vuls/issues/12)
- panic: runtime error: index out of range [\#5](https://github.com/future-architect/vuls/issues/5)
- Fix sudo option on RedHat like Linux and change some messages. [\#20](https://github.com/future-architect/vuls/pull/20) ([kotakanbe](https://github.com/kotakanbe))
- Typo fix and updated readme [\#19](https://github.com/future-architect/vuls/pull/19) ([EuanKerr](https://github.com/EuanKerr))
- remove a period at the end of error messages. [\#18](https://github.com/future-architect/vuls/pull/18) ([kotakanbe](https://github.com/kotakanbe))
- fix error while yum updateinfo --security update on rhel@aws [\#17](https://github.com/future-architect/vuls/pull/17) ([kotakanbe](https://github.com/kotakanbe))
- Fixed typos [\#15](https://github.com/future-architect/vuls/pull/15) ([radarhere](https://github.com/radarhere))
- Typo fix in error messages [\#14](https://github.com/future-architect/vuls/pull/14) ([Bregor](https://github.com/Bregor))
- Fix index out of range error when the number of servers is over 6. \#12 [\#13](https://github.com/future-architect/vuls/pull/13) ([kotakanbe](https://github.com/kotakanbe))
- Revise small grammar mistakes in serverapi.go [\#9](https://github.com/future-architect/vuls/pull/9) ([cpobrien](https://github.com/cpobrien))
- Fix error handling in HTTP backoff function [\#7](https://github.com/future-architect/vuls/pull/7) ([kotakanbe](https://github.com/kotakanbe))

## [v0.1.1](https://github.com/future-architect/vuls/tree/v0.1.1) (2016-04-06)

[Full Changelog](https://github.com/future-architect/vuls/compare/v0.1.0...v0.1.1)

**Fixed bugs:**

- Typo in Exapmle [\#6](https://github.com/future-architect/vuls/pull/6) ([toli](https://github.com/toli))

## [v0.1.0](https://github.com/future-architect/vuls/tree/v0.1.0) (2016-04-04)

**Merged pull requests:**

- English translation [\#4](https://github.com/future-architect/vuls/pull/4) ([hikachan](https://github.com/hikachan))
- English translation [\#3](https://github.com/future-architect/vuls/pull/3) ([chewyinping](https://github.com/chewyinping))
- Add a Bitdeli Badge to README [\#2](https://github.com/future-architect/vuls/pull/2) ([bitdeli-chef](https://github.com/bitdeli-chef))

\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*

Dockerfile (34 lines changed)
@@ -1,34 +0,0 @@
FROM golang:alpine as builder

RUN apk add --no-cache \
    git \
    make \
    gcc \
    musl-dev

ENV REPOSITORY github.com/future-architect/vuls
COPY . $GOPATH/src/$REPOSITORY
RUN cd $GOPATH/src/$REPOSITORY && make install


FROM alpine:3.11

MAINTAINER hikachan sadayuki-matsuno

ENV LOGDIR /var/log/vuls
ENV WORKDIR /vuls

RUN apk add --no-cache \
    openssh-client \
    ca-certificates \
    git \
    && mkdir -p $WORKDIR $LOGDIR

COPY --from=builder /go/bin/vuls /usr/local/bin/

VOLUME ["$WORKDIR", "$LOGDIR"]
WORKDIR $WORKDIR
ENV PWD $WORKDIR

ENTRYPOINT ["vuls"]
CMD ["--help"]
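
For context, here is a minimal, hypothetical sketch of how an image built from this (now removed) Dockerfile would typically have been used; the tag `vuls:local` and the host directories are placeholders rather than anything defined by the repository, while the `/vuls` work directory and `/var/log/vuls` log directory come straight from the Dockerfile above:

```console
$ docker build -t vuls:local .
$ docker run --rm -it \
    -v $PWD/vuls:/vuls \
    -v $PWD/logs:/var/log/vuls \
    vuls:local --help
```

Because the image sets `ENTRYPOINT ["vuls"]` and `CMD ["--help"]`, running it with no arguments prints the help text, and any arguments you pass replace `--help`.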
GNUmakefile (100 lines changed)
@@ -1,82 +1,42 @@
.PHONY: \
	build \
	install \
	all \
	vendor \
	lint \
	vet \
	fmt \
	fmtcheck \
	pretest \
	test \
	cov \
	clean

SRCS = $(shell git ls-files '*.go')
PKGS = $(shell go list ./...)
VERSION := $(shell git describe --tags --abbrev=0)
ifeq ($(VERSION), )
VERSION := $(shell git rev-parse --abbrev-ref HEAD)
endif
ifeq ($(shell git rev-parse --abbrev-ref HEAD), nightly)
VERSION := nightly
endif
REVISION := $(shell git rev-parse --short HEAD)
BUILDTIME := $(shell date "+%Y%m%d_%H%M%S")
LDFLAGS := -X 'github.com/future-architect/vuls/config.Version=$(VERSION)' \
	-X 'github.com/future-architect/vuls/config.Revision=build-$(BUILDTIME)_$(REVISION)'
GO := GO111MODULE=on go
CGO_UNABLED := CGO_ENABLED=0 go
GO_OFF := GO111MODULE=off go
LDFLAGS := -ldflags "-s -w -X=github.com/future-architect/vuls/pkg/cmd/version.Version=$(VERSION) -X=github.com/future-architect/vuls/pkg/cmd/version.Revision=$(REVISION)"

GOPATH := $(shell go env GOPATH)
GOBIN := $(GOPATH)/bin

all: build
$(GOBIN)/golangci-lint:
	go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest

build: ./cmd/vuls/main.go pretest fmt
	$(GO) build -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/vuls
.PHONY: build
build:
	go build $(LDFLAGS) ./cmd/vuls

b: ./cmd/vuls/main.go
	$(GO) build -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/vuls
.PHONY: install
install:
	go install $(LDFLAGS) ./cmd/vuls

install: ./cmd/vuls/main.go pretest fmt
	$(GO) install -ldflags "$(LDFLAGS)" ./cmd/vuls

build-scanner: ./cmd/scanner/main.go pretest fmt
	$(CGO_UNABLED) build -tags=scanner -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/scanner

install-scanner: ./cmd/scanner/main.go pretest fmt
	$(CGO_UNABLED) install -tags=scanner -ldflags "$(LDFLAGS)" ./cmd/scanner

lint:
	$(GO_OFF) get -u golang.org/x/lint/golint
	golint $(PKGS)

vet:
	echo $(PKGS) | xargs env $(GO) vet || exit;

fmt:
	gofmt -s -w $(SRCS)

mlint:
	$(foreach file,$(SRCS),gometalinter $(file) || exit;)

fmtcheck:
	$(foreach file,$(SRCS),gofmt -s -d $(file);)
.PHONY: test
test: pretest
	go test -race ./...

.PHONY: pretest
pretest: lint vet fmtcheck

test:
	$(GO) test -cover -v ./... || exit;
.PHONY: lint
lint: $(GOBIN)/golangci-lint
	golangci-lint run

unused:
	$(foreach pkg,$(PKGS),unused $(pkg);)
.PHONY: vet
vet:
	go vet ./...

cov:
	@ go get -v github.com/axw/gocov/gocov
	@ go get golang.org/x/tools/cmd/cover
	gocov test | gocov report

clean:
	echo $(PKGS) | xargs go clean || exit;

# trivy-to-vuls
build-trivy-to-vuls: pretest fmt
	$(GO) build -o trivy-to-vuls contrib/trivy/cmd/*.go

# future-vuls
build-future-vuls: pretest fmt
	$(GO) build -o future-vuls contrib/future-vuls/cmd/*.go
.PHONY: fmtcheck
fmtcheck:
	gofmt -s -d .
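
Since this hunk interleaves the old and the new GNUmakefile without +/- markers, the exact target set depends on which side of the diff you read; as a rough usage sketch based only on the targets shown above, and assuming a checkout with Go and make available:

```console
$ make build     # go build ./cmd/vuls with the version and revision embedded via -ldflags
$ make test      # run the test suite (go test -race ./... in the newer Makefile)
$ make lint      # install golangci-lint into $(GOBIN) if missing, then run it
$ make install   # go install ./cmd/vuls
```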

LICENSE (661 lines changed)
@@ -1,661 +0,0 @@
(The full text of the GNU Affero General Public License, Version 3, 19 November 2007, Copyright (C) 2007 Free Software Foundation, Inc., appears here in the original diff and is removed in its entirety by this change. The complete license text is available at <https://www.gnu.org/licenses/>.)
641
README.md
@@ -1,190 +1,513 @@
|
||||
|
||||
# Vuls: VULnerability Scanner
|
||||
|
||||
[](http://goo.gl/forms/xm5KFo35tu)
|
||||
[](https://github.com/future-architect/vuls/blob/master/LICENSE)
|
||||
[](https://travis-ci.org/future-architect/vuls)
|
||||
[](https://goreportcard.com/report/github.com/future-architect/vuls)
|
||||
[](https://github.com/future-architect/vuls/graphs/contributors)
|
||||
## NOTE
|
||||
This is the nightly branch and provides the latest functionality.
|
||||
Please use the master branch if you need stability; breaking changes may land on this branch.
|
||||
|
||||

|
||||
## Usage
|
||||
|
||||
Vulnerability scanner for Linux/FreeBSD, agent-less, written in Go.
|
||||
We have a Slack team. [Join the Slack team](http://goo.gl/forms/xm5KFo35tu)
|
||||
Twitter: [@vuls_en](https://twitter.com/vuls_en)
|
||||
### 1. Nightly Vuls installation
|
||||
|
||||

|
||||
```console
|
||||
$ go install github.com/future-architect/vuls/cmd/vuls@nightly
|
||||
$ vuls version
|
||||
vuls nightly 2ef5390
|
||||
```
|
||||
|
||||

|
||||
### 2. DB Fetch
|
||||
|
||||
[](https://asciinema.org/a/3y9zrf950agiko7klg8abvyck)
|
||||
```console
|
||||
$ vuls db fetch
|
||||
```
|
||||
|
||||

|
||||
### 3. init config & configuration change
|
||||
|
||||
----
|
||||
Execute the following command to output a config template to stdout.
|
||||
Save it to a file and adjust the host entries, the path to the DB, and so on to suit your environment.
|
||||
|
||||
## Abstract
|
||||
```console
|
||||
$ vuls config init
|
||||
```
|
||||
|
||||
For a system administrator, having to perform security vulnerability analysis and software updates on a daily basis can be a burden.
|
||||
To avoid downtime in a production environment, it is common for a system administrator to choose not to use the automatic update option provided by the package manager and to perform updates manually.
|
||||
This leads to the following problems.
|
||||
### Optional. Add to the list of known hosts
|
||||
|
||||
- The system administrator will have to constantly watch out for any new vulnerabilities in NVD (National Vulnerability Database) or similar databases.
|
||||
- It might be impossible for the system administrator to monitor all the software if there are a large number of software packages installed on the server.
|
||||
- It is expensive to analyze which servers are affected by new vulnerabilities, and there is always a chance of overlooking a server or two.
|
||||
When using remote as the host type, make an ssh connection to the remote host.
|
||||
Before scanning, register the host in the known_hosts.
|
||||
|
||||
Vuls is a tool created to solve the problems listed above. It has the following characteristics.
|
||||
```console
|
||||
$ ssh -i /home/mainek00n/.ssh/id_rsa -p 2222 root@127.0.0.1
|
||||
The authenticity of host '[127.0.0.1]:2222 ([127.0.0.1]:2222)' can't be established.
|
||||
ED25519 key fingerprint is SHA256:dK+aO73n6hymIC3+3yFFHpvyNu/txTYi/LXvMl/TzOk.
|
||||
This key is not known by any other names
|
||||
Are you sure you want to continue connecting (yes/no/[fingerprint])? yes
|
||||
Warning: Permanently added '[127.0.0.1]:2222' (ED25519) to the list of known hosts.
|
||||
```
|
||||
|
||||
- Informs users of the vulnerabilities that are related to the system.
|
||||
- Informs users of the servers that are affected.
|
||||
- Vulnerability detection is done automatically to prevent any oversight.
|
||||
- A report is generated on a regular basis using cron or another scheduler, making it easier to manage vulnerabilities.
|
||||
### 4. Scan
|
||||
|
||||

|
||||
```console
|
||||
$ vuls scan
|
||||
Scan Summary
|
||||
============
|
||||
local (ubuntu 22.04): success ospkg: 2663, cpe: 0 installed
|
||||
remote (debian 11): success ospkg: 319, cpe: 0 installed
|
||||
cpe: success ospkg: 0, cpe: 1 installed
|
||||
|
||||
----
|
||||
or
|
||||
|
||||
## Main Features
|
||||
$ vuls server
|
||||
|
||||
### Scan for any vulnerabilities in Linux/FreeBSD Server
|
||||
$ cat machine.json
|
||||
{
|
||||
"contents": [
|
||||
{
|
||||
"content_type": "os-release",
|
||||
"content": "PRETTY_NAME=\"Ubuntu 22.04.1 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.1 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https:\/\/www.ubuntu.com\/\"\nSUPPORT_URL=\"https:\/\/help.ubuntu.com\/\"\nBUG_REPORT_URL=\"https:\/\/bugs.launchpad.net\/ubuntu\/\"\nPRIVACY_POLICY_URL=\"https:\/\/www.ubuntu.com\/legal\/terms-and-policies\/privacy-policy\"\nUBUNTU_CODENAME=jammy"
|
||||
},
|
||||
{
|
||||
"content_type": "dpkg",
|
||||
"content": "accountsservice,ii ,22.07.5-2ubuntu1.3,amd64,accountsservice,22.07.5-2ubuntu1.3\nacl,ii ,2.3.1-1,amd64,acl,2.3.1-1\nnvim-common,ii ,2:8.2.3995-1ubuntu2.1,all,vim,2:8.2.3995-1ubuntu2.1\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
[Supports major Linux/FreeBSD](https://vuls.io/docs/en/supported-os.html)
|
||||
$ curl -s -X POST -H "Content-Type: application/json" -d @machine.json 127.0.0.1:5515/scan | jq
|
||||
{
|
||||
"name": "8002e134-dc2d-4786-96b3-e751103fe5c3",
|
||||
"family": "ubuntu",
|
||||
"release": "22.04",
|
||||
"scanned_at": "2022-11-14T10:41:07.558379311+09:00",
|
||||
"packages": {
|
||||
"kernel": {},
|
||||
"os_pkg": {
|
||||
"accountsservice": {
|
||||
"name": "accountsservice",
|
||||
"version": "22.07.5-2ubuntu1.3",
|
||||
"arch": "amd64",
|
||||
"src_name": "accountsservice",
|
||||
"src_version": "22.07.5-2ubuntu1.3"
|
||||
},
|
||||
"acl": {
|
||||
"name": "acl",
|
||||
"version": "2.3.1-1",
|
||||
"arch": "amd64",
|
||||
"src_name": "acl",
|
||||
"src_version": "2.3.1-1"
|
||||
},
|
||||
"nvim-common": {
|
||||
"name": "nvim-common",
|
||||
"version": "2:8.2.3995-1ubuntu2.1",
|
||||
"arch": "all",
|
||||
"src_name": "vim",
|
||||
"src_version": "2:8.2.3995-1ubuntu2.1"
|
||||
}
|
||||
}
|
||||
},
|
||||
"config": {}
|
||||
}
|
||||
```
|
||||
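For reference, the `machine.json` payload above can also be assembled programmatically. The following is a minimal, hypothetical Go sketch of doing so: it assumes the `os-release` and `dpkg` content types shown in the example, and the `dpkg-query` format string is an assumption about how to produce the comma-separated package lines, not something taken from the Vuls documentation.

```go
package main

import (
	"encoding/json"
	"log"
	"os"
	"os/exec"
)

// content mirrors one entry of the "contents" array in machine.json above.
type content struct {
	ContentType string `json:"content_type"`
	Content     string `json:"content"`
}

func main() {
	// The os-release content is simply the file itself.
	osRelease, err := os.ReadFile("/etc/os-release")
	if err != nil {
		log.Fatal(err)
	}

	// Assumed dpkg-query format producing the comma-separated
	// "name,status,version,arch,src_name,src_version" lines shown above.
	dpkg, err := exec.Command("dpkg-query", "-W", "-f",
		"${binary:Package},${db:Status-Abbrev},${Version},${Architecture},${source:Package},${source:Version}\n").Output()
	if err != nil {
		log.Fatal(err)
	}

	payload := map[string][]content{
		"contents": {
			{ContentType: "os-release", Content: string(osRelease)},
			{ContentType: "dpkg", Content: string(dpkg)},
		},
	}

	// Write machine.json, ready to be POSTed to the /scan endpoint.
	f, err := os.Create("machine.json")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	if err := json.NewEncoder(f).Encode(payload); err != nil {
		log.Fatal(err)
	}
}
```

Writing the file this way is equivalent to preparing `machine.json` by hand before POSTing it to the `/scan` endpoint as in the `curl` example above.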
|
||||
- Alpine, Amazon Linux, CentOS, Debian, Oracle Linux, Raspbian, RHEL, SUSE Enterprise Linux, and Ubuntu
|
||||
- FreeBSD
|
||||
- Cloud, on-premises, and running Docker containers
|
||||
### 5. Detect
|
||||
|
||||
### High-quality scan
|
||||
```console
|
||||
$ vuls detect
|
||||
Detect Summary
|
||||
==============
|
||||
local (ubuntu 22.04) : success 143 CVEs detected
|
||||
remote (debian 11) : success 101 CVEs detected
|
||||
cpe : success 7 CVEs detected
|
||||
|
||||
- Vulnerability Database
|
||||
- [NVD](https://nvd.nist.gov/)
|
||||
- [JVN(Japanese)](http://jvndb.jvn.jp/apis/myjvn/)
|
||||
or
|
||||
|
||||
- OVAL
|
||||
- [Red Hat](https://www.redhat.com/security/data/oval/)
|
||||
- [Debian](https://www.debian.org/security/oval/)
|
||||
- [Ubuntu](https://people.canonical.com/~ubuntu-security/oval/)
|
||||
- [SUSE](http://ftp.suse.com/pub/projects/security/oval/)
|
||||
- [Oracle Linux](https://linux.oracle.com/security/oval/)
|
||||
$ vuls server
|
||||
|
||||
- Security Advisory
|
||||
- [Alpine-secdb](https://git.alpinelinux.org/cgit/alpine-secdb/)
|
||||
- [Red Hat Security Advisories](https://access.redhat.com/security/security-updates/)
|
||||
- [Debian Security Bug Tracker](https://security-tracker.debian.org/tracker/)
|
||||
$ cat scan.json
|
||||
{
|
||||
"name": "8002e134-dc2d-4786-96b3-e751103fe5c3",
|
||||
"family": "ubuntu",
|
||||
"release": "22.04",
|
||||
"scanned_at": "2022-11-14T10:41:07.558379311+09:00",
|
||||
"packages": {
|
||||
"kernel": {},
|
||||
"os_pkg": {
|
||||
"accountsservice": {
|
||||
"name": "accountsservice",
|
||||
"version": "22.07.5-2ubuntu1.3",
|
||||
"arch": "amd64",
|
||||
"src_name": "accountsservice",
|
||||
"src_version": "22.07.5-2ubuntu1.3"
|
||||
},
|
||||
"acl": {
|
||||
"name": "acl",
|
||||
"version": "2.3.1-1",
|
||||
"arch": "amd64",
|
||||
"src_name": "acl",
|
||||
"src_version": "2.3.1-1"
|
||||
},
|
||||
"nvim-common": {
|
||||
"name": "nvim-common",
|
||||
"version": "2:8.2.3995-1ubuntu2.1",
|
||||
"arch": "all",
|
||||
"src_name": "vim",
|
||||
"src_version": "2:8.2.3995-1ubuntu2.1"
|
||||
}
|
||||
}
|
||||
},
|
||||
"config": {}
|
||||
}
|
||||
|
||||
- Commands(yum, zypper, pkg-audit)
|
||||
- RHSA / ALAS / ELSA / FreeBSD-SA
|
||||
- Changelog
|
||||
$ curl -s -X POST -H "Content-Type: application/json" -d @scan.json 127.0.0.1:5515/detect | jq
|
||||
{
|
||||
"name": "0c33d5fc-add4-465b-9ffa-90e74036259d",
|
||||
"family": "ubuntu",
|
||||
"release": "22.04",
|
||||
"scanned_at": "2022-11-14T10:42:31.863598309+09:00",
|
||||
"detectedd_at": "2022-11-14T10:42:50.677085953+09:00",
|
||||
"packages": {
|
||||
"kernel": {},
|
||||
"os_pkg": {
|
||||
"accountsservice": {
|
||||
"name": "accountsservice",
|
||||
"version": "22.07.5-2ubuntu1.3",
|
||||
"arch": "amd64",
|
||||
"src_name": "accountsservice",
|
||||
"src_version": "22.07.5-2ubuntu1.3"
|
||||
},
|
||||
"acl": {
|
||||
"name": "acl",
|
||||
"version": "2.3.1-1",
|
||||
"arch": "amd64",
|
||||
"src_name": "acl",
|
||||
"src_version": "2.3.1-1"
|
||||
},
|
||||
"nvim-common": {
|
||||
"name": "nvim-common",
|
||||
"version": "2:8.2.3995-1ubuntu2.1",
|
||||
"arch": "all",
|
||||
"src_name": "vim",
|
||||
"src_version": "2:8.2.3995-1ubuntu2.1"
|
||||
}
|
||||
}
|
||||
},
|
||||
"scanned_cves": {
|
||||
"CVE-2022-0128": {
|
||||
"content": {
|
||||
"official": {
|
||||
"id": "CVE-2022-0128",
|
||||
"advisory": [
|
||||
"mitre",
|
||||
"nvd",
|
||||
"alpine:3.12:CVE-2022-0128",
|
||||
"alpine:3.13:CVE-2022-0128",
|
||||
"alpine:3.14:CVE-2022-0128",
|
||||
"alpine:3.15:CVE-2022-0128",
|
||||
"alpine:3.16:CVE-2022-0128",
|
||||
"amazon:2022:ALAS2022-2022-014",
|
||||
"debian_security_tracker:11:CVE-2022-0128",
|
||||
"debian_security_tracker:12:CVE-2022-0128",
|
||||
"debian_security_tracker:sid:CVE-2022-0128",
|
||||
"debian_security_tracker:10:CVE-2022-0128",
|
||||
"redhat_oval:6-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:7-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:8-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:9-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"suse_oval:opensuse.leap.15.3:oval:org.opensuse.security:def:20220128",
|
||||
"suse_oval:opensuse.leap.15.4:oval:org.opensuse.security:def:20220128",
|
||||
"suse_oval:suse.linux.enterprise.desktop.15:oval:org.opensuse.security:def:20220128",
|
||||
"suse_oval:suse.linux.enterprise.server.12:oval:org.opensuse.security:def:20220128",
|
||||
"suse_oval:suse.linux.enterprise.server.15:oval:org.opensuse.security:def:20220128",
|
||||
"suse_cvrf",
|
||||
"ubuntu_oval:14.04:oval:com.ubuntu.trusty:def:202201280000000",
|
||||
"ubuntu_oval:16.04:oval:com.ubuntu.xenial:def:202201280000000",
|
||||
"ubuntu_oval:21.04:oval:com.ubuntu.hirsute:def:202201280000000",
|
||||
"ubuntu_oval:22.04:oval:com.ubuntu.jammy:def:202201280000000",
|
||||
"ubuntu_oval:22.10:oval:com.ubuntu.kinetic:def:202201280000000",
|
||||
"ubuntu_security_tracker"
|
||||
],
|
||||
"title": "CVE-2022-0128",
|
||||
"description": "vim is vulnerable to Out-of-bounds Read",
|
||||
"cvss": [
|
||||
{
|
||||
"source": "nvd",
|
||||
"version": "2.0",
|
||||
"vector": "AV:N/AC:M/Au:N/C:P/I:P/A:P",
|
||||
"score": 6.8,
|
||||
"severity": "MEDIUM"
|
||||
},
|
||||
{
|
||||
"source": "nvd",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H",
|
||||
"score": 7.8,
|
||||
"severity": "HIGH"
|
||||
},
|
||||
{
|
||||
"source": "amazon:2022:ALAS2022-2022-014",
|
||||
"severity": "Important"
|
||||
},
|
||||
{
|
||||
"source": "redhat_oval:7-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:H",
|
||||
"score": 6.1,
|
||||
"severity": "moderate"
|
||||
},
|
||||
{
|
||||
"source": "redhat_oval:8-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:H",
|
||||
"score": 6.1,
|
||||
"severity": "moderate"
|
||||
},
|
||||
{
|
||||
"source": "redhat_oval:9-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:H",
|
||||
"score": 6.1,
|
||||
"severity": "moderate"
|
||||
},
|
||||
{
|
||||
"source": "redhat_oval:6-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:H",
|
||||
"score": 6.1,
|
||||
"severity": "moderate"
|
||||
},
|
||||
{
|
||||
"source": "suse_oval:opensuse.leap.15.4:oval:org.opensuse.security:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3,
|
||||
"severity": "low"
|
||||
},
|
||||
{
|
||||
"source": "suse_oval:suse.linux.enterprise.desktop.15:oval:org.opensuse.security:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3,
|
||||
"severity": "low"
|
||||
},
|
||||
{
|
||||
"source": "suse_oval:suse.linux.enterprise.server.12:oval:org.opensuse.security:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3,
|
||||
"severity": "low"
|
||||
},
|
||||
{
|
||||
"source": "suse_oval:suse.linux.enterprise.server.15:oval:org.opensuse.security:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3,
|
||||
"severity": "low"
|
||||
},
|
||||
{
|
||||
"source": "suse_oval:opensuse.leap.15.3:oval:org.opensuse.security:def:20220128",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3,
|
||||
"severity": "low"
|
||||
},
|
||||
{
|
||||
"source": "suse_cvrf",
|
||||
"version": "2.0",
|
||||
"vector": "AV:N/AC:M/Au:N/C:P/I:P/A:P",
|
||||
"score": 6.8
|
||||
},
|
||||
{
|
||||
"source": "suse_cvrf",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:L/I:N/A:N",
|
||||
"score": 3.3
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_oval:14.04:oval:com.ubuntu.trusty:def:202201280000000",
|
||||
"severity": "Medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_oval:16.04:oval:com.ubuntu.xenial:def:202201280000000",
|
||||
"severity": "Medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_oval:21.04:oval:com.ubuntu.hirsute:def:202201280000000",
|
||||
"severity": "Medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_oval:22.04:oval:com.ubuntu.jammy:def:202201280000000",
|
||||
"severity": "Medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_oval:22.10:oval:com.ubuntu.kinetic:def:202201280000000",
|
||||
"severity": "Medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_security_tracker",
|
||||
"severity": "medium"
|
||||
},
|
||||
{
|
||||
"source": "ubuntu_security_tracker",
|
||||
"version": "3.1",
|
||||
"vector": "CVSS:3.1/AV:L/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H",
|
||||
"score": 7.8,
|
||||
"severity": "HIGH"
|
||||
}
|
||||
],
|
||||
"epss": {
|
||||
"epss": 0.01537,
|
||||
"percentile": 0.73989
|
||||
},
|
||||
"cwe": [
|
||||
{
|
||||
"source": [
|
||||
"nvd",
|
||||
"redhat_oval:6-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:7-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:8-including-unpatched:oval:com.redhat.unaffected:def:20220128",
|
||||
"redhat_oval:9-including-unpatched:oval:com.redhat.unaffected:def:20220128"
|
||||
],
|
||||
"id": "125"
|
||||
}
|
||||
],
|
||||
"exploit": [
|
||||
{
|
||||
"source": [
|
||||
"nvd",
|
||||
"inthewild",
|
||||
"trickest"
|
||||
],
|
||||
"url": "https://huntr.dev/bounties/63f51299-008a-4112-b85b-1e904aadd4ba"
|
||||
}
|
||||
],
|
||||
"published": "2022-01-06T17:15:00Z",
|
||||
"modified": "2022-11-02T13:18:00Z",
|
||||
"reference": [
|
||||
"http://www.openwall.com/lists/oss-security/2022/01/15/1",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4166",
|
||||
"https://access.redhat.com/security/cve/CVE-2022-0128",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011493.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011575.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011592.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-June/011301.html",
|
||||
"https://bugs.launchpad.net/ubuntu/+bug/https://huntr.dev/bounties/63f51299-008a-4112-b85b-1e904aadd4ba",
|
||||
"https://support.apple.com/kb/HT213343",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4019",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4187",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4193",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-0156",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-August/011821.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-September/012143.html",
|
||||
"https://bugzilla.suse.com/1194388",
|
||||
"https://www.suse.com/support/security/rating/",
|
||||
"http://people.canonical.com/~ubuntu-security/cve/2022/CVE-2022-0128.html",
|
||||
"http://seclists.org/fulldisclosure/2022/May/35",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4069",
|
||||
"https://www.suse.com/security/cve/CVE-2022-0128",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011573.html",
|
||||
"http://seclists.org/fulldisclosure/2022/Mar/29",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-0128",
|
||||
"https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-0128",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-August/011795.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011495.html",
|
||||
"https://ubuntu.com/security/CVE-2022-0128",
|
||||
"https://huntr.dev/bounties/63f51299-008a-4112-b85b-1e904aadd4ba",
|
||||
"http://seclists.org/fulldisclosure/2022/Jul/14",
|
||||
"https://security.gentoo.org/glsa/202208-32",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-0158",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011591.html",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4136",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4173",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011494.html",
|
||||
"https://support.apple.com/kb/HT213183",
|
||||
"https://support.apple.com/kb/HT213256",
|
||||
"https://github.com/vim/vim/commit/d3a117814d6acbf0dca3eff1a7626843b9b3734a",
|
||||
"http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-4192",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011574.html",
|
||||
"https://lists.suse.com/pipermail/sle-security-updates/2022-July/011593.html"
|
||||
]
|
||||
}
|
||||
},
|
||||
"affected_packages": [
|
||||
{
|
||||
"name": "vim",
|
||||
"source": "official:ubuntu_security_tracker:CVE-2022-0128",
|
||||
"status": "needed"
|
||||
}
|
||||
]
|
||||
},
|
||||
...
|
||||
},
|
||||
"config": {
|
||||
"detect": {
|
||||
"path": "/home/mainek00n/github/github.com/future-architect/vuls/vuls.db",
|
||||
"result_dir": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- PoC, Exploit
|
||||
- [Exploit Database](https://www.exploit-db.com/)
|
||||
- [Metasploit-Framework modules](https://www.rapid7.com/db/?q=&type=metasploit)
|
||||
### 6. Report
|
||||
|
||||
- CERT
|
||||
- [US-CERT](https://www.us-cert.gov/ncas/alerts)
|
||||
- [JPCERT](http://www.jpcert.or.jp/at/2019.html)
|
||||
```console
|
||||
$ vuls report --format list
|
||||
cpe
|
||||
===
|
||||
+----------------+----------------------------+------+----------+-------+----------------------------------------+--------+----------+
|
||||
| CVEID | VECTOR | CVSS | EPSS | KEV | PACKAGE | STATUS | SOURCE |
|
||||
+----------------+----------------------------+------+----------+-------+----------------------------------------+--------+----------+
|
||||
| CVE-2021-44228 | AV:N/AC:M/Au:N/C:C/I:C/A:C | 9.3 | 0.911590 | true | cpe:2.3:a:apache:log4j:*:*:*:*:*:*:*:* | | official |
|
||||
+----------------+----------------------------+------+----------+-------+ +--------+ +
|
||||
| CVE-2022-23307 | AV:N/AC:L/Au:S/C:C/I:C/A:C | 9.0 | 0.011640 | false | | | |
|
||||
+----------------+----------------------------+------+----------+ + +--------+ +
|
||||
| CVE-2021-44832 | AV:N/AC:M/Au:S/C:C/I:C/A:C | 8.5 | 0.686370 | | | | |
|
||||
+----------------+----------------------------+------+----------+ + +--------+ +
|
||||
| CVE-2022-23305 | AV:N/AC:M/Au:N/C:P/I:P/A:P | 6.8 | 0.017420 | | | | |
|
||||
+----------------+----------------------------+------+----------+ + +--------+ +
|
||||
| CVE-2022-23302 | AV:N/AC:M/Au:S/C:P/I:P/A:P | 6.0 | 0.091480 | | | | |
|
||||
+----------------+----------------------------+------+----------+ + +--------+ +
|
||||
| CVE-2021-45046 | AV:N/AC:H/Au:N/C:P/I:P/A:P | 5.1 | 0.719510 | | | | |
|
||||
+----------------+----------------------------+------+----------+ + +--------+ +
|
||||
| CVE-2021-45105 | AV:N/AC:M/Au:N/C:N/I:N/A:P | 4.3 | 0.442620 | | | | |
|
||||
+----------------+----------------------------+------+----------+-------+----------------------------------------+--------+----------+
|
||||
|
||||
- Libraries
|
||||
- [Node.js Security Working Group](https://github.com/nodejs/security-wg)
|
||||
- [Ruby Advisory Database](https://github.com/rubysec/ruby-advisory-db)
|
||||
- [Safety DB(Python)](https://github.com/pyupio/safety-db)
|
||||
- [PHP Security Advisories Database](https://github.com/FriendsOfPHP/security-advisories)
|
||||
- [RustSec Advisory Database](https://github.com/RustSec/advisory-db)
|
||||
local (ubuntu 22.04)
|
||||
====================
|
||||
+----------------+----------------------------+------+----------+-------+-----------------------+----------+----------+
|
||||
| CVEID | VECTOR | CVSS | EPSS | KEV | PACKAGE | STATUS | SOURCE |
|
||||
+----------------+----------------------------+------+----------+-------+-----------------------+----------+----------+
|
||||
| CVE-2022-0318 | AV:N/AC:L/Au:N/C:P/I:P/A:P | 7.5 | 0.011830 | false | vim | needed | official |
|
||||
+----------------+----------------------------+------+----------+ +-----------------------+ + +
|
||||
| CVE-2022-25255 | AV:L/AC:L/Au:N/C:C/I:C/A:C | 7.2 | 0.009500 | | qtbase-opensource-src | | |
|
||||
+----------------+ + +----------+ +-----------------------+ + +
|
||||
| CVE-2022-0995 | | | 0.024480 | | linux | | |
|
||||
+----------------+----------------------------+------+----------+ +-----------------------+ + +
|
||||
...
|
||||
+----------------+----------------------------+------+----------+ +-----------------------+----------+ +
|
||||
| CVE-2022-42799 | | | 0.011080 | | webkit2gtk | needed | |
|
||||
+----------------+----------------------------+------+----------+ +-----------------------+ + +
|
||||
| CVE-2022-3061 | | | 0.008900 | | linux | | |
|
||||
+----------------+----------------------------+------+----------+-------+-----------------------+----------+----------+
|
||||
|
||||
- WordPress
|
||||
- [wpscan](https://wpscan.com/api)
|
||||
remote (debian 11)
|
||||
==================
|
||||
+----------------+----------------------------+------+----------+-------+------------+--------+----------+
|
||||
| CVEID | VECTOR | CVSS | EPSS | KEV | PACKAGE | STATUS | SOURCE |
|
||||
+----------------+----------------------------+------+----------+-------+------------+--------+----------+
|
||||
| CVE-2022-31782 | AV:N/AC:M/Au:N/C:P/I:P/A:P | 6.8 | 0.008850 | false | freetype | open | official |
|
||||
+----------------+ + +----------+ +------------+ + +
|
||||
| CVE-2022-1304 | | | 0.010360 | | e2fsprogs | | |
|
||||
+----------------+----------------------------+------+----------+ +------------+ + +
|
||||
| CVE-2022-1587 | AV:N/AC:L/Au:N/C:P/I:N/A:P | 6.4 | 0.011080 | | pcre2 | | |
|
||||
+----------------+ + +----------+ + + + +
|
||||
| CVE-2022-1586 | | | 0.011830 | | | | |
|
||||
+----------------+----------------------------+------+ + +------------+ + +
|
||||
| CVE-2022-2097 | AV:N/AC:L/Au:N/C:P/I:N/A:N | 5.0 | | | openssl | | |
|
||||
+----------------+----------------------------+------+----------+ +------------+ + +
|
||||
...
|
||||
+----------------+----------------------------+------+----------+ +------------+ + +
|
||||
| CVE-2022-38126 | | | 0.008850 | | binutils | | |
|
||||
+----------------+----------------------------+------+ + +------------+ + +
|
||||
| CVE-2022-41848 | | | | | linux | | |
|
||||
+----------------+----------------------------+------+----------+-------+------------+--------+----------+
|
||||
|
||||
### Scan mode
|
||||
|
||||
[Fast Scan](https://vuls.io/docs/en/architecture-fast-scan.html)
|
||||
|
||||
- Scan without root privilege, no dependencies
|
||||
- Almost no load on the scan target server
|
||||
- Offline mode scan with no internet access. (CentOS, Debian, Oracle Linux, Red Hat, and Ubuntu)
|
||||
|
||||
[Fast Root Scan](https://vuls.io/docs/en/architecture-fast-root-scan.html)
|
||||
|
||||
- Scan with root privilege
|
||||
- Almost no load on the scan target server
|
||||
- Detect processes affected by updates using yum-ps (Amazon Linux, CentOS, Oracle Linux, and RedHat)
|
||||
- Detect processes that have been updated but not yet restarted, using checkrestart from debian-goodies (Debian and Ubuntu)
|
||||
- Offline mode scan with no internet access. (CentOS, Debian, Oracle Linux, Red Hat, and Ubuntu)
|
||||
|
||||
### [Remote, Local scan mode, Server mode](https://vuls.io/docs/en/architecture-remote-local.html)
|
||||
|
||||
[Remote scan mode](https://vuls.io/docs/en/architecture-remote-scan.html)
|
||||
|
||||
- You only need to set up one machine that can connect to the target servers via SSH
|
||||
|
||||
[Local scan mode](https://vuls.io/docs/en/architecture-local-scan.html)
|
||||
|
||||
- If you don't want the central Vuls server to connect to each server by SSH, you can use Vuls in the Local Scan mode.
|
||||
|
||||
[Server mode](https://vuls.io/docs/en/usage-server.html)
|
||||
|
||||
- First, start Vuls in server mode so that it listens as an HTTP server.
|
||||
- Next, run a command on the scan target server to collect software information and send the result to the Vuls server via HTTP. You receive the scan results in JSON format.
|
||||
- No SSH and no scanner are needed; you only issue Linux commands directly on the scan target server. A client-side sketch of this flow is shown below.
|
||||
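The snippet below is a minimal Go sketch of this flow from the client side. It assumes the server is listening on `127.0.0.1:5515` and reuses the `/scan` and `/detect` endpoints and file names from the examples above; it is an illustration of the HTTP round trips, not part of Vuls itself.

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
)

// postJSON sends the JSON file at path to a Vuls server endpoint
// and returns the response body.
func postJSON(url, path string) []byte {
	body, err := os.ReadFile(path)
	if err != nil {
		log.Fatal(err)
	}
	resp, err := http.Post(url, "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	out, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	return out
}

func main() {
	const server = "http://127.0.0.1:5515"

	// 1. Send the collected software information to /scan.
	scanResult := postJSON(server+"/scan", "machine.json")
	if err := os.WriteFile("scan.json", scanResult, 0644); err != nil {
		log.Fatal(err)
	}

	// 2. Send the scan result to /detect and print the detected CVEs as JSON.
	fmt.Println(string(postJSON(server+"/detect", "scan.json")))
}
```

This corresponds to the `curl -X POST ... /scan` and `... /detect` examples shown earlier.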
|
||||
### **Dynamic** Analysis
|
||||
|
||||
- Vuls can acquire the state of the server by connecting via SSH and executing commands.
|
||||
- Vuls warns when the kernel or other packages on the scan target server have been updated but the server has not been restarted yet.
|
||||
|
||||
### Scan vulnerabilities of non-OS-packages
|
||||
|
||||
- Libraries of programming language
|
||||
- Self-compiled software
|
||||
- Network Devices
|
||||
|
||||
Vuls offers several ways to detect these vulnerabilities:
|
||||
|
||||
- [Lockfile based Scan](https://vuls.io/docs/en/usage-scan-non-os-packages.html#library-vulns-scan)
|
||||
- [GitHub Integration](https://vuls.io/docs/en/usage-scan-non-os-packages.html#usage-integrate-with-github-security-alerts)
|
||||
- [Common Platform Enumeration (CPE) based Scan](https://vuls.io/docs/en/usage-scan-non-os-packages.html#cpe-scan)
|
||||
- [OWASP Dependency Check Integration](https://vuls.io/docs/en/usage-scan-non-os-packages.html#usage-integrate-with-owasp-dependency-check-to-automatic-update-when-the-libraries-are-updated-experimental)
|
||||
|
||||
## Scan WordPress core, themes, plugins
|
||||
|
||||
- [Scan WordPress](https://vuls.io/docs/en/usage-scan-wordpress.html)
|
||||
|
||||
## MISC
|
||||
|
||||
- Nondestructive testing
|
||||
- Pre-authorization is *NOT* necessary before scanning on AWS
|
||||
- Vuls works well with Continuous Integration since tests can be run every day. This allows you to find vulnerabilities very quickly.
|
||||
- Auto-generation of configuration file template
|
||||
- Auto-detection of servers within a CIDR range and generation of a configuration file template
|
||||
- Email and Slack notifications are possible (Japanese is supported)
|
||||
- Scan results are viewable with accessory software: a TUI viewer in the terminal or a Web UI ([VulsRepo](https://github.com/ishiDACo/vulsrepo)).
|
||||
|
||||
----
|
||||
|
||||
## What Vuls Doesn't Do
|
||||
|
||||
- Vuls doesn't update the vulnerable packages.
|
||||
|
||||
----
|
||||
|
||||
## Document
|
||||
|
||||
For more information on installation, tutorials, and usage, visit [vuls.io](https://vuls.io/)
|
||||
[Documentation in Japanese](https://vuls.io/ja/)
|
||||
|
||||
----
|
||||
|
||||
## Authors
|
||||
|
||||
kotakanbe ([@kotakanbe](https://twitter.com/kotakanbe)) created vuls and [these fine people](https://github.com/future-architect/vuls/graphs/contributors) have contributed.
|
||||
|
||||
----
|
||||
|
||||
## Stargazers over time
|
||||
|
||||
[](https://starcharts.herokuapp.com/future-architect/vuls)
|
||||
|
||||
----
|
||||
|
||||
## License
|
||||
|
||||
Please see [LICENSE](https://github.com/future-architect/vuls/blob/master/LICENSE).
|
||||
```
|
||||
171
cache/bolt.go
vendored
@@ -1,171 +0,0 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/boltdb/bolt"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/sirupsen/logrus"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// Bolt holds a pointer of bolt.DB
|
||||
// boltdb is used to store a cache of Changelogs of Ubuntu/Debian
|
||||
type Bolt struct {
|
||||
Path string
|
||||
Log *logrus.Entry
|
||||
db *bolt.DB
|
||||
}
|
||||
|
||||
// SetupBolt opens a boltdb and creates a meta bucket if not exists.
|
||||
func SetupBolt(path string, l *logrus.Entry) error {
|
||||
l.Infof("Open boltDB: %s", path)
|
||||
db, err := bolt.Open(path, 0600, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
b := Bolt{
|
||||
Path: path,
|
||||
Log: l,
|
||||
db: db,
|
||||
}
|
||||
if err = b.createBucketIfNotExists(metabucket); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
DB = b
|
||||
return nil
|
||||
}
|
||||
|
||||
// Close a db.
|
||||
func (b Bolt) Close() error {
|
||||
if b.db == nil {
|
||||
return nil
|
||||
}
|
||||
return b.db.Close()
|
||||
}
|
||||
|
||||
// createBucketIfNotExists creates a bucket with the name specified by the argument.
|
||||
func (b *Bolt) createBucketIfNotExists(name string) error {
|
||||
return b.db.Update(func(tx *bolt.Tx) error {
|
||||
_, err := tx.CreateBucketIfNotExists([]byte(name))
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to create bucket: %w", err)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// GetMeta gets the Meta information of the given servername from boltdb.
|
||||
func (b Bolt) GetMeta(serverName string) (meta Meta, found bool, err error) {
|
||||
err = b.db.View(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(metabucket))
|
||||
v := bkt.Get([]byte(serverName))
|
||||
if len(v) == 0 {
|
||||
found = false
|
||||
return nil
|
||||
}
|
||||
if e := json.Unmarshal(v, &meta); e != nil {
|
||||
return e
|
||||
}
|
||||
found = true
|
||||
return nil
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// RefreshMeta puts the refreshed Meta information of the given servername into boltdb.
|
||||
func (b Bolt) RefreshMeta(meta Meta) error {
|
||||
meta.CreatedAt = time.Now()
|
||||
jsonBytes, err := json.Marshal(meta)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to marshal to JSON: %w", err)
|
||||
}
|
||||
return b.db.Update(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(metabucket))
|
||||
if err := bkt.Put([]byte(meta.Name), jsonBytes); err != nil {
|
||||
return err
|
||||
}
|
||||
b.Log.Debugf("Refreshed Meta: %s", meta.Name)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// EnsureBuckets puts the Meta information and creates a bucket that holds changelogs.
|
||||
func (b Bolt) EnsureBuckets(meta Meta) error {
|
||||
jsonBytes, err := json.Marshal(meta)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to marshal to JSON: %w", err)
|
||||
}
|
||||
return b.db.Update(func(tx *bolt.Tx) error {
|
||||
b.Log.Debugf("Put to meta: %s", meta.Name)
|
||||
bkt := tx.Bucket([]byte(metabucket))
|
||||
if err := bkt.Put([]byte(meta.Name), jsonBytes); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// re-create a bucket (bucket name: servername)
|
||||
bkt = tx.Bucket([]byte(meta.Name))
|
||||
if bkt != nil {
|
||||
b.Log.Debugf("Delete bucket: %s", meta.Name)
|
||||
if err := tx.DeleteBucket([]byte(meta.Name)); err != nil {
|
||||
return err
|
||||
}
|
||||
b.Log.Debugf("Bucket deleted: %s", meta.Name)
|
||||
}
|
||||
b.Log.Debugf("Create bucket: %s", meta.Name)
|
||||
if _, err := tx.CreateBucket([]byte(meta.Name)); err != nil {
|
||||
return err
|
||||
}
|
||||
b.Log.Debugf("Bucket created: %s", meta.Name)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// PrettyPrint is for debug
|
||||
func (b Bolt) PrettyPrint(meta Meta) error {
|
||||
return b.db.View(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(metabucket))
|
||||
v := bkt.Get([]byte(meta.Name))
|
||||
b.Log.Debugf("Meta: key:%s, value:%s", meta.Name, v)
|
||||
|
||||
bkt = tx.Bucket([]byte(meta.Name))
|
||||
c := bkt.Cursor()
|
||||
for k, v := c.First(); k != nil; k, v = c.Next() {
|
||||
b.Log.Debugf("key:%s, len: %d, %s...",
|
||||
k, len(v), util.Truncate(string(v), 30))
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// GetChangelog gets the changelog of the specified packName from the bucket
|
||||
func (b Bolt) GetChangelog(servername, packName string) (changelog string, err error) {
|
||||
err = b.db.View(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(servername))
|
||||
if bkt == nil {
|
||||
return xerrors.Errorf("Failed to get Bucket: %s", servername)
|
||||
}
|
||||
v := bkt.Get([]byte(packName))
|
||||
if v == nil {
|
||||
changelog = ""
|
||||
return nil
|
||||
}
|
||||
changelog = string(v)
|
||||
return nil
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// PutChangelog puts the changelog of the specified packName into the bucket
|
||||
func (b Bolt) PutChangelog(servername, packName, changelog string) error {
|
||||
return b.db.Update(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(servername))
|
||||
if bkt == nil {
|
||||
return xerrors.Errorf("Failed to get Bucket: %s", servername)
|
||||
}
|
||||
return bkt.Put([]byte(packName), []byte(changelog))
|
||||
})
|
||||
}
|
||||
120
cache/bolt_test.go
vendored
@@ -1,120 +0,0 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"os"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/boltdb/bolt"
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
const path = "/tmp/vuls-test-cache-11111111.db"
|
||||
const servername = "server1"
|
||||
|
||||
var meta = Meta{
|
||||
Name: servername,
|
||||
Distro: config.Distro{
|
||||
Family: "ubuntu",
|
||||
Release: "16.04",
|
||||
},
|
||||
Packs: models.Packages{
|
||||
"apt": {
|
||||
Name: "apt",
|
||||
Version: "1",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func TestSetupBolt(t *testing.T) {
|
||||
log := logrus.NewEntry(&logrus.Logger{})
|
||||
err := SetupBolt(path, log)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to setup bolt: %s", err)
|
||||
}
|
||||
defer os.Remove(path)
|
||||
|
||||
if err := DB.Close(); err != nil {
|
||||
t.Errorf("Failed to close bolt: %s", err)
|
||||
}
|
||||
|
||||
// check if meta bucket exists
|
||||
db, err := bolt.Open(path, 0600, nil)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to open bolt: %s", err)
|
||||
}
|
||||
|
||||
_ = db.View(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(metabucket))
|
||||
if bkt == nil {
|
||||
t.Errorf("Meta bucket nof found")
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
func TestEnsureBuckets(t *testing.T) {
|
||||
log := logrus.NewEntry(&logrus.Logger{})
|
||||
if err := SetupBolt(path, log); err != nil {
|
||||
t.Errorf("Failed to setup bolt: %s", err)
|
||||
}
|
||||
if err := DB.EnsureBuckets(meta); err != nil {
|
||||
t.Errorf("Failed to ensure buckets: %s", err)
|
||||
}
|
||||
defer os.Remove(path)
|
||||
|
||||
m, found, err := DB.GetMeta(servername)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to get meta: %s", err)
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Not Found in meta")
|
||||
}
|
||||
if meta.Name != m.Name || meta.Distro != m.Distro {
|
||||
t.Errorf("expected %v, actual %v", meta, m)
|
||||
}
|
||||
if !reflect.DeepEqual(meta.Packs, m.Packs) {
|
||||
t.Errorf("expected %v, actual %v", meta.Packs, m.Packs)
|
||||
}
|
||||
if err := DB.Close(); err != nil {
|
||||
t.Errorf("Failed to close bolt: %s", err)
|
||||
}
|
||||
|
||||
db, err := bolt.Open(path, 0600, nil)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to open bolt: %s", err)
|
||||
}
|
||||
_ = db.View(func(tx *bolt.Tx) error {
|
||||
bkt := tx.Bucket([]byte(servername))
|
||||
if bkt == nil {
|
||||
t.Errorf("Meta bucket nof found")
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func TestPutGetChangelog(t *testing.T) {
|
||||
clog := "changelog-text"
|
||||
log := logrus.NewEntry(&logrus.Logger{})
|
||||
if err := SetupBolt(path, log); err != nil {
|
||||
t.Errorf("Failed to setup bolt: %s", err)
|
||||
}
|
||||
defer os.Remove(path)
|
||||
|
||||
if err := DB.EnsureBuckets(meta); err != nil {
|
||||
t.Errorf("Failed to ensure buckets: %s", err)
|
||||
}
|
||||
if err := DB.PutChangelog(servername, "apt", clog); err != nil {
|
||||
t.Errorf("Failed to put changelog: %s", err)
|
||||
}
|
||||
if actual, err := DB.GetChangelog(servername, "apt"); err != nil {
|
||||
t.Errorf("Failed to get changelog: %s", err)
|
||||
} else {
|
||||
if actual != clog {
|
||||
t.Errorf("changelog is not same. e: %s, a: %s", clog, actual)
|
||||
}
|
||||
}
|
||||
}
|
||||
33
cache/db.go
vendored
@@ -1,33 +0,0 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
// DB has a cache instance
|
||||
var DB Cache
|
||||
|
||||
const metabucket = "changelog-meta"
|
||||
|
||||
// Cache is a interface of cache
|
||||
type Cache interface {
|
||||
Close() error
|
||||
GetMeta(string) (Meta, bool, error)
|
||||
RefreshMeta(Meta) error
|
||||
EnsureBuckets(Meta) error
|
||||
PrettyPrint(Meta) error
|
||||
GetChangelog(string, string) (string, error)
|
||||
PutChangelog(string, string, string) error
|
||||
}
|
||||
|
||||
// Meta holds a server name, distro information of the scanned server and
|
||||
// package information that was collected at the last scan.
|
||||
type Meta struct {
|
||||
Name string
|
||||
Distro config.Distro
|
||||
Packs models.Packages
|
||||
CreatedAt time.Time
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"context"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
commands "github.com/future-architect/vuls/subcmds"
|
||||
"github.com/google/subcommands"
|
||||
)
|
||||
|
||||
func main() {
|
||||
subcommands.Register(subcommands.HelpCommand(), "")
|
||||
subcommands.Register(subcommands.FlagsCommand(), "")
|
||||
subcommands.Register(subcommands.CommandsCommand(), "")
|
||||
subcommands.Register(&commands.DiscoverCmd{}, "discover")
|
||||
subcommands.Register(&commands.ScanCmd{}, "scan")
|
||||
subcommands.Register(&commands.HistoryCmd{}, "history")
|
||||
subcommands.Register(&commands.ConfigtestCmd{}, "configtest")
|
||||
subcommands.Register(&commands.SaaSCmd{}, "saas")
|
||||
|
||||
var v = flag.Bool("v", false, "Show version")
|
||||
|
||||
flag.Parse()
|
||||
|
||||
if *v {
|
||||
fmt.Printf("vuls %s %s\n", config.Version, config.Revision)
|
||||
os.Exit(int(subcommands.ExitSuccess))
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
os.Exit(int(subcommands.Execute(ctx)))
|
||||
}
|
||||
@@ -1,38 +1,15 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"context"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
commands "github.com/future-architect/vuls/subcmds"
|
||||
"github.com/google/subcommands"
|
||||
"github.com/future-architect/vuls/pkg/cmd/root"
|
||||
)
|
||||
|
||||
func main() {
|
||||
subcommands.Register(subcommands.HelpCommand(), "")
|
||||
subcommands.Register(subcommands.FlagsCommand(), "")
|
||||
subcommands.Register(subcommands.CommandsCommand(), "")
|
||||
subcommands.Register(&commands.DiscoverCmd{}, "discover")
|
||||
subcommands.Register(&commands.TuiCmd{}, "tui")
|
||||
subcommands.Register(&commands.ScanCmd{}, "scan")
|
||||
subcommands.Register(&commands.HistoryCmd{}, "history")
|
||||
subcommands.Register(&commands.ReportCmd{}, "report")
|
||||
subcommands.Register(&commands.ConfigtestCmd{}, "configtest")
|
||||
subcommands.Register(&commands.ServerCmd{}, "server")
|
||||
|
||||
var v = flag.Bool("v", false, "Show version")
|
||||
|
||||
flag.Parse()
|
||||
|
||||
if *v {
|
||||
fmt.Printf("vuls %s %s\n", config.Version, config.Revision)
|
||||
os.Exit(int(subcommands.ExitSuccess))
|
||||
if err := root.NewCmdRoot().Execute(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "failed to exec vuls: %s\n", fmt.Sprintf("%+v", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
os.Exit(int(subcommands.Execute(ctx)))
|
||||
}
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// ChatWorkConf is ChatWork config
|
||||
type ChatWorkConf struct {
|
||||
APIToken string `json:"-"`
|
||||
Room string `json:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *ChatWorkConf) Validate() (errs []error) {
|
||||
if !Conf.ToChatWork {
|
||||
return
|
||||
}
|
||||
if len(c.Room) == 0 {
|
||||
errs = append(errs, xerrors.New("chatWorkConf.room must not be empty"))
|
||||
}
|
||||
|
||||
if len(c.APIToken) == 0 {
|
||||
errs = append(errs, xerrors.New("chatWorkConf.ApiToken must not be empty"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
package config
|
||||
|
||||
var (
|
||||
// Colors has ansi color list
|
||||
Colors = []string{
|
||||
"\033[32m", // green
|
||||
"\033[33m", // yellow
|
||||
"\033[36m", // cyan
|
||||
"\033[35m", // magenta
|
||||
"\033[31m", // red
|
||||
"\033[34m", // blue
|
||||
}
|
||||
// ResetColor is reset color
|
||||
ResetColor = "\033[0m"
|
||||
)
|
||||
463
config/config.go
@@ -1,463 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// Version of Vuls
|
||||
var Version = "`make build` or `make install` will show the version"
|
||||
|
||||
// Revision of Git
|
||||
var Revision string
|
||||
|
||||
// Conf has Configuration
|
||||
var Conf Config
|
||||
|
||||
//Config is struct of Configuration
|
||||
type Config struct {
|
||||
Debug bool `json:"debug,omitempty"`
|
||||
DebugSQL bool `json:"debugSQL,omitempty"`
|
||||
Lang string `json:"lang,omitempty"`
|
||||
HTTPProxy string `valid:"url" json:"httpProxy,omitempty"`
|
||||
LogDir string `json:"logDir,omitempty"`
|
||||
ResultsDir string `json:"resultsDir,omitempty"`
|
||||
Pipe bool `json:"pipe,omitempty"`
|
||||
Quiet bool `json:"quiet,omitempty"`
|
||||
NoProgress bool `json:"noProgress,omitempty"`
|
||||
SSHNative bool `json:"sshNative,omitempty"`
|
||||
Vvv bool `json:"vvv,omitempty"`
|
||||
|
||||
Default ServerInfo `json:"default,omitempty"`
|
||||
Servers map[string]ServerInfo `json:"servers,omitempty"`
|
||||
CvssScoreOver float64 `json:"cvssScoreOver,omitempty"`
|
||||
|
||||
IgnoreUnscoredCves bool `json:"ignoreUnscoredCves,omitempty"`
|
||||
IgnoreUnfixed bool `json:"ignoreUnfixed,omitempty"`
|
||||
IgnoreGitHubDismissed bool `json:"ignore_git_hub_dismissed,omitempty"`
|
||||
|
||||
CacheDBPath string `json:"cacheDBPath,omitempty"`
|
||||
TrivyCacheDBDir string `json:"trivyCacheDBDir,omitempty"`
|
||||
|
||||
CveDict GoCveDictConf `json:"cveDict,omitempty"`
|
||||
OvalDict GovalDictConf `json:"ovalDict,omitempty"`
|
||||
Gost GostConf `json:"gost,omitempty"`
|
||||
Exploit ExploitConf `json:"exploit,omitempty"`
|
||||
Metasploit MetasploitConf `json:"metasploit,omitempty"`
|
||||
|
||||
Slack SlackConf `json:"-"`
|
||||
EMail SMTPConf `json:"-"`
|
||||
HTTP HTTPConf `json:"-"`
|
||||
Syslog SyslogConf `json:"-"`
|
||||
AWS AWSConf `json:"-"`
|
||||
Azure AzureConf `json:"-"`
|
||||
ChatWork ChatWorkConf `json:"-"`
|
||||
Telegram TelegramConf `json:"-"`
|
||||
|
||||
WpScan WpScanConf `json:"WpScan,omitempty"`
|
||||
|
||||
Saas SaasConf `json:"-"`
|
||||
DetectIPS bool `json:"detectIps,omitempty"`
|
||||
|
||||
RefreshCve bool `json:"refreshCve,omitempty"`
|
||||
ToSlack bool `json:"toSlack,omitempty"`
|
||||
ToChatWork bool `json:"toChatWork,omitempty"`
|
||||
ToTelegram bool `json:"ToTelegram,omitempty"`
|
||||
ToEmail bool `json:"toEmail,omitempty"`
|
||||
ToSyslog bool `json:"toSyslog,omitempty"`
|
||||
ToLocalFile bool `json:"toLocalFile,omitempty"`
|
||||
ToS3 bool `json:"toS3,omitempty"`
|
||||
ToAzureBlob bool `json:"toAzureBlob,omitempty"`
|
||||
ToHTTP bool `json:"toHTTP,omitempty"`
|
||||
FormatXML bool `json:"formatXML,omitempty"`
|
||||
FormatJSON bool `json:"formatJSON,omitempty"`
|
||||
FormatOneEMail bool `json:"formatOneEMail,omitempty"`
|
||||
FormatOneLineText bool `json:"formatOneLineText,omitempty"`
|
||||
FormatList bool `json:"formatList,omitempty"`
|
||||
FormatFullText bool `json:"formatFullText,omitempty"`
|
||||
FormatCsvList bool `json:"formatCsvList,omitempty"`
|
||||
GZIP bool `json:"gzip,omitempty"`
|
||||
Diff bool `json:"diff,omitempty"`
|
||||
}
|
||||
|
||||
// ValidateOnConfigtest validates
|
||||
func (c Config) ValidateOnConfigtest() bool {
|
||||
errs := c.checkSSHKeyExist()
|
||||
|
||||
if runtime.GOOS == "windows" && !c.SSHNative {
|
||||
errs = append(errs, xerrors.New("-ssh-native-insecure is needed on windows"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
log.Error(err)
|
||||
}
|
||||
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
// ValidateOnScan validates configuration
|
||||
func (c Config) ValidateOnScan() bool {
|
||||
errs := c.checkSSHKeyExist()
|
||||
|
||||
if runtime.GOOS == "windows" && !c.SSHNative {
|
||||
errs = append(errs, xerrors.New("-ssh-native-insecure is needed on windows"))
|
||||
}
|
||||
|
||||
if len(c.ResultsDir) != 0 {
|
||||
if ok, _ := govalidator.IsFilePath(c.ResultsDir); !ok {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"JSON base directory must be a *Absolute* file path. -results-dir: %s", c.ResultsDir))
|
||||
}
|
||||
}
|
||||
|
||||
if len(c.CacheDBPath) != 0 {
|
||||
if ok, _ := govalidator.IsFilePath(c.CacheDBPath); !ok {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"Cache DB path must be a *Absolute* file path. -cache-dbpath: %s",
|
||||
c.CacheDBPath))
|
||||
}
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
log.Error(err)
|
||||
}
|
||||
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
func (c Config) checkSSHKeyExist() (errs []error) {
|
||||
for serverName, v := range c.Servers {
|
||||
if v.Type == ServerTypePseudo {
|
||||
continue
|
||||
}
|
||||
if v.KeyPath != "" {
|
||||
if _, err := os.Stat(v.KeyPath); err != nil {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"%s is invalid. keypath: %s not exists", serverName, v.KeyPath))
|
||||
}
|
||||
}
|
||||
}
|
||||
return errs
|
||||
}
|
||||
|
||||
// ValidateOnReportDB validates configuration
|
||||
func (c Config) ValidateOnReportDB() bool {
|
||||
errs := []error{}
|
||||
|
||||
if err := validateDB("cvedb", c.CveDict.Type, c.CveDict.SQLite3Path, c.CveDict.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if err := validateDB("ovaldb", c.OvalDict.Type, c.OvalDict.SQLite3Path, c.OvalDict.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if err := validateDB("gostdb", c.Gost.Type, c.Gost.SQLite3Path, c.Gost.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if err := validateDB("exploitdb", c.Exploit.Type, c.Exploit.SQLite3Path, c.Exploit.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if err := validateDB("msfdb", c.Metasploit.Type, c.Metasploit.SQLite3Path, c.Metasploit.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
log.Error(err)
|
||||
}
|
||||
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
// ValidateOnReport validates configuration
|
||||
func (c Config) ValidateOnReport() bool {
|
||||
errs := []error{}
|
||||
|
||||
if len(c.ResultsDir) != 0 {
|
||||
if ok, _ := govalidator.IsFilePath(c.ResultsDir); !ok {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"JSON base directory must be a *Absolute* file path. -results-dir: %s", c.ResultsDir))
|
||||
}
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if mailerrs := c.EMail.Validate(); 0 < len(mailerrs) {
|
||||
errs = append(errs, mailerrs...)
|
||||
}
|
||||
|
||||
if slackerrs := c.Slack.Validate(); 0 < len(slackerrs) {
|
||||
errs = append(errs, slackerrs...)
|
||||
}
|
||||
|
||||
if chatworkerrs := c.ChatWork.Validate(); 0 < len(chatworkerrs) {
|
||||
errs = append(errs, chatworkerrs...)
|
||||
}
|
||||
|
||||
if telegramerrs := c.Telegram.Validate(); 0 < len(telegramerrs) {
|
||||
errs = append(errs, telegramerrs...)
|
||||
}
|
||||
|
||||
if syslogerrs := c.Syslog.Validate(); 0 < len(syslogerrs) {
|
||||
errs = append(errs, syslogerrs...)
|
||||
}
|
||||
|
||||
if httperrs := c.HTTP.Validate(); 0 < len(httperrs) {
|
||||
errs = append(errs, httperrs...)
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
log.Error(err)
|
||||
}
|
||||
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
// ValidateOnTui validates configuration
|
||||
func (c Config) ValidateOnTui() bool {
|
||||
errs := []error{}
|
||||
|
||||
if len(c.ResultsDir) != 0 {
|
||||
if ok, _ := govalidator.IsFilePath(c.ResultsDir); !ok {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"JSON base directory must be a *Absolute* file path. -results-dir: %s", c.ResultsDir))
|
||||
}
|
||||
}
|
||||
|
||||
if err := validateDB("cvedb", c.CveDict.Type, c.CveDict.SQLite3Path, c.CveDict.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
log.Error(err)
|
||||
}
|
||||
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
// ValidateOnSaaS validates configuration
|
||||
func (c Config) ValidateOnSaaS() bool {
|
||||
saaserrs := c.Saas.Validate()
|
||||
for _, err := range saaserrs {
|
||||
log.Error("Failed to validate SaaS conf: %+w", err)
|
||||
}
|
||||
return len(saaserrs) == 0
|
||||
}
|
||||
|
||||
// validateDB validates configuration
|
||||
func validateDB(dictionaryDBName, dbType, dbPath, dbURL string) error {
|
||||
log.Infof("-%s-type: %s, -%s-url: %s, -%s-path: %s",
|
||||
dictionaryDBName, dbType, dictionaryDBName, dbURL, dictionaryDBName, dbPath)
|
||||
|
||||
switch dbType {
|
||||
case "sqlite3":
|
||||
if dbURL != "" {
|
||||
return xerrors.Errorf("To use SQLite3, specify -%s-type=sqlite3 and -%s-path. To use as http server mode, specify -%s-type=http and -%s-url",
|
||||
dictionaryDBName, dictionaryDBName, dictionaryDBName, dictionaryDBName)
|
||||
}
|
||||
if ok, _ := govalidator.IsFilePath(dbPath); !ok {
|
||||
return xerrors.Errorf("SQLite3 path must be a *Absolute* file path. -%s-path: %s",
|
||||
dictionaryDBName, dbPath)
|
||||
}
|
||||
case "mysql":
|
||||
if dbURL == "" {
|
||||
return xerrors.Errorf(`MySQL connection string is needed. -%s-url="user:pass@tcp(localhost:3306)/dbname"`,
|
||||
dictionaryDBName)
|
||||
}
|
||||
case "postgres":
|
||||
if dbURL == "" {
|
||||
return xerrors.Errorf(`PostgreSQL connection string is needed. -%s-url="host=myhost user=user dbname=dbname sslmode=disable password=password"`,
|
||||
dictionaryDBName)
|
||||
}
|
||||
case "redis":
|
||||
if dbURL == "" {
|
||||
return xerrors.Errorf(`Redis connection string is needed. -%s-url="redis://localhost/0"`,
|
||||
dictionaryDBName)
|
||||
}
|
||||
case "http":
|
||||
if dbURL == "" {
|
||||
return xerrors.Errorf(`URL is needed. -%s-url="http://localhost:1323"`,
|
||||
dictionaryDBName)
|
||||
}
|
||||
default:
|
||||
return xerrors.Errorf("%s type must be either 'sqlite3', 'mysql', 'postgres', 'redis' or 'http'. -%s-type: %s",
|
||||
dictionaryDBName, dictionaryDBName, dbType)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// AWSConf is aws config
|
||||
type AWSConf struct {
|
||||
// AWS profile to use
|
||||
Profile string `json:"profile"`
|
||||
|
||||
// AWS region to use
|
||||
Region string `json:"region"`
|
||||
|
||||
// S3 bucket name
|
||||
S3Bucket string `json:"s3Bucket"`
|
||||
|
||||
// /bucket/path/to/results
|
||||
S3ResultsDir string `json:"s3ResultsDir"`
|
||||
|
||||
// The Server-side encryption algorithm used when storing the reports in S3 (e.g., AES256, aws:kms).
|
||||
S3ServerSideEncryption string `json:"s3ServerSideEncryption"`
|
||||
}
|
||||
|
||||
// AzureConf is azure config
|
||||
type AzureConf struct {
|
||||
// Azure account name to use. AZURE_STORAGE_ACCOUNT environment variable is used if not specified
|
||||
AccountName string `json:"accountName"`
|
||||
|
||||
// Azure account key to use. AZURE_STORAGE_ACCESS_KEY environment variable is used if not specified
|
||||
AccountKey string `json:"-"`
|
||||
|
||||
// Azure storage container name
|
||||
ContainerName string `json:"containerName"`
|
||||
}
|
||||
|
||||
// WpScanConf is wpscan.com config
|
||||
type WpScanConf struct {
|
||||
Token string `toml:"Token,omitempty" json:"-"`
|
||||
DetectInactive bool `toml:"detectInactive,omitempty" json:"detectInactive,omitempty"`
|
||||
}
|
||||
|
||||
// ServerInfo has SSH Info, additional CPE packages to scan.
|
||||
type ServerInfo struct {
|
||||
ServerName string `toml:"-" json:"serverName,omitempty"`
|
||||
User string `toml:"user,omitempty" json:"user,omitempty"`
|
||||
Host string `toml:"host,omitempty" json:"host,omitempty"`
|
||||
JumpServer []string `toml:"jumpServer,omitempty" json:"jumpServer,omitempty"`
|
||||
Port string `toml:"port,omitempty" json:"port,omitempty"`
|
||||
SSHConfigPath string `toml:"sshConfigPath,omitempty" json:"sshConfigPath,omitempty"`
|
||||
KeyPath string `toml:"keyPath,omitempty" json:"keyPath,omitempty"`
|
||||
KeyPassword string `json:"-" toml:"-"`
|
||||
CpeNames []string `toml:"cpeNames,omitempty" json:"cpeNames,omitempty"`
|
||||
ScanMode []string `toml:"scanMode,omitempty" json:"scanMode,omitempty"`
|
||||
ScanModules []string `toml:"scanModules,omitempty" json:"scanModules,omitempty"`
|
||||
OwaspDCXMLPath string `toml:"owaspDCXMLPath,omitempty" json:"owaspDCXMLPath,omitempty"`
|
||||
ContainersOnly bool `toml:"containersOnly,omitempty" json:"containersOnly,omitempty"`
|
||||
ContainersIncluded []string `toml:"containersIncluded,omitempty" json:"containersIncluded,omitempty"`
|
||||
ContainersExcluded []string `toml:"containersExcluded,omitempty" json:"containersExcluded,omitempty"`
|
||||
ContainerType string `toml:"containerType,omitempty" json:"containerType,omitempty"`
|
||||
Containers map[string]ContainerSetting `toml:"containers,omitempty" json:"containers,omitempty"`
|
||||
IgnoreCves []string `toml:"ignoreCves,omitempty" json:"ignoreCves,omitempty"`
|
||||
IgnorePkgsRegexp []string `toml:"ignorePkgsRegexp,omitempty" json:"ignorePkgsRegexp,omitempty"`
|
||||
GitHubRepos map[string]GitHubConf `toml:"githubs" json:"githubs,omitempty"` // key: owner/repo
|
||||
UUIDs map[string]string `toml:"uuids,omitempty" json:"uuids,omitempty"`
|
||||
Memo string `toml:"memo,omitempty" json:"memo,omitempty"`
|
||||
Enablerepo []string `toml:"enablerepo,omitempty" json:"enablerepo,omitempty"` // For CentOS, RHEL, Amazon
|
||||
Optional map[string]interface{} `toml:"optional,omitempty" json:"optional,omitempty"` // Optional key-value set that will be outputted to JSON
|
||||
Lockfiles []string `toml:"lockfiles,omitempty" json:"lockfiles,omitempty"` // e.g. path/to/package-lock.json
|
||||
FindLock bool `toml:"findLock,omitempty" json:"findLock,omitempty"`
|
||||
Type string `toml:"type,omitempty" json:"type,omitempty"` // "pseudo" or ""
|
||||
IgnoredJSONKeys []string `toml:"ignoredJSONKeys,omitempty" json:"ignoredJSONKeys,omitempty"`
|
||||
IPv4Addrs []string `toml:"-" json:"ipv4Addrs,omitempty"`
|
||||
IPv6Addrs []string `toml:"-" json:"ipv6Addrs,omitempty"`
|
||||
IPSIdentifiers map[IPS]string `toml:"-" json:"ipsIdentifiers,omitempty"`
|
||||
WordPress *WordPressConf `toml:"wordpress,omitempty" json:"wordpress,omitempty"`
|
||||
|
||||
// internal use
|
||||
LogMsgAnsiColor string `toml:"-" json:"-"` // DebugLog Color
|
||||
Container Container `toml:"-" json:"-"`
|
||||
Distro Distro `toml:"-" json:"-"`
|
||||
Mode ScanMode `toml:"-" json:"-"`
|
||||
Module ScanModule `toml:"-" json:"-"`
|
||||
}
|
||||
|
||||
// ContainerSetting is used for loading container setting in config.toml
|
||||
type ContainerSetting struct {
|
||||
Cpes []string `json:"cpes,omitempty"`
|
||||
OwaspDCXMLPath string `json:"owaspDCXMLPath,omitempty"`
|
||||
IgnorePkgsRegexp []string `json:"ignorePkgsRegexp,omitempty"`
|
||||
IgnoreCves []string `json:"ignoreCves,omitempty"`
|
||||
}
|
||||
|
||||
// WordPressConf is used for WordPress scanning
|
||||
type WordPressConf struct {
|
||||
OSUser string `toml:"osUser,omitempty" json:"osUser,omitempty"`
|
||||
DocRoot string `toml:"docRoot,omitempty" json:"docRoot,omitempty"`
|
||||
CmdPath string `toml:"cmdPath,omitempty" json:"cmdPath,omitempty"`
|
||||
}
|
||||
|
||||
// IsZero returns whether this struct is not specified in config.toml
|
||||
func (cnf WordPressConf) IsZero() bool {
|
||||
return cnf.OSUser == "" && cnf.DocRoot == "" && cnf.CmdPath == ""
|
||||
}
|
||||
|
||||
// GitHubConf is used for GitHub Security Alerts
|
||||
type GitHubConf struct {
|
||||
Token string `json:"-"`
|
||||
}
|
||||
|
||||
// GetServerName returns ServerName if this serverInfo is about host.
|
||||
// If this serverInfo is about a container, returns containerName@ServerName
|
||||
func (s ServerInfo) GetServerName() string {
|
||||
if len(s.Container.ContainerID) == 0 {
|
||||
return s.ServerName
|
||||
}
|
||||
return fmt.Sprintf("%s@%s", s.Container.Name, s.ServerName)
|
||||
}
|
||||
|
||||
// Distro has distribution info
|
||||
type Distro struct {
|
||||
Family string
|
||||
Release string
|
||||
}
|
||||
|
||||
func (l Distro) String() string {
|
||||
return fmt.Sprintf("%s %s", l.Family, l.Release)
|
||||
}
|
||||
|
||||
// MajorVersion returns Major version
|
||||
func (l Distro) MajorVersion() (int, error) {
|
||||
if l.Family == Amazon {
|
||||
if isAmazonLinux1(l.Release) {
|
||||
return 1, nil
|
||||
}
|
||||
return 2, nil
|
||||
}
|
||||
if 0 < len(l.Release) {
|
||||
return strconv.Atoi(strings.Split(l.Release, ".")[0])
|
||||
}
|
||||
return 0, xerrors.New("Release is empty")
|
||||
}
|
||||
|
||||
// IsContainer returns whether this ServerInfo is about container
|
||||
func (s ServerInfo) IsContainer() bool {
|
||||
return 0 < len(s.Container.ContainerID)
|
||||
}
|
||||
|
||||
// SetContainer sets the container
|
||||
func (s *ServerInfo) SetContainer(d Container) {
|
||||
s.Container = d
|
||||
}
|
||||
|
||||
// Container has Container information.
|
||||
type Container struct {
|
||||
ContainerID string
|
||||
Name string
|
||||
Image string
|
||||
}
|
||||
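As a quick illustration of how the ServerInfo and Distro helpers above behave, here is a minimal sketch that is not part of this diff. It assumes the package is importable as github.com/future-architect/vuls/config (the import path used by the future-vuls command later in this page); the server, container, and release values are made up.

```
package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	// A plain host: GetServerName returns the server name as-is.
	host := config.ServerInfo{ServerName: "web01"}
	fmt.Println(host.GetServerName(), host.IsContainer()) // web01 false

	// The same server seen as a container: Name@ServerName is returned.
	inContainer := host
	inContainer.SetContainer(config.Container{ContainerID: "abc123", Name: "nginx"})
	fmt.Println(inContainer.GetServerName(), inContainer.IsContainer()) // nginx@web01 true

	// Distro.MajorVersion parses the leading numeric component,
	// with a special case for Amazon Linux release strings.
	d := config.Distro{Family: config.CentOS, Release: "7.9.2009"}
	v, _ := d.MajorVersion()
	fmt.Println(v) // 7
}
```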
@@ -1,103 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSyslogConfValidate(t *testing.T) {
|
||||
var tests = []struct {
|
||||
conf SyslogConf
|
||||
expectedErrLength int
|
||||
}{
|
||||
{
|
||||
conf: SyslogConf{},
|
||||
expectedErrLength: 0,
|
||||
},
|
||||
{
|
||||
conf: SyslogConf{
|
||||
Protocol: "tcp",
|
||||
Port: "5140",
|
||||
},
|
||||
expectedErrLength: 0,
|
||||
},
|
||||
{
|
||||
conf: SyslogConf{
|
||||
Protocol: "udp",
|
||||
Port: "12345",
|
||||
Severity: "emerg",
|
||||
Facility: "user",
|
||||
},
|
||||
expectedErrLength: 0,
|
||||
},
|
||||
{
|
||||
conf: SyslogConf{
|
||||
Protocol: "foo",
|
||||
Port: "514",
|
||||
},
|
||||
expectedErrLength: 1,
|
||||
},
|
||||
{
|
||||
conf: SyslogConf{
|
||||
Protocol: "invalid",
|
||||
Port: "-1",
|
||||
},
|
||||
expectedErrLength: 2,
|
||||
},
|
||||
{
|
||||
conf: SyslogConf{
|
||||
Protocol: "invalid",
|
||||
Port: "invalid",
|
||||
Severity: "invalid",
|
||||
Facility: "invalid",
|
||||
},
|
||||
expectedErrLength: 4,
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
Conf.ToSyslog = true
|
||||
errs := tt.conf.Validate()
|
||||
if len(errs) != tt.expectedErrLength {
|
||||
t.Errorf("test: %d, expected %d, actual %d", i, tt.expectedErrLength, len(errs))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDistro_MajorVersion(t *testing.T) {
|
||||
var tests = []struct {
|
||||
in Distro
|
||||
out int
|
||||
}{
|
||||
{
|
||||
in: Distro{
|
||||
Family: Amazon,
|
||||
Release: "2 (2017.12)",
|
||||
},
|
||||
out: 2,
|
||||
},
|
||||
{
|
||||
in: Distro{
|
||||
Family: Amazon,
|
||||
Release: "2017.12",
|
||||
},
|
||||
out: 1,
|
||||
},
|
||||
{
|
||||
in: Distro{
|
||||
Family: CentOS,
|
||||
Release: "7.10",
|
||||
},
|
||||
out: 7,
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
ver, err := tt.in.MajorVersion()
|
||||
if err != nil {
|
||||
t.Errorf("[%d] err occurred: %s", i, err)
|
||||
}
|
||||
if tt.out != ver {
|
||||
t.Errorf("[%d] expected %d, actual %d", i, tt.out, ver)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// ExploitConf is exploit config
|
||||
type ExploitConf struct {
|
||||
// DB type for exploit dictionary (sqlite3, mysql, postgres or redis)
|
||||
Type string
|
||||
|
||||
// http://exploit-dictionary.com:1324 or DB connection string
|
||||
URL string `json:"-"`
|
||||
|
||||
// /path/to/exploit.sqlite3
|
||||
SQLite3Path string `json:"-"`
|
||||
}
|
||||
|
||||
func (cnf *ExploitConf) setDefault() {
|
||||
if cnf.Type == "" {
|
||||
cnf.Type = "sqlite3"
|
||||
}
|
||||
if cnf.URL == "" && cnf.SQLite3Path == "" {
|
||||
wd, _ := os.Getwd()
|
||||
cnf.SQLite3Path = filepath.Join(wd, "go-exploitdb.sqlite3")
|
||||
}
|
||||
}
|
||||
|
||||
const exploitDBType = "EXPLOITDB_TYPE"
|
||||
const exploitDBURL = "EXPLOITDB_URL"
|
||||
const exploitDBPATH = "EXPLOITDB_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *ExploitConf) Init() {
|
||||
if os.Getenv(exploitDBType) != "" {
|
||||
cnf.Type = os.Getenv(exploitDBType)
|
||||
}
|
||||
if os.Getenv(exploitDBURL) != "" {
|
||||
cnf.URL = os.Getenv(exploitDBURL)
|
||||
}
|
||||
if os.Getenv(exploitDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(exploitDBPATH)
|
||||
}
|
||||
cnf.setDefault()
|
||||
}
|
||||
|
||||
// IsFetchViaHTTP returns whether to fetch via HTTP
|
||||
func (cnf *ExploitConf) IsFetchViaHTTP() bool {
|
||||
return Conf.Exploit.Type == "http"
|
||||
}
|
||||
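The ExploitConf.Init above, and the near-identical Init methods of the CVE, OVAL, gost, and Metasploit dictionary configs that follow, let environment variables override whatever was loaded from config.toml. A minimal sketch of that precedence, assuming the import path used by the future-vuls command later in this page; the concrete values are illustrative only:

```
package main

import (
	"fmt"
	"os"

	"github.com/future-architect/vuls/config"
)

func main() {
	// Pretend config.toml selected sqlite3...
	cnf := config.ExploitConf{Type: "sqlite3"}

	// ...but the environment says redis: Init lets the env win.
	os.Setenv("EXPLOITDB_TYPE", "redis")
	os.Setenv("EXPLOITDB_URL", "redis://localhost/0")
	cnf.Init()

	fmt.Println(cnf.Type, cnf.URL) // redis redis://localhost/0
}
```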
@@ -1,53 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// GoCveDictConf is go-cve-dictionary config
|
||||
type GoCveDictConf struct {
|
||||
// DB type of CVE dictionary (sqlite3, mysql, postgres or redis)
|
||||
Type string
|
||||
|
||||
// http://cve-dictionary.com:1323 or DB connection string
|
||||
URL string `json:"-"`
|
||||
|
||||
// /path/to/cve.sqlite3
|
||||
SQLite3Path string `json:"-"`
|
||||
}
|
||||
|
||||
func (cnf *GoCveDictConf) setDefault() {
|
||||
if cnf.Type == "" {
|
||||
cnf.Type = "sqlite3"
|
||||
}
|
||||
if cnf.URL == "" && cnf.SQLite3Path == "" {
|
||||
wd, _ := os.Getwd()
|
||||
cnf.SQLite3Path = filepath.Join(wd, "cve.sqlite3")
|
||||
}
|
||||
}
|
||||
|
||||
const cveDBType = "CVEDB_TYPE"
|
||||
const cveDBURL = "CVEDB_URL"
|
||||
const cveDBPATH = "CVEDB_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *GoCveDictConf) Init() {
|
||||
if os.Getenv(cveDBType) != "" {
|
||||
cnf.Type = os.Getenv(cveDBType)
|
||||
}
|
||||
if os.Getenv(cveDBURL) != "" {
|
||||
cnf.URL = os.Getenv(cveDBURL)
|
||||
}
|
||||
if os.Getenv(cveDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(cveDBPATH)
|
||||
}
|
||||
cnf.setDefault()
|
||||
}
|
||||
|
||||
// IsFetchViaHTTP returns whether to fetch via HTTP
|
||||
func (cnf *GoCveDictConf) IsFetchViaHTTP() bool {
|
||||
return Conf.CveDict.Type == "http"
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// GostConf is gost config
|
||||
type GostConf struct {
|
||||
// DB type for gost dictionary (sqlite3, mysql, postgres or redis)
|
||||
Type string
|
||||
|
||||
// http://gost-dictionary.com:1324 or DB connection string
|
||||
URL string `json:"-"`
|
||||
|
||||
// /path/to/gost.sqlite3
|
||||
SQLite3Path string `json:"-"`
|
||||
}
|
||||
|
||||
func (cnf *GostConf) setDefault() {
|
||||
if cnf.Type == "" {
|
||||
cnf.Type = "sqlite3"
|
||||
}
|
||||
if cnf.URL == "" && cnf.SQLite3Path == "" {
|
||||
wd, _ := os.Getwd()
|
||||
cnf.SQLite3Path = filepath.Join(wd, "gost.sqlite3")
|
||||
}
|
||||
}
|
||||
|
||||
const gostDBType = "GOSTDB_TYPE"
|
||||
const gostDBURL = "GOSTDB_URL"
|
||||
const gostDBPATH = "GOSTDB_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *GostConf) Init() {
|
||||
if os.Getenv(gostDBType) != "" {
|
||||
cnf.Type = os.Getenv(gostDBType)
|
||||
}
|
||||
if os.Getenv(gostDBURL) != "" {
|
||||
cnf.URL = os.Getenv(gostDBURL)
|
||||
}
|
||||
if os.Getenv(gostDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(gostDBPATH)
|
||||
}
|
||||
cnf.setDefault()
|
||||
}
|
||||
|
||||
// IsFetchViaHTTP returns whether to fetch via HTTP
|
||||
func (cnf *GostConf) IsFetchViaHTTP() bool {
|
||||
return Conf.Gost.Type == "http"
|
||||
}
|
||||
@@ -1,54 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// GovalDictConf is goval-dictionary config
|
||||
type GovalDictConf struct {
|
||||
|
||||
// DB type of OVAL dictionary (sqlite3, mysql, postgres or redis)
|
||||
Type string
|
||||
|
||||
// http://goval-dictionary.com:1324 or DB connection string
|
||||
URL string `json:"-"`
|
||||
|
||||
// /path/to/oval.sqlite3
|
||||
SQLite3Path string `json:"-"`
|
||||
}
|
||||
|
||||
func (cnf *GovalDictConf) setDefault() {
|
||||
if cnf.Type == "" {
|
||||
cnf.Type = "sqlite3"
|
||||
}
|
||||
if cnf.URL == "" && cnf.SQLite3Path == "" {
|
||||
wd, _ := os.Getwd()
|
||||
cnf.SQLite3Path = filepath.Join(wd, "oval.sqlite3")
|
||||
}
|
||||
}
|
||||
|
||||
const govalType = "OVALDB_TYPE"
|
||||
const govalURL = "OVALDB_URL"
|
||||
const govalPATH = "OVALDB_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *GovalDictConf) Init() {
|
||||
if os.Getenv(govalType) != "" {
|
||||
cnf.Type = os.Getenv(govalType)
|
||||
}
|
||||
if os.Getenv(govalURL) != "" {
|
||||
cnf.URL = os.Getenv(govalURL)
|
||||
}
|
||||
if os.Getenv(govalPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(govalPATH)
|
||||
}
|
||||
cnf.setDefault()
|
||||
}
|
||||
|
||||
// IsFetchViaHTTP returns whether to fetch via HTTP
|
||||
func (cnf *GovalDictConf) IsFetchViaHTTP() bool {
|
||||
return Conf.OvalDict.Type == "http"
|
||||
}
|
||||
@@ -1,38 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
)
|
||||
|
||||
// HTTPConf is HTTP config
|
||||
type HTTPConf struct {
|
||||
URL string `valid:"url" json:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *HTTPConf) Validate() (errs []error) {
|
||||
if !Conf.ToHTTP {
|
||||
return nil
|
||||
}
|
||||
|
||||
if _, err := govalidator.ValidateStruct(c); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return errs
|
||||
}
|
||||
|
||||
const httpKey = "VULS_HTTP_URL"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (c *HTTPConf) Init(toml HTTPConf) {
|
||||
if os.Getenv(httpKey) != "" {
|
||||
c.URL = os.Getenv(httpKey)
|
||||
}
|
||||
if toml.URL != "" {
|
||||
c.URL = toml.URL
|
||||
}
|
||||
}
|
||||
@@ -1,9 +0,0 @@
|
||||
package config
|
||||
|
||||
// IPS is
|
||||
type IPS string
|
||||
|
||||
const (
|
||||
// DeepSecurity is
|
||||
DeepSecurity IPS = "deepsecurity"
|
||||
)
|
||||
@@ -1,12 +0,0 @@
|
||||
package config
|
||||
|
||||
import "golang.org/x/xerrors"
|
||||
|
||||
// JSONLoader loads configuration
|
||||
type JSONLoader struct {
|
||||
}
|
||||
|
||||
// Load loads the configuration JSON file specified by the path argument.
|
||||
func (c JSONLoader) Load(path, sudoPass, keyPass string) (err error) {
|
||||
return xerrors.New("Not implement yet")
|
||||
}
|
||||
@@ -1,13 +0,0 @@
|
||||
package config
|
||||
|
||||
// Load loads configuration
|
||||
func Load(path, keyPass string) error {
|
||||
var loader Loader
|
||||
loader = TOMLLoader{}
|
||||
return loader.Load(path, keyPass)
|
||||
}
|
||||
|
||||
// Loader is interface of concrete loader
|
||||
type Loader interface {
|
||||
Load(string, string) error
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// MetasploitConf is metasploit config
|
||||
type MetasploitConf struct {
|
||||
// DB type for metasploit dictionary (sqlite3, mysql, postgres or redis)
|
||||
Type string
|
||||
|
||||
// http://metasploit-dictionary.com:1324 or DB connection string
|
||||
URL string `json:"-"`
|
||||
|
||||
// /path/to/metasploit.sqlite3
|
||||
SQLite3Path string `json:"-"`
|
||||
}
|
||||
|
||||
func (cnf *MetasploitConf) setDefault() {
|
||||
if cnf.Type == "" {
|
||||
cnf.Type = "sqlite3"
|
||||
}
|
||||
if cnf.URL == "" && cnf.SQLite3Path == "" {
|
||||
wd, _ := os.Getwd()
|
||||
cnf.SQLite3Path = filepath.Join(wd, "go-msfdb.sqlite3")
|
||||
}
|
||||
}
|
||||
|
||||
const metasploitDBType = "METASPLOITDB_TYPE"
|
||||
const metasploitDBURL = "METASPLOITDB_URL"
|
||||
const metasploitDBPATH = "METASPLOITDB_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *MetasploitConf) Init() {
|
||||
if os.Getenv(metasploitDBType) != "" {
|
||||
cnf.Type = os.Getenv(metasploitDBType)
|
||||
}
|
||||
if os.Getenv(metasploitDBURL) != "" {
|
||||
cnf.URL = os.Getenv(metasploitDBURL)
|
||||
}
|
||||
if os.Getenv(metasploitDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(metasploitDBPATH)
|
||||
}
|
||||
cnf.setDefault()
|
||||
}
|
||||
|
||||
// IsFetchViaHTTP returns whether to fetch via HTTP
|
||||
func (cnf *MetasploitConf) IsFetchViaHTTP() bool {
|
||||
return Conf.Metasploit.Type == "http"
|
||||
}
|
||||
config/os.go
@@ -1,248 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
// RedHat is
|
||||
RedHat = "redhat"
|
||||
|
||||
// Debian is
|
||||
Debian = "debian"
|
||||
|
||||
// Ubuntu is
|
||||
Ubuntu = "ubuntu"
|
||||
|
||||
// CentOS is
|
||||
CentOS = "centos"
|
||||
|
||||
// Fedora is
|
||||
// Fedora = "fedora"
|
||||
|
||||
// Amazon is
|
||||
Amazon = "amazon"
|
||||
|
||||
// Oracle is
|
||||
Oracle = "oracle"
|
||||
|
||||
// FreeBSD is
|
||||
FreeBSD = "freebsd"
|
||||
|
||||
// Raspbian is
|
||||
Raspbian = "raspbian"
|
||||
|
||||
// Windows is
|
||||
Windows = "windows"
|
||||
|
||||
// OpenSUSE is
|
||||
OpenSUSE = "opensuse"
|
||||
|
||||
// OpenSUSELeap is
|
||||
OpenSUSELeap = "opensuse.leap"
|
||||
|
||||
// SUSEEnterpriseServer is
|
||||
SUSEEnterpriseServer = "suse.linux.enterprise.server"
|
||||
|
||||
// SUSEEnterpriseDesktop is
|
||||
SUSEEnterpriseDesktop = "suse.linux.enterprise.desktop"
|
||||
|
||||
// SUSEOpenstackCloud is
|
||||
SUSEOpenstackCloud = "suse.openstack.cloud"
|
||||
|
||||
// Alpine is
|
||||
Alpine = "alpine"
|
||||
|
||||
// ServerTypePseudo is used for ServerInfo.Type, r.Family
|
||||
ServerTypePseudo = "pseudo"
|
||||
)
|
||||
|
||||
// EOL has End-of-Life information
|
||||
type EOL struct {
|
||||
StandardSupportUntil time.Time
|
||||
ExtendedSupportUntil time.Time
|
||||
Ended bool
|
||||
}
|
||||
|
||||
// IsStandardSupportEnded returns whether standard support has ended at the given time
|
||||
func (e EOL) IsStandardSupportEnded(now time.Time) bool {
|
||||
return e.Ended ||
|
||||
!e.ExtendedSupportUntil.IsZero() && e.StandardSupportUntil.IsZero() ||
|
||||
!e.StandardSupportUntil.IsZero() && now.After(e.StandardSupportUntil)
|
||||
}
|
||||
|
||||
// IsExtendedSuppportEnded returns whether extended support has ended at the given time
|
||||
func (e EOL) IsExtendedSuppportEnded(now time.Time) bool {
|
||||
if e.Ended {
|
||||
return true
|
||||
}
|
||||
if e.StandardSupportUntil.IsZero() && e.ExtendedSupportUntil.IsZero() {
|
||||
return false
|
||||
}
|
||||
return !e.ExtendedSupportUntil.IsZero() && now.After(e.ExtendedSupportUntil) ||
|
||||
e.ExtendedSupportUntil.IsZero() && now.After(e.StandardSupportUntil)
|
||||
}
|
||||
|
||||
// GetEOL returns EOL information for the OS release passed as arguments
|
||||
// https://github.com/aquasecurity/trivy/blob/master/pkg/detector/ospkg/redhat/redhat.go#L20
|
||||
func GetEOL(family, release string) (eol EOL, found bool) {
|
||||
switch family {
|
||||
case Amazon:
|
||||
rel := "2"
|
||||
if isAmazonLinux1(release) {
|
||||
rel = "1"
|
||||
}
|
||||
eol, found = map[string]EOL{
|
||||
"1": {StandardSupportUntil: time.Date(2023, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"2": {},
|
||||
}[rel]
|
||||
case RedHat:
|
||||
// https://access.redhat.com/support/policy/updates/errata
|
||||
eol, found = map[string]EOL{
|
||||
"3": {Ended: true},
|
||||
"4": {Ended: true},
|
||||
"5": {Ended: true},
|
||||
"6": {
|
||||
StandardSupportUntil: time.Date(2020, 11, 30, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"7": {
|
||||
StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"8": {
|
||||
StandardSupportUntil: time.Date(2029, 5, 31, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
}[major(release)]
|
||||
case CentOS:
|
||||
// https://en.wikipedia.org/wiki/CentOS#End-of-support_schedule
|
||||
// TODO Stream
|
||||
eol, found = map[string]EOL{
|
||||
"3": {Ended: true},
|
||||
"4": {Ended: true},
|
||||
"5": {Ended: true},
|
||||
"6": {Ended: true},
|
||||
"7": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"8": {StandardSupportUntil: time.Date(2021, 12, 31, 23, 59, 59, 0, time.UTC)},
|
||||
}[major(release)]
|
||||
case Oracle:
|
||||
eol, found = map[string]EOL{
|
||||
// Source:
|
||||
// https://www.oracle.com/a/ocom/docs/elsp-lifetime-069338.pdf
|
||||
// https://community.oracle.com/docs/DOC-917964
|
||||
"3": {Ended: true},
|
||||
"4": {Ended: true},
|
||||
"5": {Ended: true},
|
||||
"6": {
|
||||
StandardSupportUntil: time.Date(2021, 3, 1, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2024, 3, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"7": {
|
||||
StandardSupportUntil: time.Date(2024, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"8": {
|
||||
StandardSupportUntil: time.Date(2029, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
}[major(release)]
|
||||
case Debian:
|
||||
eol, found = map[string]EOL{
|
||||
// https://wiki.debian.org/LTS
|
||||
"6": {Ended: true},
|
||||
"7": {Ended: true},
|
||||
"8": {Ended: true},
|
||||
"9": {StandardSupportUntil: time.Date(2022, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"10": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
}[major(release)]
|
||||
case Raspbian:
|
||||
// Not found
|
||||
eol, found = map[string]EOL{}[major(release)]
|
||||
case Ubuntu:
|
||||
// https://wiki.ubuntu.com/Releases
|
||||
eol, found = map[string]EOL{
|
||||
"14.10": {Ended: true},
|
||||
"14.04": {
|
||||
ExtendedSupportUntil: time.Date(2022, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"15.04": {Ended: true},
|
||||
"16.10": {Ended: true},
|
||||
"17.04": {Ended: true},
|
||||
"17.10": {Ended: true},
|
||||
"16.04": {
|
||||
StandardSupportUntil: time.Date(2021, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2024, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"18.04": {
|
||||
StandardSupportUntil: time.Date(2023, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
ExtendedSupportUntil: time.Date(2028, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"18.10": {Ended: true},
|
||||
"19.04": {Ended: true},
|
||||
"19.10": {Ended: true},
|
||||
"20.04": {
|
||||
StandardSupportUntil: time.Date(2025, 4, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"21.04": {
|
||||
StandardSupportUntil: time.Date(2022, 1, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
"21.10": {
|
||||
StandardSupportUntil: time.Date(2022, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
},
|
||||
}[release]
|
||||
case SUSEEnterpriseServer:
|
||||
//TODO
|
||||
case Alpine:
|
||||
// https://github.com/aquasecurity/trivy/blob/master/pkg/detector/ospkg/alpine/alpine.go#L19
|
||||
// https://wiki.alpinelinux.org/wiki/Alpine_Linux:Releases
|
||||
eol, found = map[string]EOL{
|
||||
"2.0": {Ended: true},
|
||||
"2.1": {Ended: true},
|
||||
"2.2": {Ended: true},
|
||||
"2.3": {Ended: true},
|
||||
"2.4": {Ended: true},
|
||||
"2.5": {Ended: true},
|
||||
"2.6": {Ended: true},
|
||||
"2.7": {Ended: true},
|
||||
"3.0": {Ended: true},
|
||||
"3.1": {Ended: true},
|
||||
"3.2": {Ended: true},
|
||||
"3.3": {Ended: true},
|
||||
"3.4": {Ended: true},
|
||||
"3.5": {Ended: true},
|
||||
"3.6": {Ended: true},
|
||||
"3.7": {Ended: true},
|
||||
"3.8": {Ended: true},
|
||||
"3.9": {Ended: true},
|
||||
"3.10": {StandardSupportUntil: time.Date(2021, 5, 1, 23, 59, 59, 0, time.UTC)},
|
||||
"3.11": {StandardSupportUntil: time.Date(2021, 11, 1, 23, 59, 59, 0, time.UTC)},
|
||||
"3.12": {StandardSupportUntil: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC)},
|
||||
}[majorDotMinor(release)]
|
||||
case FreeBSD:
|
||||
// https://www.freebsd.org/security/
|
||||
eol, found = map[string]EOL{
|
||||
"7": {Ended: true},
|
||||
"8": {Ended: true},
|
||||
"9": {Ended: true},
|
||||
"10": {Ended: true},
|
||||
"11": {StandardSupportUntil: time.Date(2021, 9, 30, 23, 59, 59, 0, time.UTC)},
|
||||
"12": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
|
||||
}[major(release)]
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func major(osVer string) (majorVersion string) {
|
||||
return strings.Split(osVer, ".")[0]
|
||||
}
|
||||
|
||||
func majorDotMinor(osVer string) (majorDotMinor string) {
|
||||
ss := strings.SplitN(osVer, ".", 3)
|
||||
if len(ss) == 1 {
|
||||
return osVer
|
||||
}
|
||||
return fmt.Sprintf("%s.%s", ss[0], ss[1])
|
||||
}
|
||||
|
||||
func isAmazonLinux1(osRelease string) bool {
|
||||
return len(strings.Fields(osRelease)) == 1
|
||||
}
|
||||
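A short sketch of how GetEOL and the EOL methods above are meant to be consumed; the release and date are arbitrary, and the import path is the one used by the future-vuls command later in this page:

```
package main

import (
	"fmt"
	"time"

	"github.com/future-architect/vuls/config"
)

func main() {
	now := time.Date(2021, 1, 6, 0, 0, 0, 0, time.UTC)

	eol, found := config.GetEOL(config.Ubuntu, "16.04")
	if !found {
		fmt.Println("no EOL data for this release")
		return
	}
	// Ubuntu 16.04: standard support until 2021-04-01, ESM until 2024-04-01,
	// so neither has ended at the chosen date.
	fmt.Println(eol.IsStandardSupportEnded(now))  // false
	fmt.Println(eol.IsExtendedSuppportEnded(now)) // false
}
```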
@@ -1,373 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestEOL_IsStandardSupportEnded(t *testing.T) {
|
||||
type fields struct {
|
||||
family string
|
||||
release string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
fields fields
|
||||
now time.Time
|
||||
found bool
|
||||
stdEnded bool
|
||||
extEnded bool
|
||||
}{
|
||||
// Amazon Linux
|
||||
{
|
||||
name: "amazon linux 1 supported",
|
||||
fields: fields{family: Amazon, release: "2018.03"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "amazon linux 1 eol on 2023-6-30",
|
||||
fields: fields{family: Amazon, release: "2018.03"},
|
||||
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "amazon linux 2 supported",
|
||||
fields: fields{family: Amazon, release: "2 (Karoo)"},
|
||||
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
//RHEL
|
||||
{
|
||||
name: "RHEL7 supported",
|
||||
fields: fields{family: RedHat, release: "7"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "RHEL8 supported",
|
||||
fields: fields{family: RedHat, release: "8"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "RHEL6 eol",
|
||||
fields: fields{family: RedHat, release: "6"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "RHEL9 not found",
|
||||
fields: fields{family: RedHat, release: "9"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
//CentOS
|
||||
{
|
||||
name: "CentOS 7 supported",
|
||||
fields: fields{family: CentOS, release: "7"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "CentOS 8 supported",
|
||||
fields: fields{family: CentOS, release: "8"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "CentOS 6 eol",
|
||||
fields: fields{family: CentOS, release: "6"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "CentOS 9 not found",
|
||||
fields: fields{family: CentOS, release: "9"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
//Oracle
|
||||
{
|
||||
name: "Oracle Linux 7 supported",
|
||||
fields: fields{family: Oracle, release: "7"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Oracle Linux 8 supported",
|
||||
fields: fields{family: Oracle, release: "8"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Oracle Linux 6 eol",
|
||||
fields: fields{family: Oracle, release: "6"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Oracle Linux 9 not found",
|
||||
fields: fields{family: Oracle, release: "9"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
//Ubuntu
|
||||
{
|
||||
name: "Ubuntu 18.04 supported",
|
||||
fields: fields{family: Ubuntu, release: "18.04"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 18.04 ext supported",
|
||||
fields: fields{family: Ubuntu, release: "18.04"},
|
||||
now: time.Date(2025, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 16.04 supported",
|
||||
fields: fields{family: Ubuntu, release: "16.04"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 14.04 eol",
|
||||
fields: fields{family: Ubuntu, release: "14.04"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 14.10 eol",
|
||||
fields: fields{family: Ubuntu, release: "14.10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 12.10 not found",
|
||||
fields: fields{family: Ubuntu, release: "12.10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
found: false,
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 21.04 supported",
|
||||
fields: fields{family: Ubuntu, release: "21.04"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
found: true,
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
},
|
||||
//Debian
|
||||
{
|
||||
name: "Debian 9 supported",
|
||||
fields: fields{family: Debian, release: "9"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Debian 10 supported",
|
||||
fields: fields{family: Debian, release: "10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Debian 8 supported",
|
||||
fields: fields{family: Debian, release: "8"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Debian 11 supported",
|
||||
fields: fields{family: Debian, release: "11"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
//alpine
|
||||
{
|
||||
name: "alpine 3.10 supported",
|
||||
fields: fields{family: Alpine, release: "3.10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.11 supported",
|
||||
fields: fields{family: Alpine, release: "3.11"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.12 supported",
|
||||
fields: fields{family: Alpine, release: "3.12"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.9 eol",
|
||||
fields: fields{family: Alpine, release: "3.9"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.13 not found",
|
||||
fields: fields{family: Alpine, release: "3.13"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
// freebsd
|
||||
{
|
||||
name: "freebsd 11 supported",
|
||||
fields: fields{family: FreeBSD, release: "11"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "freebsd 11 eol on 2021-9-30",
|
||||
fields: fields{family: FreeBSD, release: "11"},
|
||||
now: time.Date(2021, 10, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "freebsd 12 supported",
|
||||
fields: fields{family: FreeBSD, release: "12"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "freebsd 10 eol",
|
||||
fields: fields{family: FreeBSD, release: "10"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
eol, found := GetEOL(tt.fields.family, tt.fields.release)
|
||||
if found != tt.found {
|
||||
t.Errorf("GetEOL.found = %v, want %v", found, tt.found)
|
||||
}
|
||||
if found {
|
||||
if got := eol.IsStandardSupportEnded(tt.now); got != tt.stdEnded {
|
||||
t.Errorf("EOL.IsStandardSupportEnded() = %v, want %v", got, tt.stdEnded)
|
||||
}
|
||||
if got := eol.IsExtendedSuppportEnded(tt.now); got != tt.extEnded {
|
||||
t.Errorf("EOL.IsExtendedSupportEnded() = %v, want %v", got, tt.extEnded)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_majorDotMinor(t *testing.T) {
|
||||
type args struct {
|
||||
osVer string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
wantMajorDotMinor string
|
||||
}{
|
||||
{
|
||||
name: "empty",
|
||||
args: args{
|
||||
osVer: "",
|
||||
},
|
||||
wantMajorDotMinor: "",
|
||||
},
|
||||
{
|
||||
name: "major",
|
||||
args: args{
|
||||
osVer: "3",
|
||||
},
|
||||
wantMajorDotMinor: "3",
|
||||
},
|
||||
{
|
||||
name: "major dot minor",
|
||||
args: args{
|
||||
osVer: "3.1",
|
||||
},
|
||||
wantMajorDotMinor: "3.1",
|
||||
},
|
||||
{
|
||||
name: "major dot minor dot release",
|
||||
args: args{
|
||||
osVer: "3.1.4",
|
||||
},
|
||||
wantMajorDotMinor: "3.1",
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if gotMajorDotMinor := majorDotMinor(tt.args.osVer); gotMajorDotMinor != tt.wantMajorDotMinor {
|
||||
t.Errorf("majorDotMinor() = %v, want %v", gotMajorDotMinor, tt.wantMajorDotMinor)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// SaasConf is FutureVuls config
|
||||
type SaasConf struct {
|
||||
GroupID int64 `json:"-"`
|
||||
Token string `json:"-"`
|
||||
URL string `json:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *SaasConf) Validate() (errs []error) {
|
||||
if c.GroupID == 0 {
|
||||
errs = append(errs, xerrors.New("GroupID must not be empty"))
|
||||
}
|
||||
|
||||
if len(c.Token) == 0 {
|
||||
errs = append(errs, xerrors.New("Token must not be empty"))
|
||||
}
|
||||
|
||||
if len(c.URL) == 0 {
|
||||
errs = append(errs, xerrors.New("URL must not be empty"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1,110 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// ScanMode represents the scan mode: fast, fast-root, deep, or offline
|
||||
type ScanMode struct {
|
||||
flag byte
|
||||
}
|
||||
|
||||
const (
|
||||
// Fast is the fast scan mode
|
||||
Fast = byte(1 << iota)
|
||||
// FastRoot is the fast-root scan mode
|
||||
FastRoot
|
||||
// Deep is the deep scan mode
|
||||
Deep
|
||||
// Offline is the offline scan mode
|
||||
Offline
|
||||
|
||||
fastStr = "fast"
|
||||
fastRootStr = "fast-root"
|
||||
deepStr = "deep"
|
||||
offlineStr = "offline"
|
||||
)
|
||||
|
||||
// Set mode
|
||||
func (s *ScanMode) Set(f byte) {
|
||||
s.flag |= f
|
||||
}
|
||||
|
||||
// IsFast returns whether the scan mode is fast
|
||||
func (s ScanMode) IsFast() bool {
|
||||
return s.flag&Fast == Fast
|
||||
}
|
||||
|
||||
// IsFastRoot returns whether the scan mode is fast-root
|
||||
func (s ScanMode) IsFastRoot() bool {
|
||||
return s.flag&FastRoot == FastRoot
|
||||
}
|
||||
|
||||
// IsDeep returns whether the scan mode is deep
|
||||
func (s ScanMode) IsDeep() bool {
|
||||
return s.flag&Deep == Deep
|
||||
}
|
||||
|
||||
// IsOffline returns whether the scan mode is offline
|
||||
func (s ScanMode) IsOffline() bool {
|
||||
return s.flag&Offline == Offline
|
||||
}
|
||||
|
||||
func (s *ScanMode) ensure() error {
|
||||
numTrue := 0
|
||||
for _, b := range []bool{s.IsFast(), s.IsFastRoot(), s.IsDeep()} {
|
||||
if b {
|
||||
numTrue++
|
||||
}
|
||||
}
|
||||
if numTrue == 0 {
|
||||
s.Set(Fast)
|
||||
} else if s.IsDeep() && s.IsOffline() {
|
||||
return xerrors.New("Don't specify both of deep and offline")
|
||||
} else if numTrue != 1 {
|
||||
return xerrors.New("Specify only one of offline, fast, fast-root or deep")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s ScanMode) String() string {
|
||||
ss := ""
|
||||
if s.IsFast() {
|
||||
ss = fastStr
|
||||
} else if s.IsFastRoot() {
|
||||
ss = fastRootStr
|
||||
} else if s.IsDeep() {
|
||||
ss = deepStr
|
||||
}
|
||||
if s.IsOffline() {
|
||||
ss += " " + offlineStr
|
||||
}
|
||||
return ss + " mode"
|
||||
}
|
||||
|
||||
func setScanMode(server *ServerInfo, d ServerInfo) error {
|
||||
if len(server.ScanMode) == 0 {
|
||||
server.ScanMode = Conf.Default.ScanMode
|
||||
}
|
||||
for _, m := range server.ScanMode {
|
||||
switch strings.ToLower(m) {
|
||||
case fastStr:
|
||||
server.Mode.Set(Fast)
|
||||
case fastRootStr:
|
||||
server.Mode.Set(FastRoot)
|
||||
case deepStr:
|
||||
server.Mode.Set(Deep)
|
||||
case offlineStr:
|
||||
server.Mode.Set(Offline)
|
||||
default:
|
||||
return xerrors.Errorf("scanMode: %s of %s is invalid. Specify -fast, -fast-root, -deep or offline",
|
||||
m, server.ServerName)
|
||||
}
|
||||
}
|
||||
if err := server.Mode.ensure(); err != nil {
|
||||
return xerrors.Errorf("%s in %s", err, server.ServerName)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
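The ScanMode flags above combine with bitwise OR, so a mode such as fast-root can be paired with offline. A small usage sketch, under the same import-path assumption as the earlier examples:

```
package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	var m config.ScanMode
	m.Set(config.FastRoot)
	m.Set(config.Offline)

	fmt.Println(m.IsFastRoot(), m.IsOffline()) // true true
	fmt.Println(m)                             // fast-root offline mode
}
```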
@@ -1,97 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// ScanModule represents which scan modules are enabled
|
||||
type ScanModule struct {
|
||||
flag byte
|
||||
}
|
||||
|
||||
const (
|
||||
// OSPkg is the OS package scan module
|
||||
OSPkg = byte(1 << iota)
|
||||
// WordPress is the WordPress scan module
|
||||
WordPress
|
||||
// Lockfile is the lockfile scan module
|
||||
Lockfile
|
||||
// Port is the port scan module
|
||||
Port
|
||||
|
||||
osPkgStr = "ospkg"
|
||||
wordPressStr = "wordpress"
|
||||
lockfileStr = "lockfile"
|
||||
portStr = "port"
|
||||
)
|
||||
|
||||
var allModules = []string{osPkgStr, wordPressStr, lockfileStr, portStr}
|
||||
|
||||
// Set module
|
||||
func (s *ScanModule) Set(f byte) {
|
||||
s.flag |= f
|
||||
}
|
||||
|
||||
// IsScanOSPkg returns whether to scan OS packages
|
||||
func (s ScanModule) IsScanOSPkg() bool {
|
||||
return s.flag&OSPkg == OSPkg
|
||||
}
|
||||
|
||||
// IsScanWordPress returns whether to scan WordPress
|
||||
func (s ScanModule) IsScanWordPress() bool {
|
||||
return s.flag&WordPress == WordPress
|
||||
}
|
||||
|
||||
// IsScanLockFile returns whether to scan lock files
|
||||
func (s ScanModule) IsScanLockFile() bool {
|
||||
return s.flag&Lockfile == Lockfile
|
||||
}
|
||||
|
||||
// IsScanPort returns whether to scan listening ports
|
||||
func (s ScanModule) IsScanPort() bool {
|
||||
return s.flag&Port == Port
|
||||
}
|
||||
|
||||
// IsZero returns whether all module flags are false
|
||||
func (s ScanModule) IsZero() bool {
|
||||
return !(s.IsScanOSPkg() || s.IsScanWordPress() || s.IsScanLockFile() || s.IsScanPort())
|
||||
}
|
||||
|
||||
func (s *ScanModule) ensure() error {
|
||||
if s.IsZero() {
|
||||
s.Set(OSPkg)
|
||||
s.Set(WordPress)
|
||||
s.Set(Lockfile)
|
||||
s.Set(Port)
|
||||
} else if !s.IsScanOSPkg() && s.IsScanPort() {
|
||||
return xerrors.New("When specifying the Port, Specify OSPkg as well")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func setScanModules(server *ServerInfo, d ServerInfo) error {
|
||||
if len(server.ScanModules) == 0 {
|
||||
server.ScanModules = d.ScanModules
|
||||
}
|
||||
for _, m := range server.ScanModules {
|
||||
switch strings.ToLower(m) {
|
||||
case osPkgStr:
|
||||
server.Module.Set(OSPkg)
|
||||
case wordPressStr:
|
||||
server.Module.Set(WordPress)
|
||||
case lockfileStr:
|
||||
server.Module.Set(Lockfile)
|
||||
case portStr:
|
||||
server.Module.Set(Port)
|
||||
default:
|
||||
return xerrors.Errorf("scanMode: %s of %s is invalid. Specify %s",
|
||||
m, server.ServerName, allModules)
|
||||
}
|
||||
}
|
||||
if err := server.Module.ensure(); err != nil {
|
||||
return xerrors.Errorf("%s in %s", err, server.ServerName)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
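ScanModule works the same way as ScanMode, one bit per module. A sketch of selecting modules (import path as before); note that the ensure() check above rejects port scanning without ospkg, which only comes into play when a real config is loaded:

```
package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	var mod config.ScanModule
	fmt.Println(mod.IsZero()) // true: nothing selected yet

	mod.Set(config.OSPkg)
	mod.Set(config.Port)
	fmt.Println(mod.IsScanOSPkg(), mod.IsScanPort(), mod.IsScanWordPress()) // true true false
}
```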
@@ -1,65 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestScanModule_IsZero(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
modes []byte
|
||||
want bool
|
||||
}{
|
||||
{
|
||||
name: "not zero",
|
||||
modes: []byte{OSPkg},
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
name: "zero",
|
||||
modes: []byte{},
|
||||
want: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
s := ScanModule{}
|
||||
for _, b := range tt.modes {
|
||||
s.Set(b)
|
||||
}
|
||||
if got := s.IsZero(); got != tt.want {
|
||||
t.Errorf("ScanModule.IsZero() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestScanModule_validate(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
modes []byte
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "valid",
|
||||
modes: []byte{},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "err",
|
||||
modes: []byte{WordPress, Lockfile, Port},
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
s := ScanModule{}
|
||||
for _, b := range tt.modes {
|
||||
s.Set(b)
|
||||
}
|
||||
if err := s.ensure(); (err != nil) != tt.wantErr {
|
||||
t.Errorf("ScanModule.validate() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// SlackConf is slack config
|
||||
type SlackConf struct {
|
||||
HookURL string `valid:"url" json:"-" toml:"hookURL,omitempty"`
|
||||
LegacyToken string `json:"-" toml:"legacyToken,omitempty"`
|
||||
Channel string `json:"-" toml:"channel,omitempty"`
|
||||
IconEmoji string `json:"-" toml:"iconEmoji,omitempty"`
|
||||
AuthUser string `json:"-" toml:"authUser,omitempty"`
|
||||
NotifyUsers []string `toml:"notifyUsers,omitempty" json:"-"`
|
||||
Text string `json:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *SlackConf) Validate() (errs []error) {
|
||||
if !Conf.ToSlack {
|
||||
return
|
||||
}
|
||||
|
||||
if len(c.HookURL) == 0 && len(c.LegacyToken) == 0 {
|
||||
errs = append(errs, xerrors.New("slack.hookURL or slack.LegacyToken must not be empty"))
|
||||
}
|
||||
|
||||
if len(c.Channel) == 0 {
|
||||
errs = append(errs, xerrors.New("slack.channel must not be empty"))
|
||||
} else {
|
||||
if !(strings.HasPrefix(c.Channel, "#") ||
|
||||
c.Channel == "${servername}") {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"channel's prefix must be '#', channel: %s", c.Channel))
|
||||
}
|
||||
}
|
||||
|
||||
if len(c.AuthUser) == 0 {
|
||||
errs = append(errs, xerrors.New("slack.authUser must not be empty"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
@@ -1,65 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// SMTPConf is smtp config
|
||||
type SMTPConf struct {
|
||||
SMTPAddr string `toml:"smtpAddr,omitempty" json:"-"`
|
||||
SMTPPort string `toml:"smtpPort,omitempty" valid:"port" json:"-"`
|
||||
User string `toml:"user,omitempty" json:"-"`
|
||||
Password string `toml:"password,omitempty" json:"-"`
|
||||
From string `toml:"from,omitempty" json:"-"`
|
||||
To []string `toml:"to,omitempty" json:"-"`
|
||||
Cc []string `toml:"cc,omitempty" json:"-"`
|
||||
SubjectPrefix string `toml:"subjectPrefix,omitempty" json:"-"`
|
||||
}
|
||||
|
||||
func checkEmails(emails []string) (errs []error) {
|
||||
for _, addr := range emails {
|
||||
if len(addr) == 0 {
|
||||
return
|
||||
}
|
||||
if ok := govalidator.IsEmail(addr); !ok {
|
||||
errs = append(errs, xerrors.Errorf("Invalid email address. email: %s", addr))
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Validate SMTP configuration
|
||||
func (c *SMTPConf) Validate() (errs []error) {
|
||||
if !Conf.ToEmail {
|
||||
return
|
||||
}
|
||||
// Check email format
|
||||
emails := []string{}
|
||||
emails = append(emails, c.From)
|
||||
emails = append(emails, c.To...)
|
||||
emails = append(emails, c.Cc...)
|
||||
|
||||
if emailErrs := checkEmails(emails); 0 < len(emailErrs) {
|
||||
errs = append(errs, emailErrs...)
|
||||
}
|
||||
|
||||
if len(c.SMTPAddr) == 0 {
|
||||
errs = append(errs, xerrors.New("email.smtpAddr must not be empty"))
|
||||
}
|
||||
if len(c.SMTPPort) == 0 {
|
||||
errs = append(errs, xerrors.New("email.smtpPort must not be empty"))
|
||||
}
|
||||
if len(c.To) == 0 {
|
||||
errs = append(errs, xerrors.New("email.To required at least one address"))
|
||||
}
|
||||
if len(c.From) == 0 {
|
||||
errs = append(errs, xerrors.New("email.From required at least one address"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1,129 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"log/syslog"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// SyslogConf is syslog config
|
||||
type SyslogConf struct {
|
||||
Protocol string `json:"-"`
|
||||
Host string `valid:"host" json:"-"`
|
||||
Port string `valid:"port" json:"-"`
|
||||
Severity string `json:"-"`
|
||||
Facility string `json:"-"`
|
||||
Tag string `json:"-"`
|
||||
Verbose bool `json:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *SyslogConf) Validate() (errs []error) {
|
||||
if !Conf.ToSyslog {
|
||||
return nil
|
||||
}
|
||||
// If protocol is empty, it will connect to the local syslog server.
|
||||
if len(c.Protocol) > 0 && c.Protocol != "tcp" && c.Protocol != "udp" {
|
||||
errs = append(errs, errors.New(`syslog.protocol must be "tcp" or "udp"`))
|
||||
}
|
||||
|
||||
// Default port: 514
|
||||
if c.Port == "" {
|
||||
c.Port = "514"
|
||||
}
|
||||
|
||||
if _, err := c.GetSeverity(); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if _, err := c.GetFacility(); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if _, err := govalidator.ValidateStruct(c); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return errs
|
||||
}
|
||||
|
||||
// GetSeverity gets severity
|
||||
func (c *SyslogConf) GetSeverity() (syslog.Priority, error) {
|
||||
if c.Severity == "" {
|
||||
return syslog.LOG_INFO, nil
|
||||
}
|
||||
|
||||
switch c.Severity {
|
||||
case "emerg":
|
||||
return syslog.LOG_EMERG, nil
|
||||
case "alert":
|
||||
return syslog.LOG_ALERT, nil
|
||||
case "crit":
|
||||
return syslog.LOG_CRIT, nil
|
||||
case "err":
|
||||
return syslog.LOG_ERR, nil
|
||||
case "warning":
|
||||
return syslog.LOG_WARNING, nil
|
||||
case "notice":
|
||||
return syslog.LOG_NOTICE, nil
|
||||
case "info":
|
||||
return syslog.LOG_INFO, nil
|
||||
case "debug":
|
||||
return syslog.LOG_DEBUG, nil
|
||||
default:
|
||||
return -1, xerrors.Errorf("Invalid severity: %s", c.Severity)
|
||||
}
|
||||
}
|
||||
|
||||
// GetFacility gets facility
|
||||
func (c *SyslogConf) GetFacility() (syslog.Priority, error) {
|
||||
if c.Facility == "" {
|
||||
return syslog.LOG_AUTH, nil
|
||||
}
|
||||
|
||||
switch c.Facility {
|
||||
case "kern":
|
||||
return syslog.LOG_KERN, nil
|
||||
case "user":
|
||||
return syslog.LOG_USER, nil
|
||||
case "mail":
|
||||
return syslog.LOG_MAIL, nil
|
||||
case "daemon":
|
||||
return syslog.LOG_DAEMON, nil
|
||||
case "auth":
|
||||
return syslog.LOG_AUTH, nil
|
||||
case "syslog":
|
||||
return syslog.LOG_SYSLOG, nil
|
||||
case "lpr":
|
||||
return syslog.LOG_LPR, nil
|
||||
case "news":
|
||||
return syslog.LOG_NEWS, nil
|
||||
case "uucp":
|
||||
return syslog.LOG_UUCP, nil
|
||||
case "cron":
|
||||
return syslog.LOG_CRON, nil
|
||||
case "authpriv":
|
||||
return syslog.LOG_AUTHPRIV, nil
|
||||
case "ftp":
|
||||
return syslog.LOG_FTP, nil
|
||||
case "local0":
|
||||
return syslog.LOG_LOCAL0, nil
|
||||
case "local1":
|
||||
return syslog.LOG_LOCAL1, nil
|
||||
case "local2":
|
||||
return syslog.LOG_LOCAL2, nil
|
||||
case "local3":
|
||||
return syslog.LOG_LOCAL3, nil
|
||||
case "local4":
|
||||
return syslog.LOG_LOCAL4, nil
|
||||
case "local5":
|
||||
return syslog.LOG_LOCAL5, nil
|
||||
case "local6":
|
||||
return syslog.LOG_LOCAL6, nil
|
||||
case "local7":
|
||||
return syslog.LOG_LOCAL7, nil
|
||||
default:
|
||||
return -1, xerrors.Errorf("Invalid facility: %s", c.Facility)
|
||||
}
|
||||
}
|
||||
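To show how the severity and facility strings above map to log/syslog priorities, here is a minimal sketch (import path as before; the chosen values are arbitrary):

```
package main

import (
	"fmt"
	"log/syslog"

	"github.com/future-architect/vuls/config"
)

func main() {
	c := config.SyslogConf{Protocol: "udp", Port: "514", Severity: "warning", Facility: "local0"}

	sev, err := c.GetSeverity()
	if err != nil {
		panic(err)
	}
	fac, err := c.GetFacility()
	if err != nil {
		panic(err)
	}
	fmt.Println(sev == syslog.LOG_WARNING, fac == syslog.LOG_LOCAL0) // true true
}
```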
@@ -1,32 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// TelegramConf is Telegram config
|
||||
type TelegramConf struct {
|
||||
Token string `json:"-"`
|
||||
ChatID string `json:"-"`
|
||||
}
|
||||
|
||||
// Validate validates configuration
|
||||
func (c *TelegramConf) Validate() (errs []error) {
|
||||
if !Conf.ToTelegram {
|
||||
return
|
||||
}
|
||||
if len(c.ChatID) == 0 {
|
||||
errs = append(errs, xerrors.New("TelegramConf.ChatID must not be empty"))
|
||||
}
|
||||
|
||||
if len(c.Token) == 0 {
|
||||
errs = append(errs, xerrors.New("TelegramConf.Token must not be empty"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1,241 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/knqyf263/go-cpe/naming"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// TOMLLoader loads config
|
||||
type TOMLLoader struct {
|
||||
}
|
||||
|
||||
// Load loads the configuration TOML file specified by the path argument.
|
||||
func (c TOMLLoader) Load(pathToToml, keyPass string) error {
|
||||
if _, err := toml.DecodeFile(pathToToml, &Conf); err != nil {
|
||||
return err
|
||||
}
|
||||
if keyPass != "" {
|
||||
Conf.Default.KeyPassword = keyPass
|
||||
}
|
||||
|
||||
Conf.CveDict.Init()
|
||||
Conf.OvalDict.Init()
|
||||
Conf.Gost.Init()
|
||||
Conf.Exploit.Init()
|
||||
Conf.Metasploit.Init()
|
||||
|
||||
index := 0
|
||||
for name, server := range Conf.Servers {
|
||||
server.ServerName = name
|
||||
if 0 < len(server.KeyPassword) {
|
||||
return xerrors.Errorf("[Deprecated] KEYPASSWORD IN CONFIG FILE ARE UNSECURE. REMOVE THEM IMMEDIATELY FOR A SECURITY REASONS. THEY WILL BE REMOVED IN A FUTURE RELEASE: %s", name)
|
||||
}
|
||||
|
||||
if err := setDefaultIfEmpty(&server, Conf.Default); err != nil {
|
||||
return xerrors.Errorf("Failed to set default value to config. server: %s, err: %w", name, err)
|
||||
}
|
||||
|
||||
if err := setScanMode(&server, Conf.Default); err != nil {
|
||||
return xerrors.Errorf("Failed to set ScanMode: %w", err)
|
||||
}
|
||||
|
||||
if err := setScanModules(&server, Conf.Default); err != nil {
|
||||
return xerrors.Errorf("Failed to set ScanModule: %w", err)
|
||||
}
|
||||
|
||||
if len(server.CpeNames) == 0 {
|
||||
server.CpeNames = Conf.Default.CpeNames
|
||||
}
|
||||
for i, n := range server.CpeNames {
|
||||
uri, err := toCpeURI(n)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to parse CPENames %s in %s, err: %w", n, name, err)
|
||||
}
|
||||
server.CpeNames[i] = uri
|
||||
}
|
||||
|
||||
for _, cve := range Conf.Default.IgnoreCves {
|
||||
found := false
|
||||
for _, c := range server.IgnoreCves {
|
||||
if cve == c {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
server.IgnoreCves = append(server.IgnoreCves, cve)
|
||||
}
|
||||
}
|
||||
|
||||
for _, pkg := range Conf.Default.IgnorePkgsRegexp {
|
||||
found := false
|
||||
for _, p := range server.IgnorePkgsRegexp {
|
||||
if pkg == p {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
server.IgnorePkgsRegexp = append(server.IgnorePkgsRegexp, pkg)
|
||||
}
|
||||
}
|
||||
for _, reg := range server.IgnorePkgsRegexp {
|
||||
_, err := regexp.Compile(reg)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to parse %s in %s. err: %w", reg, name, err)
|
||||
}
|
||||
}
|
||||
for contName, cont := range server.Containers {
|
||||
for _, reg := range cont.IgnorePkgsRegexp {
|
||||
_, err := regexp.Compile(reg)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to parse %s in %s@%s. err: %w",
|
||||
reg, contName, name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for ownerRepo, githubSetting := range server.GitHubRepos {
|
||||
if ss := strings.Split(ownerRepo, "/"); len(ss) != 2 {
|
||||
return xerrors.Errorf("Failed to parse GitHub owner/repo: %s in %s",
|
||||
ownerRepo, name)
|
||||
}
|
||||
if githubSetting.Token == "" {
|
||||
return xerrors.Errorf("GitHub owner/repo: %s in %s token is empty",
|
||||
ownerRepo, name)
|
||||
}
|
||||
}
|
||||
|
||||
if len(server.Enablerepo) == 0 {
|
||||
server.Enablerepo = Conf.Default.Enablerepo
|
||||
}
|
||||
if len(server.Enablerepo) != 0 {
|
||||
for _, repo := range server.Enablerepo {
|
||||
switch repo {
|
||||
case "base", "updates":
|
||||
// nop
|
||||
default:
|
||||
return xerrors.Errorf(
|
||||
"For now, enablerepo have to be base or updates: %s",
|
||||
server.Enablerepo)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
server.LogMsgAnsiColor = Colors[index%len(Colors)]
|
||||
index++
|
||||
|
||||
Conf.Servers[name] = server
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func setDefaultIfEmpty(server *ServerInfo, d ServerInfo) error {
|
||||
if server.Type != ServerTypePseudo {
|
||||
if len(server.Host) == 0 {
|
||||
return xerrors.Errorf("server.host is empty")
|
||||
}
|
||||
|
||||
if len(server.JumpServer) == 0 {
|
||||
server.JumpServer = Conf.Default.JumpServer
|
||||
}
|
||||
|
||||
if server.Port == "" {
|
||||
if Conf.Default.Port != "" {
|
||||
server.Port = Conf.Default.Port
|
||||
} else {
|
||||
server.Port = "22"
|
||||
}
|
||||
}
|
||||
|
||||
if server.User == "" {
|
||||
server.User = Conf.Default.User
|
||||
if server.User == "" && server.Port != "local" {
|
||||
return xerrors.Errorf("server.user is empty")
|
||||
}
|
||||
}
|
||||
|
||||
if server.SSHConfigPath == "" {
|
||||
server.SSHConfigPath = Conf.Default.SSHConfigPath
|
||||
}
|
||||
|
||||
if server.KeyPath == "" {
|
||||
server.KeyPath = Conf.Default.KeyPath
|
||||
}
|
||||
|
||||
if server.KeyPassword == "" {
|
||||
server.KeyPassword = Conf.Default.KeyPassword
|
||||
}
|
||||
}
|
||||
|
||||
if len(server.Lockfiles) == 0 {
|
||||
server.Lockfiles = Conf.Default.Lockfiles
|
||||
}
|
||||
|
||||
if len(server.ContainersIncluded) == 0 {
|
||||
server.ContainersIncluded = Conf.Default.ContainersIncluded
|
||||
}
|
||||
|
||||
if len(server.ContainersExcluded) == 0 {
|
||||
server.ContainersExcluded = Conf.Default.ContainersExcluded
|
||||
}
|
||||
|
||||
if server.ContainerType == "" {
|
||||
server.ContainerType = Conf.Default.ContainerType
|
||||
}
|
||||
|
||||
for contName, cont := range server.Containers {
|
||||
cont.IgnoreCves = append(cont.IgnoreCves, Conf.Default.IgnoreCves...)
|
||||
server.Containers[contName] = cont
|
||||
}
|
||||
|
||||
if server.OwaspDCXMLPath == "" {
|
||||
server.OwaspDCXMLPath = Conf.Default.OwaspDCXMLPath
|
||||
}
|
||||
|
||||
if server.Memo == "" {
|
||||
server.Memo = Conf.Default.Memo
|
||||
}
|
||||
|
||||
// TODO set default WordPress
|
||||
if server.WordPress == nil {
|
||||
server.WordPress = &WordPressConf{}
|
||||
}
|
||||
//TODO set nil in config re-generate in saas subcmd
|
||||
|
||||
if len(server.IgnoredJSONKeys) == 0 {
|
||||
server.IgnoredJSONKeys = Conf.Default.IgnoredJSONKeys
|
||||
}
|
||||
|
||||
opt := map[string]interface{}{}
|
||||
for k, v := range Conf.Default.Optional {
|
||||
opt[k] = v
|
||||
}
|
||||
for k, v := range server.Optional {
|
||||
opt[k] = v
|
||||
}
|
||||
server.Optional = opt
|
||||
|
||||
return nil
|
||||
}
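The Optional merge above gives per-server values precedence over the defaults. A minimal sketch of that precedence, using plain maps and hypothetical keys rather than the ServerInfo type:

```
package main

import "fmt"

func main() {
	defaults := map[string]interface{}{"timeout": 300, "region": "ap-northeast-1"}
	server := map[string]interface{}{"timeout": 60}

	// Copy the defaults first, then let the per-server values overwrite them.
	merged := map[string]interface{}{}
	for k, v := range defaults {
		merged[k] = v
	}
	for k, v := range server {
		merged[k] = v
	}
	fmt.Println(merged) // map[region:ap-northeast-1 timeout:60]
}
```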
|
||||
|
||||
func toCpeURI(cpename string) (string, error) {
|
||||
if strings.HasPrefix(cpename, "cpe:2.3:") {
|
||||
wfn, err := naming.UnbindFS(cpename)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return naming.BindToURI(wfn), nil
|
||||
} else if strings.HasPrefix(cpename, "cpe:/") {
|
||||
wfn, err := naming.UnbindURI(cpename)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return naming.BindToURI(wfn), nil
|
||||
}
|
||||
return "", xerrors.Errorf("Unknown CPE format: %s", cpename)
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestToCpeURI(t *testing.T) {
|
||||
var tests = []struct {
|
||||
in string
|
||||
expected string
|
||||
err bool
|
||||
}{
|
||||
{
|
||||
in: "",
|
||||
expected: "",
|
||||
err: true,
|
||||
},
|
||||
{
|
||||
in: "cpe:/a:microsoft:internet_explorer:10",
|
||||
expected: "cpe:/a:microsoft:internet_explorer:10",
|
||||
err: false,
|
||||
},
|
||||
{
|
||||
in: "cpe:2.3:a:microsoft:internet_explorer:10:*:*:*:*:*:*:*",
|
||||
expected: "cpe:/a:microsoft:internet_explorer:10",
|
||||
err: false,
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
actual, err := toCpeURI(tt.in)
|
||||
if err != nil && !tt.err {
|
||||
t.Errorf("[%d] unexpected error occurred, in: %s act: %s, exp: %s",
|
||||
i, tt.in, actual, tt.expected)
|
||||
} else if err == nil && tt.err {
|
||||
t.Errorf("[%d] expected error is not occurred, in: %s act: %s, exp: %s",
|
||||
i, tt.in, actual, tt.expected)
|
||||
}
|
||||
if actual != tt.expected {
|
||||
t.Errorf("[%d] in: %s, actual: %s, expected: %s",
|
||||
i, tt.in, actual, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,38 +0,0 @@
|
||||
# future-vuls
|
||||
|
||||
## Main Features
|
||||
|
||||
- upload vuls results json to future-vuls
|
||||
|
||||
## Installation
|
||||
|
||||
```
|
||||
git clone https://github.com/future-architect/vuls.git
cd vuls
make build-future-vuls
|
||||
```
|
||||
|
||||
## Command Reference
|
||||
|
||||
```
|
||||
Upload to FutureVuls
|
||||
|
||||
Usage:
|
||||
future-vuls upload [flags]
|
||||
|
||||
Flags:
|
||||
--config string config file (default is $HOME/.cobra.yaml)
|
||||
-g, --group-id int future vuls group id, ENV: VULS_GROUP_ID
|
||||
-h, --help help for upload
|
||||
-s, --stdin input from stdin. ENV: VULS_STDIN
|
||||
-t, --token string future vuls token
|
||||
--url string future vuls upload url
|
||||
--uuid string server uuid. ENV: VULS_SERVER_UUID
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
- upload results json
|
||||
|
||||
```
|
||||
cat results.json | future-vuls upload --stdin --token xxxx --url https://xxxx --group-id 1 --uuid xxxx
|
||||
```
|
||||
@@ -1,98 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/saas"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
configFile string
|
||||
stdIn bool
|
||||
jsonDir string
|
||||
serverUUID string
|
||||
groupID int64
|
||||
token string
|
||||
url string
|
||||
)
|
||||
|
||||
func main() {
|
||||
var err error
|
||||
var cmdFvulsUploader = &cobra.Command{
|
||||
Use: "upload",
|
||||
Short: "Upload to FutureVuls",
|
||||
Long: `Upload to FutureVuls`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if len(serverUUID) == 0 {
|
||||
serverUUID = os.Getenv("VULS_SERVER_UUID")
|
||||
}
|
||||
if groupID == 0 {
|
||||
envGroupID := os.Getenv("VULS_GROUP_ID")
|
||||
if groupID, err = strconv.ParseInt(envGroupID, 10, 64); err != nil {
|
||||
fmt.Printf("Invalid GroupID: %s\n", envGroupID)
|
||||
return
|
||||
}
|
||||
}
|
||||
if len(url) == 0 {
|
||||
url = os.Getenv("VULS_URL")
|
||||
}
|
||||
if len(token) == 0 {
|
||||
token = os.Getenv("VULS_TOKEN")
|
||||
}
|
||||
|
||||
var scanResultJSON []byte
|
||||
if stdIn {
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
buf := new(bytes.Buffer)
|
||||
if _, err = buf.ReadFrom(reader); err != nil {
|
||||
return
|
||||
}
|
||||
scanResultJSON = buf.Bytes()
|
||||
} else {
|
||||
fmt.Println("use --stdin option")
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
|
||||
var scanResult models.ScanResult
|
||||
if err = json.Unmarshal(scanResultJSON, &scanResult); err != nil {
|
||||
fmt.Println("Failed to parse json", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
scanResult.ServerUUID = serverUUID
|
||||
|
||||
config.Conf.Saas.GroupID = groupID
|
||||
config.Conf.Saas.Token = token
|
||||
config.Conf.Saas.URL = url
|
||||
if err = (saas.Writer{}).Write(scanResult); err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
return
|
||||
},
|
||||
}
|
||||
cmdFvulsUploader.PersistentFlags().StringVar(&serverUUID, "uuid", "", "server uuid. ENV: VULS_SERVER_UUID")
|
||||
cmdFvulsUploader.PersistentFlags().StringVar(&configFile, "config", "", "config file (default is $HOME/.cobra.yaml)")
|
||||
cmdFvulsUploader.PersistentFlags().BoolVarP(&stdIn, "stdin", "s", false, "input from stdin. ENV: VULS_STDIN")
|
||||
// TODO Read JSON file from directory
|
||||
// cmdFvulsUploader.Flags().StringVarP(&jsonDir, "results-dir", "d", "./", "vuls scan results json dir")
|
||||
cmdFvulsUploader.PersistentFlags().Int64VarP(&groupID, "group-id", "g", 0, "future vuls group id, ENV: VULS_GROUP_ID")
|
||||
cmdFvulsUploader.PersistentFlags().StringVarP(&token, "token", "t", "", "future vuls token")
|
||||
cmdFvulsUploader.PersistentFlags().StringVar(&url, "url", "", "future vuls upload url")
|
||||
|
||||
var rootCmd = &cobra.Command{Use: "future-vuls"}
|
||||
rootCmd.AddCommand(cmdFvulsUploader)
|
||||
if err = rootCmd.Execute(); err != nil {
|
||||
fmt.Println("Failed to execute command", err)
|
||||
}
|
||||
}
|
||||
@@ -1,71 +0,0 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"encoding/xml"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/knqyf263/go-cpe/naming"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
type analysis struct {
|
||||
Dependencies []dependency `xml:"dependencies>dependency"`
|
||||
}
|
||||
|
||||
type dependency struct {
|
||||
Identifiers []vulnerabilityID `xml:"identifiers>vulnerabilityIds"`
|
||||
}
|
||||
|
||||
type vulnerabilityID struct {
|
||||
ID string `xml:"id"`
|
||||
}
|
||||
|
||||
func appendIfMissing(slice []string, str string) []string {
|
||||
for _, s := range slice {
|
||||
if s == str {
|
||||
return slice
|
||||
}
|
||||
}
|
||||
return append(slice, str)
|
||||
}
|
||||
|
||||
// Parse parses OWASP Dependency Check XML and collects a list of CPEs
|
||||
func Parse(path string) ([]string, error) {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
log.Warnf("OWASP Dependency Check XML is not found: %s", path)
|
||||
return []string{}, nil
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
b, err := ioutil.ReadAll(file)
|
||||
if err != nil {
|
||||
log.Warnf("Failed to read OWASP Dependency Check XML: %s", path)
|
||||
return []string{}, nil
|
||||
}
|
||||
|
||||
var anal analysis
|
||||
if err := xml.Unmarshal(b, &anal); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to unmarshal: %s", err)
|
||||
}
|
||||
|
||||
cpes := []string{}
|
||||
for _, d := range anal.Dependencies {
|
||||
for _, ident := range d.Identifiers {
|
||||
id := ident.ID // Start with cpe:2.3:
|
||||
// Convert from CPE 2.3 to CPE 2.2
|
||||
if strings.HasPrefix(id, "cpe:2.3:") {
|
||||
wfn, err := naming.UnbindFS(id)
|
||||
if err != nil {
|
||||
return []string{}, err
|
||||
}
|
||||
id = naming.BindToURI(wfn)
|
||||
}
|
||||
cpes = appendIfMissing(cpes, id)
|
||||
}
|
||||
}
|
||||
return cpes, nil
|
||||
}
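The struct tags above drive the whole extraction: encoding/xml walks the `dependencies>dependency` and `identifiers>vulnerabilityIds` paths and pulls out each `<id>`. A self-contained sketch against a trimmed, hypothetical report:

```
package main

import (
	"encoding/xml"
	"fmt"
)

type analysis struct {
	Dependencies []dependency `xml:"dependencies>dependency"`
}

type dependency struct {
	Identifiers []vulnerabilityID `xml:"identifiers>vulnerabilityIds"`
}

type vulnerabilityID struct {
	ID string `xml:"id"`
}

func main() {
	// Hypothetical, heavily trimmed dependency-check report.
	data := []byte(`<analysis>
	  <dependencies>
	    <dependency>
	      <identifiers>
	        <vulnerabilityIds>
	          <id>cpe:2.3:a:example:lib:1.0:*:*:*:*:*:*:*</id>
	        </vulnerabilityIds>
	      </identifiers>
	    </dependency>
	  </dependencies>
	</analysis>`)

	var a analysis
	if err := xml.Unmarshal(data, &a); err != nil {
		panic(err)
	}
	fmt.Println(a.Dependencies[0].Identifiers[0].ID)
}
```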
|
||||
@@ -1,35 +0,0 @@
|
||||
# trivy-to-vuls
|
||||
|
||||
## Main Features
|
||||
|
||||
- convert trivy's results json to vuls's report json
|
||||
|
||||
## Installation
|
||||
|
||||
```
|
||||
git clone https://github.com/future-architect/vuls.git
cd vuls
make build-trivy-to-vuls
|
||||
```
|
||||
|
||||
## Command Reference
|
||||
|
||||
```
|
||||
Parse trivy json to vuls results
|
||||
|
||||
Usage:
|
||||
trivy-to-vuls parse [flags]
|
||||
|
||||
Flags:
|
||||
-h, --help help for parse
|
||||
-s, --stdin input from stdin
|
||||
-d, --trivy-json-dir string trivy json dir (default "./")
|
||||
-f, --trivy-json-file-name string trivy json file name (default "results.json")
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
- use trivy output
|
||||
|
||||
```
|
||||
trivy -q image -f=json python:3.4-alpine | trivy-to-vuls parse --stdin
|
||||
```
|
||||
@@ -1,78 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/future-architect/vuls/contrib/trivy/parser"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
serverUUID string
|
||||
stdIn bool
|
||||
jsonDir string
|
||||
jsonFileName string
|
||||
)
|
||||
|
||||
func main() {
|
||||
var err error
|
||||
var cmdTrivyToVuls = &cobra.Command{
|
||||
Use: "parse",
|
||||
Short: "Parse trivy json to vuls results",
|
||||
Long: `Parse trivy json to vuls results`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
jsonFilePath := filepath.Join(jsonDir, jsonFileName)
|
||||
var trivyJSON []byte
|
||||
if stdIn {
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
buf := new(bytes.Buffer)
|
||||
if _, err = buf.ReadFrom(reader); err != nil {
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
trivyJSON = buf.Bytes()
|
||||
} else {
|
||||
if trivyJSON, err = ioutil.ReadFile(jsonFilePath); err != nil {
|
||||
fmt.Println("Failed to read file", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
scanResult := &models.ScanResult{
|
||||
JSONVersion: models.JSONVersion,
|
||||
ScannedCves: models.VulnInfos{},
|
||||
}
|
||||
if scanResult, err = parser.Parse(trivyJSON, scanResult); err != nil {
|
||||
fmt.Println("Failed to execute command", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
var resultJSON []byte
|
||||
if resultJSON, err = json.MarshalIndent(scanResult, "", " "); err != nil {
|
||||
fmt.Println("Failed to create json", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
fmt.Println(string(resultJSON))
|
||||
return
|
||||
},
|
||||
}
|
||||
cmdTrivyToVuls.Flags().BoolVarP(&stdIn, "stdin", "s", false, "input from stdin")
|
||||
cmdTrivyToVuls.Flags().StringVarP(&jsonDir, "trivy-json-dir", "d", "./", "trivy json dir")
|
||||
cmdTrivyToVuls.Flags().StringVarP(&jsonFileName, "trivy-json-file-name", "f", "results.json", "trivy json file name")
|
||||
|
||||
var rootCmd = &cobra.Command{Use: "trivy-to-vuls"}
|
||||
rootCmd.AddCommand(cmdTrivyToVuls)
|
||||
if err = rootCmd.Execute(); err != nil {
|
||||
fmt.Println("Failed to execute command", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
@@ -1,164 +0,0 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/aquasecurity/fanal/analyzer/os"
|
||||
"github.com/aquasecurity/trivy/pkg/report"
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
// Parse parses trivy's JSON scan results and converts them into the given vuls ScanResult
|
||||
func Parse(vulnJSON []byte, scanResult *models.ScanResult) (result *models.ScanResult, err error) {
|
||||
var trivyResults report.Results
|
||||
if err = json.Unmarshal(vulnJSON, &trivyResults); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pkgs := models.Packages{}
|
||||
vulnInfos := models.VulnInfos{}
|
||||
uniqueLibraryScannerPaths := map[string]models.LibraryScanner{}
|
||||
for _, trivyResult := range trivyResults {
|
||||
for _, vuln := range trivyResult.Vulnerabilities {
|
||||
if _, ok := vulnInfos[vuln.VulnerabilityID]; !ok {
|
||||
vulnInfos[vuln.VulnerabilityID] = models.VulnInfo{
|
||||
CveID: vuln.VulnerabilityID,
|
||||
Confidences: models.Confidences{
|
||||
{
|
||||
Score: 100,
|
||||
DetectionMethod: models.TrivyMatchStr,
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
CveContents: models.CveContents{},
|
||||
LibraryFixedIns: models.LibraryFixedIns{},
|
||||
// VulnType : "",
|
||||
}
|
||||
}
|
||||
vulnInfo := vulnInfos[vuln.VulnerabilityID]
|
||||
var notFixedYet bool
|
||||
fixState := ""
|
||||
if len(vuln.FixedVersion) == 0 {
|
||||
notFixedYet = true
|
||||
fixState = "Affected"
|
||||
}
|
||||
var references models.References
|
||||
for _, reference := range vuln.References {
|
||||
references = append(references, models.Reference{
|
||||
Source: "trivy",
|
||||
Link: reference,
|
||||
})
|
||||
}
|
||||
|
||||
sort.Slice(references, func(i, j int) bool {
|
||||
return references[i].Link < references[j].Link
|
||||
})
|
||||
|
||||
vulnInfo.CveContents = models.CveContents{
|
||||
models.Trivy: models.CveContent{
|
||||
Cvss3Severity: vuln.Severity,
|
||||
References: references,
|
||||
Title: vuln.Title,
|
||||
Summary: vuln.Description,
|
||||
},
|
||||
}
|
||||
// treat as an OS package only if the result type is a supported OS family
|
||||
if IsTrivySupportedOS(trivyResult.Type) {
|
||||
pkgs[vuln.PkgName] = models.Package{
|
||||
Name: vuln.PkgName,
|
||||
Version: vuln.InstalledVersion,
|
||||
}
|
||||
vulnInfo.AffectedPackages = append(vulnInfo.AffectedPackages, models.PackageFixStatus{
|
||||
Name: vuln.PkgName,
|
||||
NotFixedYet: notFixedYet,
|
||||
FixState: fixState,
|
||||
FixedIn: vuln.FixedVersion,
|
||||
})
|
||||
|
||||
// overwritten on every OS-package result
|
||||
scanResult.Family = trivyResult.Type
|
||||
scanResult.ServerName = trivyResult.Target
|
||||
scanResult.Optional = map[string]interface{}{
|
||||
"trivy-target": trivyResult.Target,
|
||||
}
|
||||
scanResult.ScannedAt = time.Now()
|
||||
scanResult.ScannedBy = "trivy"
|
||||
scanResult.ScannedVia = "trivy"
|
||||
} else {
|
||||
// Library scan results
|
||||
vulnInfo.LibraryFixedIns = append(vulnInfo.LibraryFixedIns, models.LibraryFixedIn{
|
||||
Key: trivyResult.Type,
|
||||
Name: vuln.PkgName,
|
||||
Path: trivyResult.Target,
|
||||
FixedIn: vuln.FixedVersion,
|
||||
})
|
||||
libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
|
||||
libScanner.Libs = append(libScanner.Libs, types.Library{
|
||||
Name: vuln.PkgName,
|
||||
Version: vuln.InstalledVersion,
|
||||
})
|
||||
uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
|
||||
}
|
||||
vulnInfos[vuln.VulnerabilityID] = vulnInfo
|
||||
}
|
||||
}
|
||||
// flatten and unique libraries
|
||||
libraryScanners := make([]models.LibraryScanner, 0, len(uniqueLibraryScannerPaths))
|
||||
for path, v := range uniqueLibraryScannerPaths {
|
||||
uniqueLibrary := map[string]types.Library{}
|
||||
for _, lib := range v.Libs {
|
||||
uniqueLibrary[lib.Name+lib.Version] = lib
|
||||
}
|
||||
|
||||
var libraries []types.Library
|
||||
for _, library := range uniqueLibrary {
|
||||
libraries = append(libraries, library)
|
||||
}
|
||||
|
||||
sort.Slice(libraries, func(i, j int) bool {
|
||||
return libraries[i].Name < libraries[j].Name
|
||||
})
|
||||
|
||||
libscanner := models.LibraryScanner{
|
||||
Path: path,
|
||||
Libs: libraries,
|
||||
}
|
||||
libraryScanners = append(libraryScanners, libscanner)
|
||||
}
|
||||
sort.Slice(libraryScanners, func(i, j int) bool {
|
||||
return libraryScanners[i].Path < libraryScanners[j].Path
|
||||
})
|
||||
scanResult.ScannedCves = vulnInfos
|
||||
scanResult.Packages = pkgs
|
||||
scanResult.LibraryScanners = libraryScanners
|
||||
return scanResult, nil
|
||||
}
|
||||
|
||||
// IsTrivySupportedOS returns true if the given family is an OS supported by trivy's OS package scanning
|
||||
func IsTrivySupportedOS(family string) bool {
|
||||
supportedFamilies := []string{
|
||||
os.RedHat,
|
||||
os.Debian,
|
||||
os.Ubuntu,
|
||||
os.CentOS,
|
||||
os.Fedora,
|
||||
os.Amazon,
|
||||
os.Oracle,
|
||||
os.Windows,
|
||||
os.OpenSUSE,
|
||||
os.OpenSUSELeap,
|
||||
os.OpenSUSETumbleweed,
|
||||
os.SLES,
|
||||
os.Photon,
|
||||
os.Alpine,
|
||||
}
|
||||
for _, supportedFamily := range supportedFamilies {
|
||||
if family == supportedFamily {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
File diff suppressed because it is too large
cwe/cwe.go
@@ -1,33 +0,0 @@
|
||||
package cwe
|
||||
|
||||
// CweTopTwentyfive2019 maps CWE-IDs to their rank in the 2019 CWE Top 25
|
||||
var CweTopTwentyfive2019 = map[string]string{
|
||||
"119": "1",
|
||||
"79": "2",
|
||||
"20": "3",
|
||||
"200": "4",
|
||||
"125": "5",
|
||||
"89": "6",
|
||||
"416": "7",
|
||||
"190": "8",
|
||||
"352": "9",
|
||||
"22": "10",
|
||||
"78": "11",
|
||||
"787": "12",
|
||||
"287": "13",
|
||||
"476": "14",
|
||||
"732": "16",
|
||||
"434": "16",
|
||||
"611": "17",
|
||||
"94": "18",
|
||||
"798": "19",
|
||||
"400": "20",
|
||||
"772": "21",
|
||||
"426": "22",
|
||||
"502": "23",
|
||||
"269": "24",
|
||||
"295": "25",
|
||||
}
|
||||
|
||||
// CweTopTwentyfive2019URL has CWE Top25 links
|
||||
var CweTopTwentyfive2019URL = "https://cwe.mitre.org/top25/archive/2019/2019_cwe_top25.html"
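A reporting layer can use this map as a simple rank lookup keyed by the numeric part of a CWE-ID. A minimal sketch with a trimmed copy of the map (the full map above is the one actually used):

```
package main

import "fmt"

// Trimmed copy of CweTopTwentyfive2019, for illustration only.
var cweTop25 = map[string]string{
	"119": "1",
	"79":  "2",
	"89":  "6",
}

func main() {
	cweID := "79" // the numeric part of "CWE-79"
	if rank, ok := cweTop25[cweID]; ok {
		fmt.Printf("CWE-%s is ranked #%s in the 2019 CWE Top 25\n", cweID, rank)
	}
}
```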
|
||||
cwe/owasp.go
@@ -1,65 +0,0 @@
|
||||
package cwe
|
||||
|
||||
// OwaspTopTen2017 maps CWE-IDs to their category in the OWASP Top 10 2017
|
||||
var OwaspTopTen2017 = map[string]string{
|
||||
"77": "1",
|
||||
"89": "1",
|
||||
"564": "1",
|
||||
"917": "1",
|
||||
|
||||
"287": "2",
|
||||
"384": "2",
|
||||
|
||||
"220": "3",
|
||||
"310": "3",
|
||||
"312": "3",
|
||||
"319": "3",
|
||||
"326": "3",
|
||||
"359": "3",
|
||||
|
||||
"611": "4",
|
||||
|
||||
"22": "5",
|
||||
"284": "5",
|
||||
"285": "5",
|
||||
"639": "5",
|
||||
|
||||
"2": "6",
|
||||
"16": "6",
|
||||
"388": "6",
|
||||
|
||||
"79": "7",
|
||||
|
||||
"502": "8",
|
||||
|
||||
"223": "10",
|
||||
"778": "10",
|
||||
}
|
||||
|
||||
// OwaspTopTen2017GitHubURLEn has GitHub links
|
||||
var OwaspTopTen2017GitHubURLEn = map[string]string{
|
||||
"1": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa1-injection.md",
|
||||
"2": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa2-broken-authentication.md",
|
||||
"3": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa3-sensitive-data-disclosure.md",
|
||||
"4": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa4-xxe.md",
|
||||
"5": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa5-broken-access-control.md",
|
||||
"6": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa6-security-misconfiguration.md",
|
||||
"7": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa7-xss.md",
|
||||
"8": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa8-insecure-deserialization.md",
|
||||
"9": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa9-known-vulns.md<Paste>",
|
||||
"10": "https://github.com/OWASP/Top10/blob/master/2017/en/0xaa-logging-detection-response.md",
|
||||
}
|
||||
|
||||
// OwaspTopTen2017GitHubURLJa has GitHub links
|
||||
var OwaspTopTen2017GitHubURLJa = map[string]string{
|
||||
"1": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa1-injection.md",
|
||||
"2": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa2-broken-authentication.md",
|
||||
"3": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa3-sensitive-data-disclosure.md",
|
||||
"4": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa4-xxe.md",
|
||||
"5": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa5-broken-access-control.md",
|
||||
"6": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa6-security-misconfiguration.md",
|
||||
"7": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa7-xss.md",
|
||||
"8": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa8-insecure-deserialization.md",
|
||||
"9": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa9-known-vulns.md<Paste>",
|
||||
"10": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xaa-logging-detection-response.md",
|
||||
}
|
||||
cwe/sans.go
@@ -1,33 +0,0 @@
|
||||
package cwe
|
||||
|
||||
// SansTopTwentyfive has CWE-ID in CWE/SANS Top 25
|
||||
var SansTopTwentyfive = map[string]string{
|
||||
"89": "1",
|
||||
"78": "2",
|
||||
"120": "3",
|
||||
"79": "4",
|
||||
"306": "5",
|
||||
"862": "6",
|
||||
"798": "7",
|
||||
"311": "8",
|
||||
"434": "9",
|
||||
"807": "10",
|
||||
"250": "11",
|
||||
"352": "12",
|
||||
"22": "13",
|
||||
"494": "14",
|
||||
"863": "15",
|
||||
"829": "16",
|
||||
"732": "17",
|
||||
"676": "18",
|
||||
"327": "19",
|
||||
"131": "20",
|
||||
"307": "21",
|
||||
"601": "22",
|
||||
"134": "23",
|
||||
"190": "24",
|
||||
"759": "25",
|
||||
}
|
||||
|
||||
// SansTopTwentyfiveURL is the URL of the CWE/SANS Top 25
|
||||
var SansTopTwentyfiveURL = "https://www.sans.org/top25-software-errors/"
|
||||
@@ -1,30 +0,0 @@
|
||||
package errof
|
||||
|
||||
// ErrorCode is vuls error code
|
||||
type ErrorCode string
|
||||
|
||||
// Error is vuls error
|
||||
type Error struct {
|
||||
Code ErrorCode
|
||||
Message string
|
||||
}
|
||||
|
||||
func (e Error) Error() string {
|
||||
return e.Message
|
||||
}
|
||||
|
||||
var (
|
||||
// ErrFailedToAccessGithubAPI is error of github alert's api access
|
||||
ErrFailedToAccessGithubAPI ErrorCode = "ErrFailedToAccessGithubAPI"
|
||||
|
||||
// ErrFailedToAccessWpScan is error of wpscan.com api access
|
||||
ErrFailedToAccessWpScan ErrorCode = "ErrFailedToAccessWpScan"
|
||||
)
|
||||
|
||||
// New creates a vuls Error with the given code and message
|
||||
func New(code ErrorCode, msg string) Error {
|
||||
return Error{
|
||||
Code: code,
|
||||
Message: msg,
|
||||
}
|
||||
}
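A minimal, hypothetical caller showing how the error code can be recovered with errors.As; Error() returns only the message, while the code rides alongside it:

```
package main

import (
	"errors"
	"fmt"

	"github.com/future-architect/vuls/errof"
)

func fetchAlerts() error {
	// Hypothetical failure path.
	return errof.New(errof.ErrFailedToAccessGithubAPI, "GitHub API returned 401")
}

func main() {
	err := fetchAlerts()

	var e errof.Error
	if errors.As(err, &e) && e.Code == errof.ErrFailedToAccessGithubAPI {
		fmt.Println("GitHub API access failed:", e.Message)
	}
}
```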
|
||||
@@ -1,119 +0,0 @@
|
||||
// +build !scanner
|
||||
|
||||
package exploit
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
cnf "github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/mozqnet/go-exploitdb/db"
|
||||
exploitmodels "github.com/mozqnet/go-exploitdb/models"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// FillWithExploit fills in exploit information held in the exploit database
|
||||
func FillWithExploit(driver db.DB, r *models.ScanResult) (nExploitCve int, err error) {
|
||||
if cnf.Conf.Exploit.IsFetchViaHTTP() {
|
||||
var cveIDs []string
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, _ := util.URLPathJoin(cnf.Conf.Exploit.URL, "cves")
|
||||
responses, err := getCvesViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
for _, res := range responses {
|
||||
exps := []*exploitmodels.Exploit{}
|
||||
if err := json.Unmarshal([]byte(res.json), &exps); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
exploits := ConvertToModels(exps)
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.Exploits = exploits
|
||||
}
|
||||
r.ScannedCves[res.request.cveID] = v
|
||||
nExploitCve++
|
||||
}
|
||||
} else {
|
||||
if driver == nil {
|
||||
return 0, nil
|
||||
}
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
es := driver.GetExploitByCveID(cveID)
|
||||
if len(es) == 0 {
|
||||
continue
|
||||
}
|
||||
exploits := ConvertToModels(es)
|
||||
vuln.Exploits = exploits
|
||||
r.ScannedCves[cveID] = vuln
|
||||
nExploitCve++
|
||||
}
|
||||
}
|
||||
return nExploitCve, nil
|
||||
}
|
||||
|
||||
// ConvertToModels converts go-exploitdb models to vuls models
|
||||
func ConvertToModels(es []*exploitmodels.Exploit) (exploits []models.Exploit) {
|
||||
for _, e := range es {
|
||||
var documentURL, shellURL *string
|
||||
if e.OffensiveSecurity != nil {
|
||||
os := e.OffensiveSecurity
|
||||
if os.Document != nil {
|
||||
documentURL = &os.Document.DocumentURL
|
||||
}
|
||||
if os.ShellCode != nil {
|
||||
shellURL = &os.ShellCode.ShellCodeURL
|
||||
}
|
||||
}
|
||||
exploit := models.Exploit{
|
||||
ExploitType: e.ExploitType,
|
||||
ID: e.ExploitUniqueID,
|
||||
URL: e.URL,
|
||||
Description: e.Description,
|
||||
DocumentURL: documentURL,
|
||||
ShellCodeURL: shellURL,
|
||||
}
|
||||
exploits = append(exploits, exploit)
|
||||
}
|
||||
return exploits
|
||||
}
|
||||
|
||||
// CheckHTTPHealth does a health check
|
||||
func CheckHTTPHealth() error {
|
||||
if !cnf.Conf.Exploit.IsFetchViaHTTP() {
|
||||
return nil
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("%s/health", cnf.Conf.Exploit.URL)
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
resp, _, errs = gorequest.New().Get(url).End()
|
||||
// resp, _, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
return xerrors.Errorf("Failed to connect to exploit server. url: %s, errs: %w", url, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// CheckIfExploitFetched checks if exploit entries are in the DB by family and release.
|
||||
func CheckIfExploitFetched(driver db.DB, osFamily string) (fetched bool, err error) {
|
||||
//TODO
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// CheckIfExploitFresh checks if exploit entries are fresh enough
|
||||
func CheckIfExploitFresh(driver db.DB, osFamily string) (ok bool, err error) {
|
||||
//TODO
|
||||
return true, nil
|
||||
}
|
||||
exploit/util.go
@@ -1,115 +0,0 @@
|
||||
package exploit
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
type response struct {
|
||||
request request
|
||||
json string
|
||||
}
|
||||
|
||||
func getCvesViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []response, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan request, nReq)
|
||||
resChan := make(chan response, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- request{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
select {
|
||||
case req := <-reqChan:
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
util.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGet(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching OVAL")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch OVAL. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type request struct {
|
||||
osMajorVersion string
|
||||
packName string
|
||||
isSrcPack bool
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGet(url string, req request, resChan chan<- response, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %w", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
util.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %s", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- response{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
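The retry logic above leans on cenkalti/backoff's RetryNotify with the default exponential policy. A standalone sketch of the same pattern against a hypothetical health endpoint (the real code wires this into gorequest and a result channel):

```
package main

import (
	"fmt"
	"net/http"
	"time"

	"github.com/cenkalti/backoff"
	"golang.org/x/xerrors"
)

func main() {
	url := "http://127.0.0.1:1324/health" // hypothetical endpoint

	operation := func() error {
		resp, err := http.Get(url)
		if err != nil {
			return xerrors.Errorf("HTTP GET error, url: %s, err: %w", url, err)
		}
		defer resp.Body.Close()
		if resp.StatusCode != http.StatusOK {
			return xerrors.Errorf("unexpected status: %d", resp.StatusCode)
		}
		return nil
	}
	notify := func(err error, t time.Duration) {
		fmt.Printf("retrying in %s, err: %s\n", t, err)
	}

	// Retries with exponentially growing waits until success or the policy gives up.
	if err := backoff.RetryNotify(operation, backoff.NewExponentialBackOff(), notify); err != nil {
		fmt.Println("gave up:", err)
	}
}
```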
|
||||
github/github.go
@@ -1,178 +0,0 @@
|
||||
package github
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/errof"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
// DetectGitHubSecurityAlerts accesses owner/repo on GitHub, fetches the repository's security alerts via the GitHub API v4 GraphQL endpoint, and then sets them on the given ScanResult.
|
||||
// https://help.github.com/articles/about-security-alerts-for-vulnerable-dependencies/
|
||||
//TODO move to report
|
||||
func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string) (nCVEs int, err error) {
|
||||
src := oauth2.StaticTokenSource(
|
||||
&oauth2.Token{AccessToken: token},
|
||||
)
|
||||
httpClient := oauth2.NewClient(context.Background(), src)
|
||||
|
||||
// TODO Use `https://github.com/shurcooL/githubv4` if the tool supports vulnerabilityAlerts Endpoint
|
||||
// Memo : https://developer.github.com/v4/explorer/
|
||||
const jsonfmt = `{"query":
|
||||
"query { repository(owner:\"%s\", name:\"%s\") { url vulnerabilityAlerts(first: %d, %s) { pageInfo { endCursor hasNextPage startCursor } edges { node { id dismissReason dismissedAt securityVulnerability{ package { name ecosystem } severity vulnerableVersionRange firstPatchedVersion { identifier } } securityAdvisory { description ghsaId permalink publishedAt summary updatedAt withdrawnAt origin severity references { url } identifiers { type value } } } } } } } "}`
|
||||
after := ""
|
||||
|
||||
for {
|
||||
jsonStr := fmt.Sprintf(jsonfmt, owner, repo, 100, after)
|
||||
req, err := http.NewRequest("POST",
|
||||
"https://api.github.com/graphql",
|
||||
bytes.NewBuffer([]byte(jsonStr)),
|
||||
)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// https://developer.github.com/v4/previews/#repository-vulnerability-alerts
|
||||
// To toggle this preview and access the data, a custom media type must be provided in the Accept header:
|
||||
// MEMO: I tried to get the affected version via the GitHub API. But it seems difficult to determine the affected version if there are multiple dependency files such as package.json.
|
||||
// TODO remove this header if it is no longer preview status in the future.
|
||||
req.Header.Set("Accept", "application/vnd.github.package-deletes-preview+json")
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := httpClient.Do(req)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
alerts := SecurityAlerts{}
|
||||
if err := json.Unmarshal(body, &alerts); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// util.Log.Debugf("%s", pp.Sprint(alerts))
|
||||
// util.Log.Debugf("%s", string(body))
|
||||
if alerts.Data.Repository.URL == "" {
|
||||
return 0, errof.New(errof.ErrFailedToAccessGithubAPI,
|
||||
fmt.Sprintf("Failed to access to GitHub API. Response: %s", string(body)))
|
||||
}
|
||||
|
||||
for _, v := range alerts.Data.Repository.VulnerabilityAlerts.Edges {
|
||||
if config.Conf.IgnoreGitHubDismissed && v.Node.DismissReason != "" {
|
||||
continue
|
||||
}
|
||||
|
||||
pkgName := fmt.Sprintf("%s %s",
|
||||
alerts.Data.Repository.URL, v.Node.SecurityVulnerability.Package.Name)
|
||||
|
||||
m := models.GitHubSecurityAlert{
|
||||
PackageName: pkgName,
|
||||
FixedIn: v.Node.SecurityVulnerability.FirstPatchedVersion.Identifier,
|
||||
AffectedRange: v.Node.SecurityVulnerability.VulnerableVersionRange,
|
||||
Dismissed: len(v.Node.DismissReason) != 0,
|
||||
DismissedAt: v.Node.DismissedAt,
|
||||
DismissReason: v.Node.DismissReason,
|
||||
}
|
||||
|
||||
cveIDs, other := []string{}, []string{}
|
||||
for _, identifier := range v.Node.SecurityAdvisory.Identifiers {
|
||||
if identifier.Type == "CVE" {
|
||||
cveIDs = append(cveIDs, identifier.Value)
|
||||
} else {
|
||||
other = append(other, identifier.Value)
|
||||
}
|
||||
}
|
||||
|
||||
// If a CVE-ID has not been assigned, use the GHSA ID etc. as the ID.
|
||||
if len(cveIDs) == 0 {
|
||||
cveIDs = other
|
||||
}
|
||||
|
||||
for _, cveID := range cveIDs {
|
||||
if val, ok := r.ScannedCves[cveID]; ok {
|
||||
val.GitHubSecurityAlerts = val.GitHubSecurityAlerts.Add(m)
|
||||
r.ScannedCves[cveID] = val
|
||||
nCVEs++
|
||||
} else {
|
||||
v := models.VulnInfo{
|
||||
CveID: cveID,
|
||||
Confidences: models.Confidences{models.GitHubMatch},
|
||||
GitHubSecurityAlerts: models.GitHubSecurityAlerts{m},
|
||||
}
|
||||
r.ScannedCves[cveID] = v
|
||||
nCVEs++
|
||||
}
|
||||
}
|
||||
}
|
||||
if !alerts.Data.Repository.VulnerabilityAlerts.PageInfo.HasNextPage {
|
||||
break
|
||||
}
|
||||
after = fmt.Sprintf(`after: \"%s\"`, alerts.Data.Repository.VulnerabilityAlerts.PageInfo.EndCursor)
|
||||
}
|
||||
return nCVEs, err
|
||||
}
|
||||
|
||||
// SecurityAlerts holds the detected CVE-IDs, package names, and references
|
||||
type SecurityAlerts struct {
|
||||
Data struct {
|
||||
Repository struct {
|
||||
URL string `json:"url"`
|
||||
VulnerabilityAlerts struct {
|
||||
PageInfo struct {
|
||||
EndCursor string `json:"endCursor"`
|
||||
HasNextPage bool `json:"hasNextPage"`
|
||||
StartCursor string `json:"startCursor"`
|
||||
} `json:"pageInfo"`
|
||||
Edges []struct {
|
||||
Node struct {
|
||||
ID string `json:"id"`
|
||||
DismissReason string `json:"dismissReason"`
|
||||
DismissedAt time.Time `json:"dismissedAt"`
|
||||
SecurityVulnerability struct {
|
||||
Package struct {
|
||||
Name string `json:"name"`
|
||||
Ecosystem string `json:"ecosystem"`
|
||||
} `json:"package"`
|
||||
Severity string `json:"severity"`
|
||||
VulnerableVersionRange string `json:"vulnerableVersionRange"`
|
||||
FirstPatchedVersion struct {
|
||||
Identifier string `json:"identifier"`
|
||||
} `json:"firstPatchedVersion"`
|
||||
} `json:"securityVulnerability"`
|
||||
SecurityAdvisory struct {
|
||||
Description string `json:"description"`
|
||||
GhsaID string `json:"ghsaId"`
|
||||
Permalink string `json:"permalink"`
|
||||
PublishedAt time.Time `json:"publishedAt"`
|
||||
Summary string `json:"summary"`
|
||||
UpdatedAt time.Time `json:"updatedAt"`
|
||||
WithdrawnAt time.Time `json:"withdrawnAt"`
|
||||
Origin string `json:"origin"`
|
||||
Severity string `json:"severity"`
|
||||
References []struct {
|
||||
URL string `json:"url"`
|
||||
} `json:"references"`
|
||||
Identifiers []struct {
|
||||
Type string `json:"type"`
|
||||
Value string `json:"value"`
|
||||
} `json:"identifiers"`
|
||||
} `json:"securityAdvisory"`
|
||||
} `json:"node"`
|
||||
} `json:"edges"`
|
||||
} `json:"vulnerabilityAlerts"`
|
||||
} `json:"repository"`
|
||||
} `json:"data"`
|
||||
}
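DetectGitHubSecurityAlerts builds its HTTP client with oauth2.StaticTokenSource, so every GraphQL request carries the token automatically. A small, hypothetical sketch of that setup in isolation (hitting the REST rate-limit endpoint only because it needs no parameters):

```
package main

import (
	"context"
	"fmt"
	"io/ioutil"
	"net/http"
	"os"

	"golang.org/x/oauth2"
)

func main() {
	// Hypothetical token from the environment; the real caller passes it in from config.
	src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: os.Getenv("GITHUB_TOKEN")})
	httpClient := oauth2.NewClient(context.Background(), src)

	req, err := http.NewRequest("GET", "https://api.github.com/rate_limit", nil)
	if err != nil {
		panic(err)
	}
	resp, err := httpClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(resp.Status, len(body), "bytes")
}
```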
|
||||
go.mod
@@ -1,85 +1,75 @@
|
||||
module github.com/future-architect/vuls
|
||||
|
||||
go 1.15
|
||||
go 1.19
|
||||
|
||||
replace (
|
||||
gopkg.in/mattn/go-colorable.v0 => github.com/mattn/go-colorable v0.1.0
|
||||
gopkg.in/mattn/go-isatty.v0 => github.com/mattn/go-isatty v0.0.6
|
||||
require (
|
||||
github.com/MakeNowJust/heredoc v1.0.0
|
||||
github.com/go-redis/redis/v9 v9.0.0-rc.1
|
||||
github.com/google/uuid v1.3.0
|
||||
github.com/hashicorp/go-version v1.6.0
|
||||
github.com/knqyf263/go-cpe v0.0.0-20201213041631-54f6ab28673f
|
||||
github.com/knqyf263/go-deb-version v0.0.0-20190517075300-09fca494f03d
|
||||
github.com/labstack/echo/v4 v4.9.1
|
||||
github.com/olekukonko/tablewriter v0.0.5
|
||||
github.com/opencontainers/image-spec v1.1.0-rc2
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/spf13/cobra v1.6.1
|
||||
github.com/ulikunitz/xz v0.5.10
|
||||
go.etcd.io/bbolt v1.3.6
|
||||
go.uber.org/zap v1.13.0
|
||||
golang.org/x/exp v0.0.0-20221106115401-f9659909a136
|
||||
gorm.io/driver/mysql v1.4.3
|
||||
gorm.io/driver/postgres v1.4.5
|
||||
gorm.io/gorm v1.24.1
|
||||
modernc.org/sqlite v1.19.4
|
||||
oras.land/oras-go/v2 v2.0.0-rc.4
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go v50.0.0+incompatible
|
||||
github.com/Azure/go-autorest/autorest v0.11.16 // indirect
|
||||
github.com/Azure/go-autorest/autorest/adal v0.9.10 // indirect
|
||||
github.com/BurntSushi/toml v0.3.1
|
||||
github.com/aquasecurity/fanal v0.0.0-20210111044704-9cb28297c870
|
||||
github.com/aquasecurity/go-dep-parser v0.0.0-20210113052454-251388ce94e5 // indirect
|
||||
github.com/aquasecurity/trivy v0.15.0
|
||||
github.com/aquasecurity/trivy-db v0.0.0-20210111152553-7d4d1aa5f0d4
|
||||
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef
|
||||
github.com/aws/aws-sdk-go v1.36.26
|
||||
github.com/boltdb/bolt v1.3.1
|
||||
github.com/caarlos0/env/v6 v6.4.0 // indirect
|
||||
github.com/cenkalti/backoff v2.2.1+incompatible
|
||||
github.com/d4l3k/messagediff v1.2.2-0.20190829033028-7e0a312ae40b
|
||||
github.com/emersion/go-sasl v0.0.0-20200509203442-7bfe0ed36a21
|
||||
github.com/emersion/go-smtp v0.14.0
|
||||
github.com/go-redis/redis/v8 v8.4.8 // indirect
|
||||
github.com/goccy/go-yaml v1.8.4 // indirect
|
||||
github.com/golang/protobuf v1.4.3 // indirect
|
||||
github.com/google/subcommands v1.2.0
|
||||
github.com/google/wire v0.4.0 // indirect
|
||||
github.com/gosuri/uitable v0.0.4
|
||||
github.com/grokify/html-strip-tags-go v0.0.1 // indirect
|
||||
github.com/hashicorp/go-uuid v1.0.2
|
||||
github.com/hashicorp/go-version v1.2.1
|
||||
github.com/howeyc/gopass v0.0.0-20190910152052-7cb4b85ec19c
|
||||
github.com/jesseduffield/gocui v0.3.0
|
||||
github.com/k0kubun/pp v3.0.1+incompatible
|
||||
github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f
|
||||
github.com/knqyf263/go-cpe v0.0.0-20201213041631-54f6ab28673f
|
||||
github.com/knqyf263/go-deb-version v0.0.0-20190517075300-09fca494f03d
|
||||
github.com/knqyf263/go-rpm-version v0.0.0-20170716094938-74609b86c936
|
||||
github.com/knqyf263/gost v0.1.7
|
||||
github.com/kotakanbe/go-cve-dictionary v0.5.6
|
||||
github.com/kotakanbe/go-pingscanner v0.1.0
|
||||
github.com/kotakanbe/goval-dictionary v0.3.0
|
||||
github.com/kotakanbe/logrus-prefixed-formatter v0.0.0-20180123152602-928f7356cb96
|
||||
github.com/magiconair/properties v1.8.4 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.10 // indirect
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
|
||||
github.com/mitchellh/go-homedir v1.1.0
|
||||
github.com/mitchellh/mapstructure v1.4.1 // indirect
|
||||
github.com/mozqnet/go-exploitdb v0.1.2
|
||||
github.com/nlopes/slack v0.6.0
|
||||
github.com/nsf/termbox-go v0.0.0-20201124104050-ed494de23a00 // indirect
|
||||
github.com/olekukonko/tablewriter v0.0.4
|
||||
github.com/parnurzeal/gorequest v0.2.16
|
||||
github.com/pelletier/go-toml v1.8.1 // indirect
|
||||
github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5
|
||||
github.com/rivo/uniseg v0.2.0 // indirect
|
||||
github.com/sirupsen/logrus v1.7.0
|
||||
github.com/spf13/afero v1.5.1
|
||||
github.com/spf13/cast v1.3.1 // indirect
|
||||
github.com/spf13/cobra v1.1.1
|
||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
||||
github.com/spf13/viper v1.7.1 // indirect
|
||||
github.com/stretchr/testify v1.7.0 // indirect
|
||||
github.com/takuzoo3868/go-msfdb v0.1.3
|
||||
go.uber.org/multierr v1.6.0 // indirect
|
||||
go.uber.org/zap v1.16.0 // indirect
|
||||
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad
|
||||
golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 // indirect
|
||||
golang.org/x/net v0.0.0-20201224014010-6772e930b67b // indirect
|
||||
golang.org/x/oauth2 v0.0.0-20210113205817-d3ed898aa8a3
|
||||
golang.org/x/sys v0.0.0-20210113181707-4bcb84eeeb78 // indirect
|
||||
golang.org/x/term v0.0.0-20201210144234-2321bbc49cbf // indirect
|
||||
golang.org/x/text v0.3.5 // indirect
|
||||
golang.org/x/tools v0.0.0-20201211185031-d93e913c1a58 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
gopkg.in/ini.v1 v1.62.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
|
||||
honnef.co/go/tools v0.1.0 // indirect
|
||||
k8s.io/utils v0.0.0-20210111153108-fddb29f9d009
|
||||
github.com/cespare/xxhash/v2 v2.1.2 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/fsnotify/fsnotify v1.5.4 // indirect
|
||||
github.com/go-sql-driver/mysql v1.6.0 // indirect
|
||||
github.com/gofrs/uuid v4.2.0+incompatible // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.1 // indirect
|
||||
github.com/jackc/chunkreader/v2 v2.0.1 // indirect
|
||||
github.com/jackc/pgconn v1.13.0 // indirect
|
||||
github.com/jackc/pgio v1.0.0 // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgproto3/v2 v2.3.1 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
|
||||
github.com/jackc/pgtype v1.12.0 // indirect
|
||||
github.com/jackc/pgx/v4 v4.17.2 // indirect
|
||||
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||
github.com/jinzhu/now v1.1.5 // indirect
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
|
||||
github.com/labstack/gommon v0.4.0 // indirect
|
||||
github.com/lib/pq v1.10.6 // indirect
|
||||
github.com/mattn/go-colorable v0.1.11 // indirect
|
||||
github.com/mattn/go-isatty v0.0.16 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.9 // indirect
|
||||
github.com/opencontainers/go-digest v1.0.0 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||
github.com/valyala/fasttemplate v1.2.1 // indirect
|
||||
go.uber.org/atomic v1.6.0 // indirect
|
||||
go.uber.org/multierr v1.5.0 // indirect
|
||||
golang.org/x/crypto v0.1.0 // indirect
|
||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de // indirect
|
||||
golang.org/x/mod v0.6.0 // indirect
|
||||
golang.org/x/net v0.1.0 // indirect
|
||||
golang.org/x/sync v0.1.0 // indirect
|
||||
golang.org/x/sys v0.1.0 // indirect
|
||||
golang.org/x/text v0.4.0 // indirect
|
||||
golang.org/x/tools v0.2.0 // indirect
|
||||
lukechampine.com/uint128 v1.2.0 // indirect
|
||||
modernc.org/cc/v3 v3.40.0 // indirect
|
||||
modernc.org/ccgo/v3 v3.16.13 // indirect
|
||||
modernc.org/libc v1.21.4 // indirect
|
||||
modernc.org/mathutil v1.5.0 // indirect
|
||||
modernc.org/memory v1.4.0 // indirect
|
||||
modernc.org/opt v0.1.3 // indirect
|
||||
modernc.org/strutil v1.1.3 // indirect
|
||||
modernc.org/token v1.0.1 // indirect
|
||||
)
|
||||
|
||||
gost/base.go
@@ -1,53 +0,0 @@
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
cnf "github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/knqyf263/gost/db"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// Base is a base struct
|
||||
type Base struct {
|
||||
}
|
||||
|
||||
// FillCVEsWithRedHat fills in CVE information held in Gost
|
||||
func (b Base) FillCVEsWithRedHat(driver db.DB, r *models.ScanResult) error {
|
||||
return RedHat{}.fillCvesWithRedHatAPI(driver, r)
|
||||
}
|
||||
|
||||
// CheckHTTPHealth does a health check
|
||||
func (b Base) CheckHTTPHealth() error {
|
||||
if !cnf.Conf.Gost.IsFetchViaHTTP() {
|
||||
return nil
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("%s/health", cnf.Conf.Gost.URL)
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
resp, _, errs = gorequest.New().Get(url).End()
|
||||
// resp, _, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
return xerrors.Errorf("Failed to connect to gost server. url: %s, errs: %w", url, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// CheckIfGostFetched checks if Gost entries are in the DB by family and release.
|
||||
func (b Base) CheckIfGostFetched(driver db.DB, osFamily string) (fetched bool, err error) {
|
||||
//TODO
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// CheckIfGostFresh checks if Gost entries are fresh enough
|
||||
func (b Base) CheckIfGostFresh(driver db.DB, osFamily string) (ok bool, err error) {
|
||||
//TODO
|
||||
return true, nil
|
||||
}
|
||||
gost/debian.go
@@ -1,190 +0,0 @@
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/knqyf263/gost/db"
|
||||
gostmodels "github.com/knqyf263/gost/models"
|
||||
)
|
||||
|
||||
// Debian is Gost client for Debian GNU/Linux
|
||||
type Debian struct {
|
||||
Base
|
||||
}
|
||||
|
||||
type packCves struct {
|
||||
packName string
|
||||
isSrcPack bool
|
||||
cves []models.CveContent
|
||||
}
|
||||
|
||||
func (deb Debian) supported(major string) bool {
|
||||
_, ok := map[string]string{
|
||||
"8": "jessie",
|
||||
"9": "stretch",
|
||||
"10": "buster",
|
||||
}[major]
|
||||
return ok
|
||||
}
|
||||
|
||||
// DetectUnfixed fills in CVE information held in Gost
|
||||
func (deb Debian) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (nCVEs int, err error) {
|
||||
if !deb.supported(major(r.Release)) {
|
||||
// only logging
|
||||
util.Log.Warnf("Debian %s is not supported yet", r.Release)
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
linuxImage := "linux-image-" + r.RunningKernel.Release
|
||||
// Add linux and set the version of running kernel to search OVAL.
|
||||
if r.Container.ContainerID == "" {
|
||||
newVer := ""
|
||||
if p, ok := r.Packages[linuxImage]; ok {
|
||||
newVer = p.NewVersion
|
||||
}
|
||||
r.Packages["linux"] = models.Package{
|
||||
Name: "linux",
|
||||
Version: r.RunningKernel.Version,
|
||||
NewVersion: newVer,
|
||||
}
|
||||
}
|
||||
|
||||
// Debian Security Tracker does not support packages for Raspbian, so skip them.
|
||||
var scanResult models.ScanResult
|
||||
if r.Family != config.Raspbian {
|
||||
scanResult = *r
|
||||
} else {
|
||||
scanResult = r.RemoveRaspbianPackFromResult()
|
||||
}
|
||||
|
||||
packCvesList := []packCves{}
|
||||
if config.Conf.Gost.IsFetchViaHTTP() {
|
||||
url, _ := util.URLPathJoin(config.Conf.Gost.URL, "debian", major(scanResult.Release), "pkgs")
|
||||
responses, err := getAllUnfixedCvesViaHTTP(r, url)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
for _, res := range responses {
|
||||
debCves := map[string]gostmodels.DebianCVE{}
|
||||
if err := json.Unmarshal([]byte(res.json), &debCves); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cves := []models.CveContent{}
|
||||
for _, debcve := range debCves {
|
||||
cves = append(cves, *deb.ConvertToModel(&debcve))
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: res.request.packName,
|
||||
isSrcPack: res.request.isSrcPack,
|
||||
cves: cves,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
if driver == nil {
|
||||
return 0, nil
|
||||
}
|
||||
for _, pack := range scanResult.Packages {
|
||||
cveDebs := driver.GetUnfixedCvesDebian(major(scanResult.Release), pack.Name)
|
||||
cves := []models.CveContent{}
|
||||
for _, cveDeb := range cveDebs {
|
||||
cves = append(cves, *deb.ConvertToModel(&cveDeb))
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: false,
|
||||
cves: cves,
|
||||
})
|
||||
}
|
||||
|
||||
// SrcPack
|
||||
for _, pack := range scanResult.SrcPackages {
|
||||
cveDebs := driver.GetUnfixedCvesDebian(major(scanResult.Release), pack.Name)
|
||||
cves := []models.CveContent{}
|
||||
for _, cveDeb := range cveDebs {
|
||||
cves = append(cves, *deb.ConvertToModel(&cveDeb))
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: true,
|
||||
cves: cves,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
delete(r.Packages, "linux")
|
||||
|
||||
for _, p := range packCvesList {
|
||||
for _, cve := range p.cves {
|
||||
v, ok := r.ScannedCves[cve.CveID]
|
||||
if ok {
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(cve)
|
||||
} else {
|
||||
v.CveContents[models.DebianSecurityTracker] = cve
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
CveID: cve.CveID,
|
||||
CveContents: models.NewCveContents(cve),
|
||||
Confidences: models.Confidences{models.DebianSecurityTrackerMatch},
|
||||
}
|
||||
nCVEs++
|
||||
}
|
||||
|
||||
names := []string{}
|
||||
if p.isSrcPack {
|
||||
if srcPack, ok := r.SrcPackages[p.packName]; ok {
|
||||
for _, binName := range srcPack.BinaryNames {
|
||||
if _, ok := r.Packages[binName]; ok {
|
||||
names = append(names, binName)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if p.packName == "linux" {
|
||||
names = append(names, linuxImage)
|
||||
} else {
|
||||
names = append(names, p.packName)
|
||||
}
|
||||
}
|
||||
|
||||
for _, name := range names {
|
||||
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
|
||||
Name: name,
|
||||
FixState: "open",
|
||||
NotFixedYet: true,
|
||||
})
|
||||
}
|
||||
r.ScannedCves[cve.CveID] = v
|
||||
}
|
||||
}
|
||||
return nCVEs, nil
|
||||
}
|
||||
|
||||
// ConvertToModel converts gost model to vuls model
|
||||
func (deb Debian) ConvertToModel(cve *gostmodels.DebianCVE) *models.CveContent {
|
||||
severity := ""
|
||||
for _, p := range cve.Package {
|
||||
for _, r := range p.Release {
|
||||
severity = r.Urgency
|
||||
break
|
||||
}
|
||||
}
|
||||
return &models.CveContent{
|
||||
Type: models.DebianSecurityTracker,
|
||||
CveID: cve.CveID,
|
||||
Summary: cve.Description,
|
||||
Cvss2Severity: severity,
|
||||
Cvss3Severity: severity,
|
||||
SourceLink: "https://security-tracker.debian.org/tracker/" + cve.CveID,
|
||||
Optional: map[string]string{
|
||||
"attack range": cve.Scope,
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
package gost
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestDebian_Supported(t *testing.T) {
|
||||
type fields struct {
|
||||
Base Base
|
||||
}
|
||||
type args struct {
|
||||
major string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want bool
|
||||
}{
|
||||
{
|
||||
name: "8 is supported",
|
||||
args: args{
|
||||
major: "8",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "9 is supported",
|
||||
args: args{
|
||||
major: "9",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "10 is supported",
|
||||
args: args{
|
||||
major: "10",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "11 is not supported yet",
|
||||
args: args{
|
||||
major: "11",
|
||||
},
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
name: "empty string is not supported yet",
|
||||
args: args{
|
||||
major: "",
|
||||
},
|
||||
want: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
deb := Debian{}
|
||||
if got := deb.supported(tt.args.major); got != tt.want {
|
||||
t.Errorf("Debian.Supported() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
gost/gost.go
@@ -1,35 +0,0 @@
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
cnf "github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/knqyf263/gost/db"
|
||||
)
|
||||
|
||||
// Client is the interface of Gost clients.
|
||||
type Client interface {
|
||||
DetectUnfixed(db.DB, *models.ScanResult, bool) (int, error)
|
||||
FillCVEsWithRedHat(db.DB, *models.ScanResult) error
|
||||
|
||||
//TODO implement
|
||||
// CheckHTTPHealth() error
|
||||
// CheckIfGostFetched checks if Gost entries are fetched
|
||||
// CheckIfGostFetched(db.DB, string, string) (bool, error)
|
||||
// CheckIfGostFresh(db.DB, string, string) (bool, error)
|
||||
}
|
||||
|
||||
// NewClient makes a Client by OS family
|
||||
func NewClient(family string) Client {
|
||||
switch family {
|
||||
case cnf.RedHat, cnf.CentOS:
|
||||
return RedHat{}
|
||||
case cnf.Debian, cnf.Raspbian:
|
||||
return Debian{}
|
||||
case cnf.Windows:
|
||||
return Microsoft{}
|
||||
default:
|
||||
return Pseudo{}
|
||||
}
|
||||
}
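A minimal sketch of how a caller might pick a client per OS family; unknown families fall back to Pseudo (the config constants are assumed to be the ones referenced in the switch above):

```
package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/gost"
)

func main() {
	for _, family := range []string{config.Debian, config.RedHat, config.Windows, "freebsd"} {
		// %T prints the concrete client type selected for each family.
		client := gost.NewClient(family)
		fmt.Printf("%-10s -> %T\n", family, client)
	}
}
```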
|
||||
@@ -1,129 +0,0 @@
|
||||
package gost
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
gostmodels "github.com/knqyf263/gost/models"
|
||||
)
|
||||
|
||||
func TestSetPackageStates(t *testing.T) {
|
||||
var tests = []struct {
|
||||
pkgstats []gostmodels.RedhatPackageState
|
||||
installed models.Packages
|
||||
release string
|
||||
in models.VulnInfo
|
||||
out models.PackageFixStatuses
|
||||
}{
|
||||
|
||||
//0 one
|
||||
{
|
||||
pkgstats: []gostmodels.RedhatPackageState{
|
||||
{
|
||||
FixState: "Will not fix",
|
||||
PackageName: "bouncycastle",
|
||||
Cpe: "cpe:/o:redhat:enterprise_linux:7",
|
||||
},
|
||||
},
|
||||
installed: models.Packages{
|
||||
"bouncycastle": models.Package{},
|
||||
},
|
||||
release: "7",
|
||||
in: models.VulnInfo{},
|
||||
out: []models.PackageFixStatus{
|
||||
{
|
||||
Name: "bouncycastle",
|
||||
FixState: "Will not fix",
|
||||
NotFixedYet: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
//1 two
|
||||
{
|
||||
pkgstats: []gostmodels.RedhatPackageState{
|
||||
{
|
||||
FixState: "Will not fix",
|
||||
PackageName: "bouncycastle",
|
||||
Cpe: "cpe:/o:redhat:enterprise_linux:7",
|
||||
},
|
||||
{
|
||||
FixState: "Fix deferred",
|
||||
PackageName: "pack_a",
|
||||
Cpe: "cpe:/o:redhat:enterprise_linux:7",
|
||||
},
|
||||
// ignore packages that are not installed
|
||||
{
|
||||
FixState: "Fix deferred",
|
||||
PackageName: "pack_b",
|
||||
Cpe: "cpe:/o:redhat:enterprise_linux:7",
|
||||
},
|
||||
},
|
||||
installed: models.Packages{
|
||||
"bouncycastle": models.Package{},
|
||||
"pack_a": models.Package{},
|
||||
},
|
||||
release: "7",
|
||||
in: models.VulnInfo{},
|
||||
out: []models.PackageFixStatus{
|
||||
{
|
||||
Name: "bouncycastle",
|
||||
FixState: "Will not fix",
|
||||
NotFixedYet: true,
|
||||
},
|
||||
{
|
||||
Name: "pack_a",
|
||||
FixState: "Fix deferred",
|
||||
NotFixedYet: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
//2 ignore affected
|
||||
{
|
||||
pkgstats: []gostmodels.RedhatPackageState{
|
||||
{
|
||||
FixState: "affected",
|
||||
PackageName: "bouncycastle",
|
||||
Cpe: "cpe:/o:redhat:enterprise_linux:7",
|
||||
},
|
||||
},
|
||||
installed: models.Packages{
|
||||
"bouncycastle": models.Package{},
|
||||
},
|
||||
release: "7",
|
||||
in: models.VulnInfo{
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
},
|
||||
out: models.PackageFixStatuses{},
|
||||
},
|
||||
|
||||
//3 look only at the same OS release.
|
||||
{
|
||||
pkgstats: []gostmodels.RedhatPackageState{
|
||||
{
|
||||
FixState: "Will not fix",
|
||||
PackageName: "bouncycastle",
|
||||
Cpe: "cpe:/o:redhat:enterprise_linux:6",
|
||||
},
|
||||
},
|
||||
installed: models.Packages{
|
||||
"bouncycastle": models.Package{},
|
||||
},
|
||||
release: "7",
|
||||
in: models.VulnInfo{
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
},
|
||||
out: models.PackageFixStatuses{},
|
||||
},
|
||||
}
|
||||
|
||||
r := RedHat{}
|
||||
for i, tt := range tests {
|
||||
out := r.mergePackageStates(tt.in, tt.pkgstats, tt.installed, tt.release)
|
||||
if ok := reflect.DeepEqual(tt.out, out); !ok {
|
||||
t.Errorf("[%d]\nexpected: %v:%T\n actual: %v:%T\n", i, tt.out, tt.out, out, out)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,113 +0,0 @@
// +build !scanner

package gost

import (
	"strings"

	"github.com/future-architect/vuls/models"
	"github.com/knqyf263/gost/db"
	gostmodels "github.com/knqyf263/gost/models"
)

// Microsoft is Gost client for windows
type Microsoft struct {
	Base
}

// DetectUnfixed fills cve information that has in Gost
func (ms Microsoft) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (nCVEs int, err error) {
	if driver == nil {
		return 0, nil
	}
	cveIDs := []string{}
	for cveID := range r.ScannedCves {
		cveIDs = append(cveIDs, cveID)
	}
	for cveID, msCve := range driver.GetMicrosoftMulti(cveIDs) {
		if _, ok := r.ScannedCves[cveID]; !ok {
			continue
		}
		cveCont, mitigations := ms.ConvertToModel(&msCve)
		v, _ := r.ScannedCves[cveID]
		if v.CveContents == nil {
			v.CveContents = models.CveContents{}
		}
		v.CveContents[models.Microsoft] = *cveCont
		v.Mitigations = append(v.Mitigations, mitigations...)
		r.ScannedCves[cveID] = v
	}
	return len(cveIDs), nil
}

// ConvertToModel converts gost model to vuls model
func (ms Microsoft) ConvertToModel(cve *gostmodels.MicrosoftCVE) (*models.CveContent, []models.Mitigation) {
	v3score := 0.0
	var v3Vector string
	for _, scoreSet := range cve.ScoreSets {
		if v3score < scoreSet.BaseScore {
			v3score = scoreSet.BaseScore
			v3Vector = scoreSet.Vector
		}
	}

	var v3Severity string
	for _, s := range cve.Severity {
		v3Severity = s.Description
	}

	var refs []models.Reference
	for _, r := range cve.References {
		if r.AttrType == "External" {
			refs = append(refs, models.Reference{Link: r.URL})
		}
	}

	var cwe []string
	if 0 < len(cve.CWE) {
		cwe = []string{cve.CWE}
	}

	option := map[string]string{}
	if 0 < len(cve.ExploitStatus) {
		option["exploit"] = cve.ExploitStatus
	}
	if 0 < len(cve.Workaround) {
		option["workaround"] = cve.Workaround
	}
	kbids := []string{}
	for _, kbid := range cve.KBIDs {
		kbids = append(kbids, kbid.KBID)
	}
	if 0 < len(kbids) {
		option["kbids"] = strings.Join(kbids, ",")
	}

	vendorURL := "https://msrc.microsoft.com/update-guide/vulnerability/" + cve.CveID
	mitigations := []models.Mitigation{}
	if cve.Mitigation != "" {
		mitigations = []models.Mitigation{
			{
				CveContentType: models.Microsoft,
				Mitigation:     cve.Mitigation,
				URL:            vendorURL,
			},
		}
	}

	return &models.CveContent{
		Type:          models.Microsoft,
		CveID:         cve.CveID,
		Title:         cve.Title,
		Summary:       cve.Description,
		Cvss3Score:    v3score,
		Cvss3Vector:   v3Vector,
		Cvss3Severity: v3Severity,
		References:    refs,
		CweIDs:        cwe,
		Published:     cve.PublishDate,
		LastModified:  cve.LastUpdateDate,
		SourceLink:    vendorURL,
		Optional:      option,
	}, mitigations
}
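A minimal standalone sketch of the score selection performed in ConvertToModel above: the highest CVSS v3 base score wins and its vector string is carried along with it. The scoreSet type and the vector strings here are hypothetical stand-ins, not the gost model types.

package main

import "fmt"

// scoreSet is a hypothetical stand-in for the ScoreSet entries iterated above.
type scoreSet struct {
	BaseScore float64
	Vector    string
}

// maxScore mirrors the selection loop in ConvertToModel: keep the highest base
// score seen so far, together with its vector.
func maxScore(sets []scoreSet) (score float64, vector string) {
	for _, s := range sets {
		if score < s.BaseScore {
			score, vector = s.BaseScore, s.Vector
		}
	}
	return
}

func main() {
	sets := []scoreSet{
		{BaseScore: 5.4, Vector: "vector-A (placeholder)"},
		{BaseScore: 8.8, Vector: "vector-B (placeholder)"},
	}
	fmt.Println(maxScore(sets)) // 8.8 vector-B (placeholder)
}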
@@ -1,18 +0,0 @@
// +build !scanner

package gost

import (
	"github.com/future-architect/vuls/models"
	"github.com/knqyf263/gost/db"
)

// Pseudo is Gost client except for RedHat family and Debian
type Pseudo struct {
	Base
}

// DetectUnfixed fills cve information that has in Gost
func (pse Pseudo) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (int, error) {
	return 0, nil
}
259
gost/redhat.go
@@ -1,259 +0,0 @@
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/knqyf263/gost/db"
|
||||
gostmodels "github.com/knqyf263/gost/models"
|
||||
)
|
||||
|
||||
// RedHat is Gost client for RedHat family linux
|
||||
type RedHat struct {
|
||||
Base
|
||||
}
|
||||
|
||||
// DetectUnfixed fills cve information that has in Gost
|
||||
func (red RedHat) DetectUnfixed(driver db.DB, r *models.ScanResult, ignoreWillNotFix bool) (nCVEs int, err error) {
|
||||
return red.detectUnfixed(driver, r, ignoreWillNotFix)
|
||||
}
|
||||
|
||||
func (red RedHat) detectUnfixed(driver db.DB, r *models.ScanResult, ignoreWillNotFix bool) (nCVEs int, err error) {
|
||||
if config.Conf.Gost.IsFetchViaHTTP() {
|
||||
prefix, _ := util.URLPathJoin(config.Conf.Gost.URL,
|
||||
"redhat", major(r.Release), "pkgs")
|
||||
responses, err := getAllUnfixedCvesViaHTTP(r, prefix)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
for _, res := range responses {
|
||||
// CVE-ID: RedhatCVE
|
||||
cves := map[string]gostmodels.RedhatCVE{}
|
||||
if err := json.Unmarshal([]byte(res.json), &cves); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
for _, cve := range cves {
|
||||
if newly := red.setUnfixedCveToScanResult(&cve, r); newly {
|
||||
nCVEs++
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if driver == nil {
|
||||
return 0, nil
|
||||
}
|
||||
for _, pack := range r.Packages {
|
||||
// CVE-ID: RedhatCVE
|
||||
cves := driver.GetUnfixedCvesRedhat(major(r.Release), pack.Name, ignoreWillNotFix)
|
||||
for _, cve := range cves {
|
||||
if newly := red.setUnfixedCveToScanResult(&cve, r); newly {
|
||||
nCVEs++
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nCVEs, nil
|
||||
}
|
||||
|
||||
func (red RedHat) fillCvesWithRedHatAPI(driver db.DB, r *models.ScanResult) error {
|
||||
cveIDs := []string{}
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if _, ok := vuln.CveContents[models.RedHatAPI]; ok {
|
||||
continue
|
||||
}
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
|
||||
if config.Conf.Gost.IsFetchViaHTTP() {
|
||||
prefix, _ := util.URLPathJoin(config.Conf.Gost.URL,
|
||||
"redhat", "cves")
|
||||
responses, err := getCvesViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, res := range responses {
|
||||
redCve := gostmodels.RedhatCVE{}
|
||||
if err := json.Unmarshal([]byte(res.json), &redCve); err != nil {
|
||||
return err
|
||||
}
|
||||
if redCve.ID == 0 {
|
||||
continue
|
||||
}
|
||||
red.setFixedCveToScanResult(&redCve, r)
|
||||
}
|
||||
} else {
|
||||
if driver == nil {
|
||||
return nil
|
||||
}
|
||||
for _, redCve := range driver.GetRedhatMulti(cveIDs) {
|
||||
if len(redCve.Name) == 0 {
|
||||
continue
|
||||
}
|
||||
red.setFixedCveToScanResult(&redCve, r)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (red RedHat) setFixedCveToScanResult(cve *gostmodels.RedhatCVE, r *models.ScanResult) {
|
||||
cveCont, mitigations := red.ConvertToModel(cve)
|
||||
v, ok := r.ScannedCves[cveCont.CveID]
|
||||
if ok {
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(*cveCont)
|
||||
} else {
|
||||
v.CveContents[models.RedHatAPI] = *cveCont
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
CveID: cveCont.CveID,
|
||||
CveContents: models.NewCveContents(*cveCont),
|
||||
Confidences: models.Confidences{models.RedHatAPIMatch},
|
||||
}
|
||||
}
|
||||
v.Mitigations = append(v.Mitigations, mitigations...)
|
||||
r.ScannedCves[cveCont.CveID] = v
|
||||
}
|
||||
|
||||
func (red RedHat) setUnfixedCveToScanResult(cve *gostmodels.RedhatCVE, r *models.ScanResult) (newly bool) {
|
||||
cveCont, mitigations := red.ConvertToModel(cve)
|
||||
v, ok := r.ScannedCves[cve.Name]
|
||||
if ok {
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(*cveCont)
|
||||
} else {
|
||||
v.CveContents[models.RedHatAPI] = *cveCont
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
CveID: cveCont.CveID,
|
||||
CveContents: models.NewCveContents(*cveCont),
|
||||
Confidences: models.Confidences{models.RedHatAPIMatch},
|
||||
}
|
||||
newly = true
|
||||
}
|
||||
v.Mitigations = append(v.Mitigations, mitigations...)
|
||||
pkgStats := red.mergePackageStates(v,
|
||||
cve.PackageState, r.Packages, r.Release)
|
||||
if 0 < len(pkgStats) {
|
||||
v.AffectedPackages = pkgStats
|
||||
r.ScannedCves[cve.Name] = v
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (red RedHat) mergePackageStates(v models.VulnInfo, ps []gostmodels.RedhatPackageState, installed models.Packages, release string) (pkgStats models.PackageFixStatuses) {
|
||||
pkgStats = v.AffectedPackages
|
||||
for _, pstate := range ps {
|
||||
if pstate.Cpe !=
|
||||
"cpe:/o:redhat:enterprise_linux:"+major(release) {
|
||||
return
|
||||
}
|
||||
|
||||
if !(pstate.FixState == "Will not fix" ||
|
||||
pstate.FixState == "Fix deferred" ||
|
||||
pstate.FixState == "Affected") {
|
||||
return
|
||||
}
|
||||
|
||||
if _, ok := installed[pstate.PackageName]; !ok {
|
||||
return
|
||||
}
|
||||
|
||||
notFixedYet := false
|
||||
switch pstate.FixState {
|
||||
case "Will not fix", "Fix deferred", "Affected":
|
||||
notFixedYet = true
|
||||
}
|
||||
|
||||
pkgStats = pkgStats.Store(models.PackageFixStatus{
|
||||
Name: pstate.PackageName,
|
||||
FixState: pstate.FixState,
|
||||
NotFixedYet: notFixedYet,
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (red RedHat) parseCwe(str string) (cwes []string) {
|
||||
if str != "" {
|
||||
s := strings.Replace(str, "(", "|", -1)
|
||||
s = strings.Replace(s, ")", "|", -1)
|
||||
s = strings.Replace(s, "->", "|", -1)
|
||||
for _, s := range strings.Split(s, "|") {
|
||||
if s != "" {
|
||||
cwes = append(cwes, s)
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// ConvertToModel converts gost model to vuls model
|
||||
func (red RedHat) ConvertToModel(cve *gostmodels.RedhatCVE) (*models.CveContent, []models.Mitigation) {
|
||||
cwes := red.parseCwe(cve.Cwe)
|
||||
|
||||
details := []string{}
|
||||
for _, detail := range cve.Details {
|
||||
details = append(details, detail.Detail)
|
||||
}
|
||||
|
||||
v2score := 0.0
|
||||
if cve.Cvss.CvssBaseScore != "" {
|
||||
v2score, _ = strconv.ParseFloat(cve.Cvss.CvssBaseScore, 64)
|
||||
}
|
||||
v2severity := ""
|
||||
if v2score != 0 {
|
||||
v2severity = cve.ThreatSeverity
|
||||
}
|
||||
|
||||
v3score := 0.0
|
||||
if cve.Cvss3.Cvss3BaseScore != "" {
|
||||
v3score, _ = strconv.ParseFloat(cve.Cvss3.Cvss3BaseScore, 64)
|
||||
}
|
||||
v3severity := ""
|
||||
if v3score != 0 {
|
||||
v3severity = cve.ThreatSeverity
|
||||
}
|
||||
|
||||
refs := []models.Reference{}
|
||||
for _, r := range cve.References {
|
||||
refs = append(refs, models.Reference{Link: r.Reference})
|
||||
}
|
||||
|
||||
vendorURL := "https://access.redhat.com/security/cve/" + cve.Name
|
||||
mitigations := []models.Mitigation{}
|
||||
if cve.Mitigation != "" {
|
||||
mitigations = []models.Mitigation{
|
||||
{
|
||||
CveContentType: models.RedHatAPI,
|
||||
Mitigation: cve.Mitigation,
|
||||
URL: vendorURL,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return &models.CveContent{
|
||||
Type: models.RedHatAPI,
|
||||
CveID: cve.Name,
|
||||
Title: cve.Bugzilla.Description,
|
||||
Summary: strings.Join(details, "\n"),
|
||||
Cvss2Score: v2score,
|
||||
Cvss2Vector: cve.Cvss.CvssScoringVector,
|
||||
Cvss2Severity: v2severity,
|
||||
Cvss3Score: v3score,
|
||||
Cvss3Vector: cve.Cvss3.Cvss3ScoringVector,
|
||||
Cvss3Severity: v3severity,
|
||||
References: refs,
|
||||
CweIDs: cwes,
|
||||
Published: cve.PublicDate,
|
||||
SourceLink: vendorURL,
|
||||
}, mitigations
|
||||
}
|
||||
@@ -1,37 +0,0 @@
package gost

import (
	"reflect"
	"sort"
	"testing"
)

func TestParseCwe(t *testing.T) {
	var tests = []struct {
		in  string
		out []string
	}{
		{
			in:  "CWE-665->(CWE-200|CWE-89)",
			out: []string{"CWE-665", "CWE-200", "CWE-89"},
		},
		{
			in:  "CWE-841->CWE-770->CWE-454",
			out: []string{"CWE-841", "CWE-770", "CWE-454"},
		},
		{
			in:  "(CWE-122|CWE-125)",
			out: []string{"CWE-122", "CWE-125"},
		},
	}

	r := RedHat{}
	for i, tt := range tests {
		out := r.parseCwe(tt.in)
		sort.Strings(out)
		sort.Strings(tt.out)
		if !reflect.DeepEqual(tt.out, out) {
			t.Errorf("[%d]expected: %s, actual: %s", i, tt.out, out)
		}
	}
}
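A minimal standalone sketch of the delimiter-normalizing split that this test pins down; the code under test is parseCwe in gost/redhat.go, and splitCweChain below is an illustrative re-implementation, not the production function.

package main

import (
	"fmt"
	"strings"
)

// splitCweChain normalizes the RedHat CWE-chain delimiters "->", "(" and ")" to "|",
// then splits and drops empty segments, matching the expectations in TestParseCwe.
func splitCweChain(str string) (cwes []string) {
	s := strings.NewReplacer("(", "|", ")", "|", "->", "|").Replace(str)
	for _, part := range strings.Split(s, "|") {
		if part != "" {
			cwes = append(cwes, part)
		}
	}
	return
}

func main() {
	fmt.Println(splitCweChain("CWE-665->(CWE-200|CWE-89)")) // [CWE-665 CWE-200 CWE-89]
}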
188
gost/util.go
@@ -1,188 +0,0 @@
|
||||
package gost
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
type response struct {
|
||||
request request
|
||||
json string
|
||||
}
|
||||
|
||||
func getCvesViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []response, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan request, nReq)
|
||||
resChan := make(chan response, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- request{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
select {
|
||||
case req := <-reqChan:
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
util.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGet(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching OVAL")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch OVAL. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type request struct {
|
||||
osMajorVersion string
|
||||
packName string
|
||||
isSrcPack bool
|
||||
cveID string
|
||||
}
|
||||
|
||||
func getAllUnfixedCvesViaHTTP(r *models.ScanResult, urlPrefix string) (
|
||||
responses []response, err error) {
|
||||
|
||||
nReq := len(r.Packages) + len(r.SrcPackages)
|
||||
reqChan := make(chan request, nReq)
|
||||
resChan := make(chan response, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, pack := range r.Packages {
|
||||
reqChan <- request{
|
||||
osMajorVersion: major(r.Release),
|
||||
packName: pack.Name,
|
||||
isSrcPack: false,
|
||||
}
|
||||
}
|
||||
for _, pack := range r.SrcPackages {
|
||||
reqChan <- request{
|
||||
osMajorVersion: major(r.Release),
|
||||
packName: pack.Name,
|
||||
isSrcPack: true,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
select {
|
||||
case req := <-reqChan:
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.packName,
|
||||
"unfixed-cves",
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
util.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGet(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching OVAL")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch OVAL. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func httpGet(url string, req request, resChan chan<- response, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %w", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
util.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %s", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- response{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
|
||||
|
||||
func major(osVer string) (majorVersion string) {
|
||||
return strings.Split(osVer, ".")[0]
|
||||
}
|
||||
Binary file not shown. (before: 297 KiB)
Binary file not shown. (before: 36 KiB)
Binary file not shown. (before: 198 KiB)
Binary file not shown. (before: 19 KiB)
Binary file not shown. (before: 8.8 KiB)
Binary file not shown. (before: 56 KiB)
@@ -1,112 +0,0 @@
|
||||
package libmanager
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
db2 "github.com/aquasecurity/trivy-db/pkg/db"
|
||||
"github.com/aquasecurity/trivy/pkg/db"
|
||||
"github.com/aquasecurity/trivy/pkg/github"
|
||||
"github.com/aquasecurity/trivy/pkg/indicator"
|
||||
"github.com/aquasecurity/trivy/pkg/log"
|
||||
"github.com/spf13/afero"
|
||||
"golang.org/x/xerrors"
|
||||
"k8s.io/utils/clock"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
)
|
||||
|
||||
// DetectLibsCves fills LibraryScanner information
|
||||
func DetectLibsCves(r *models.ScanResult) (err error) {
|
||||
totalCnt := 0
|
||||
if len(r.LibraryScanners) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// initialize trivy's logger and db
|
||||
err = log.InitLogger(false, false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
util.Log.Info("Updating library db...")
|
||||
if err := downloadDB(config.Version, config.Conf.TrivyCacheDBDir, config.Conf.NoProgress, false, false); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := db2.Init(config.Conf.TrivyCacheDBDir); err != nil {
|
||||
return err
|
||||
}
|
||||
defer db2.Close()
|
||||
|
||||
for _, lib := range r.LibraryScanners {
|
||||
vinfos, err := lib.Scan()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, vinfo := range vinfos {
|
||||
vinfo.Confidences.AppendIfMissing(models.TrivyMatch)
|
||||
if v, ok := r.ScannedCves[vinfo.CveID]; !ok {
|
||||
r.ScannedCves[vinfo.CveID] = vinfo
|
||||
} else {
|
||||
v.LibraryFixedIns = append(v.LibraryFixedIns, vinfo.LibraryFixedIns...)
|
||||
r.ScannedCves[vinfo.CveID] = v
|
||||
}
|
||||
}
|
||||
totalCnt += len(vinfos)
|
||||
}
|
||||
|
||||
util.Log.Infof("%s: %d CVEs are detected with Library",
|
||||
r.FormatServerName(), totalCnt)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func downloadDB(appVersion, cacheDir string, quiet, light, skipUpdate bool) error {
|
||||
client := initializeDBClient(cacheDir, quiet)
|
||||
ctx := context.Background()
|
||||
needsUpdate, err := client.NeedsUpdate(appVersion, light, skipUpdate)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("database error: %w", err)
|
||||
}
|
||||
|
||||
if needsUpdate {
|
||||
util.Log.Info("Need to update DB")
|
||||
util.Log.Info("Downloading DB...")
|
||||
if err := client.Download(ctx, cacheDir, light); err != nil {
|
||||
return xerrors.Errorf("failed to download vulnerability DB: %w", err)
|
||||
}
|
||||
if err = client.UpdateMetadata(cacheDir); err != nil {
|
||||
return xerrors.Errorf("unable to update database metadata: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// for debug
|
||||
if err := showDBInfo(cacheDir); err != nil {
|
||||
return xerrors.Errorf("failed to show database info: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func initializeDBClient(cacheDir string, quiet bool) db.Client {
|
||||
config := db2.Config{}
|
||||
client := github.NewClient()
|
||||
progressBar := indicator.NewProgressBar(quiet)
|
||||
realClock := clock.RealClock{}
|
||||
fs := afero.NewOsFs()
|
||||
metadata := db.NewMetadata(fs, cacheDir)
|
||||
dbClient := db.NewClient(config, client, progressBar, realClock, metadata)
|
||||
return dbClient
|
||||
}
|
||||
|
||||
func showDBInfo(cacheDir string) error {
|
||||
m := db.NewMetadata(afero.NewOsFs(), cacheDir)
|
||||
metadata, err := m.Get()
|
||||
if err != nil {
|
||||
return xerrors.Errorf("something wrong with DB: %w", err)
|
||||
}
|
||||
util.Log.Debugf("DB Schema: %d, Type: %d, UpdatedAt: %s, NextUpdate: %s",
|
||||
metadata.Version, metadata.Type, metadata.UpdatedAt, metadata.NextUpdate)
|
||||
return nil
|
||||
}
|
||||
@@ -1,354 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability"
|
||||
)
|
||||
|
||||
// CveContents has CveContent
|
||||
type CveContents map[CveContentType]CveContent
|
||||
|
||||
// NewCveContents create CveContents
|
||||
func NewCveContents(conts ...CveContent) CveContents {
|
||||
m := CveContents{}
|
||||
for _, cont := range conts {
|
||||
m[cont.Type] = cont
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// CveContentStr has CveContentType and Value
|
||||
type CveContentStr struct {
|
||||
Type CveContentType
|
||||
Value string
|
||||
}
|
||||
|
||||
// Except returns CveContents except given keys for enumeration
|
||||
func (v CveContents) Except(exceptCtypes ...CveContentType) (values CveContents) {
|
||||
values = CveContents{}
|
||||
for ctype, content := range v {
|
||||
found := false
|
||||
for _, exceptCtype := range exceptCtypes {
|
||||
if ctype == exceptCtype {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
values[ctype] = content
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// PrimarySrcURLs returns link of source
|
||||
func (v CveContents) PrimarySrcURLs(lang, myFamily, cveID string) (values []CveContentStr) {
|
||||
if cveID == "" {
|
||||
return
|
||||
}
|
||||
|
||||
if cont, found := v[Nvd]; found {
|
||||
for _, r := range cont.References {
|
||||
for _, t := range r.Tags {
|
||||
if t == "Vendor Advisory" {
|
||||
values = append(values, CveContentStr{Nvd, r.Link})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
order := CveContentTypes{Nvd, NewCveContentType(myFamily)}
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found {
|
||||
if cont.SourceLink == "" {
|
||||
continue
|
||||
}
|
||||
values = append(values, CveContentStr{ctype, cont.SourceLink})
|
||||
}
|
||||
}
|
||||
|
||||
if lang == "ja" {
|
||||
if cont, found := v[Jvn]; found && 0 < len(cont.SourceLink) {
|
||||
values = append(values, CveContentStr{Jvn, cont.SourceLink})
|
||||
}
|
||||
}
|
||||
|
||||
if len(values) == 0 {
|
||||
return []CveContentStr{{
|
||||
Type: Nvd,
|
||||
Value: "https://nvd.nist.gov/vuln/detail/" + cveID,
|
||||
}}
|
||||
}
|
||||
return values
|
||||
}
|
||||
|
||||
// PatchURLs returns link of patch
|
||||
func (v CveContents) PatchURLs() (urls []string) {
|
||||
cont, found := v[Nvd]
|
||||
if !found {
|
||||
return
|
||||
}
|
||||
for _, r := range cont.References {
|
||||
for _, t := range r.Tags {
|
||||
if t == "Patch" {
|
||||
urls = append(urls, r.Link)
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
/*
|
||||
// Severities returns Severities
|
||||
func (v CveContents) Severities(myFamily string) (values []CveContentStr) {
|
||||
order := CveContentTypes{NVD, NewCveContentType(myFamily)}
|
||||
order = append(order, AllCveContetTypes.Except(append(order)...)...)
|
||||
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found && 0 < len(cont.Severity) {
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: cont.Severity,
|
||||
})
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
*/
|
||||
|
||||
// CveContentCpes has CveContentType and Value
|
||||
type CveContentCpes struct {
|
||||
Type CveContentType
|
||||
Value []Cpe
|
||||
}
|
||||
|
||||
// Cpes returns affected CPEs of this Vulnerability
|
||||
func (v CveContents) Cpes(myFamily string) (values []CveContentCpes) {
|
||||
order := CveContentTypes{NewCveContentType(myFamily)}
|
||||
order = append(order, AllCveContetTypes.Except(order...)...)
|
||||
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found && 0 < len(cont.Cpes) {
|
||||
values = append(values, CveContentCpes{
|
||||
Type: ctype,
|
||||
Value: cont.Cpes,
|
||||
})
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// CveContentRefs has CveContentType and Cpes
|
||||
type CveContentRefs struct {
|
||||
Type CveContentType
|
||||
Value []Reference
|
||||
}
|
||||
|
||||
// References returns References
|
||||
func (v CveContents) References(myFamily string) (values []CveContentRefs) {
|
||||
order := CveContentTypes{NewCveContentType(myFamily)}
|
||||
order = append(order, AllCveContetTypes.Except(order...)...)
|
||||
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found && 0 < len(cont.References) {
|
||||
values = append(values, CveContentRefs{
|
||||
Type: ctype,
|
||||
Value: cont.References,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// CweIDs returns related CweIDs of the vulnerability
|
||||
func (v CveContents) CweIDs(myFamily string) (values []CveContentStr) {
|
||||
order := CveContentTypes{NewCveContentType(myFamily)}
|
||||
order = append(order, AllCveContetTypes.Except(order...)...)
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found && 0 < len(cont.CweIDs) {
|
||||
for _, cweID := range cont.CweIDs {
|
||||
for _, val := range values {
|
||||
if val.Value == cweID {
|
||||
continue
|
||||
}
|
||||
}
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: cweID,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// UniqCweIDs returns Uniq CweIDs
|
||||
func (v CveContents) UniqCweIDs(myFamily string) (values []CveContentStr) {
|
||||
uniq := map[string]CveContentStr{}
|
||||
for _, cwes := range v.CweIDs(myFamily) {
|
||||
uniq[cwes.Value] = cwes
|
||||
}
|
||||
for _, cwe := range uniq {
|
||||
values = append(values, cwe)
|
||||
}
|
||||
return values
|
||||
}
|
||||
|
||||
// CveContent has abstraction of various vulnerability information
|
||||
type CveContent struct {
|
||||
Type CveContentType `json:"type"`
|
||||
CveID string `json:"cveID"`
|
||||
Title string `json:"title"`
|
||||
Summary string `json:"summary"`
|
||||
Cvss2Score float64 `json:"cvss2Score"`
|
||||
Cvss2Vector string `json:"cvss2Vector"`
|
||||
Cvss2Severity string `json:"cvss2Severity"`
|
||||
Cvss3Score float64 `json:"cvss3Score"`
|
||||
Cvss3Vector string `json:"cvss3Vector"`
|
||||
Cvss3Severity string `json:"cvss3Severity"`
|
||||
SourceLink string `json:"sourceLink"`
|
||||
Cpes []Cpe `json:"cpes,omitempty"`
|
||||
References References `json:"references,omitempty"`
|
||||
CweIDs []string `json:"cweIDs,omitempty"`
|
||||
Published time.Time `json:"published"`
|
||||
LastModified time.Time `json:"lastModified"`
|
||||
Optional map[string]string `json:"optional,omitempty"`
|
||||
}
|
||||
|
||||
// Empty checks the content is empty
|
||||
func (c CveContent) Empty() bool {
|
||||
return c.Summary == ""
|
||||
}
|
||||
|
||||
// CveContentType is a source of CVE information
|
||||
type CveContentType string
|
||||
|
||||
// NewCveContentType create CveContentType
|
||||
func NewCveContentType(name string) CveContentType {
|
||||
switch name {
|
||||
case "nvd":
|
||||
return Nvd
|
||||
case "jvn":
|
||||
return Jvn
|
||||
case "redhat", "centos":
|
||||
return RedHat
|
||||
case "oracle":
|
||||
return Oracle
|
||||
case "ubuntu":
|
||||
return Ubuntu
|
||||
case "debian", vulnerability.DebianOVAL:
|
||||
return Debian
|
||||
case "redhat_api":
|
||||
return RedHatAPI
|
||||
case "debian_security_tracker":
|
||||
return DebianSecurityTracker
|
||||
case "microsoft":
|
||||
return Microsoft
|
||||
case "wordpress":
|
||||
return WpScan
|
||||
case "amazon":
|
||||
return Amazon
|
||||
case "trivy":
|
||||
return Trivy
|
||||
default:
|
||||
return Unknown
|
||||
}
|
||||
}
|
||||
|
||||
const (
|
||||
// Nvd is Nvd JSON
|
||||
Nvd CveContentType = "nvd"
|
||||
|
||||
// Jvn is Jvn
|
||||
Jvn CveContentType = "jvn"
|
||||
|
||||
// RedHat is RedHat
|
||||
RedHat CveContentType = "redhat"
|
||||
|
||||
// RedHatAPI is RedHat
|
||||
RedHatAPI CveContentType = "redhat_api"
|
||||
|
||||
// DebianSecurityTracker is Debian Security tracker
|
||||
DebianSecurityTracker CveContentType = "debian_security_tracker"
|
||||
|
||||
// Debian is Debian
|
||||
Debian CveContentType = "debian"
|
||||
|
||||
// Ubuntu is Ubuntu
|
||||
Ubuntu CveContentType = "ubuntu"
|
||||
|
||||
// Oracle is Oracle Linux
|
||||
Oracle CveContentType = "oracle"
|
||||
|
||||
// Amazon is Amazon Linux
|
||||
Amazon CveContentType = "amazon"
|
||||
|
||||
// SUSE is SUSE Linux
|
||||
SUSE CveContentType = "suse"
|
||||
|
||||
// Microsoft is Microsoft
|
||||
Microsoft CveContentType = "microsoft"
|
||||
|
||||
// WpScan is WordPress
|
||||
WpScan CveContentType = "wpscan"
|
||||
|
||||
// Trivy is Trivy
|
||||
Trivy CveContentType = "trivy"
|
||||
|
||||
// Unknown is Unknown
|
||||
Unknown CveContentType = "unknown"
|
||||
)
|
||||
|
||||
// CveContentTypes has slide of CveContentType
|
||||
type CveContentTypes []CveContentType
|
||||
|
||||
// AllCveContetTypes has all of CveContentTypes
|
||||
var AllCveContetTypes = CveContentTypes{
|
||||
Nvd,
|
||||
Jvn,
|
||||
RedHat,
|
||||
RedHatAPI,
|
||||
Debian,
|
||||
Ubuntu,
|
||||
Amazon,
|
||||
SUSE,
|
||||
DebianSecurityTracker,
|
||||
WpScan,
|
||||
Trivy,
|
||||
}
|
||||
|
||||
// Except returns CveContentTypes except for given args
|
||||
func (c CveContentTypes) Except(excepts ...CveContentType) (excepted CveContentTypes) {
|
||||
for _, ctype := range c {
|
||||
found := false
|
||||
for _, except := range excepts {
|
||||
if ctype == except {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
excepted = append(excepted, ctype)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Cpe is Common Platform Enumeration
|
||||
type Cpe struct {
|
||||
URI string `json:"uri"`
|
||||
FormattedString string `json:"formattedString"`
|
||||
}
|
||||
|
||||
// References is a slice of Reference
|
||||
type References []Reference
|
||||
|
||||
// Reference has a related link of the CVE
|
||||
type Reference struct {
|
||||
Link string `json:"link,omitempty"`
|
||||
Source string `json:"source,omitempty"`
|
||||
RefID string `json:"refID,omitempty"`
|
||||
Tags []string `json:"tags,omitempty"`
|
||||
}
|
||||
@@ -1,138 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestExcept(t *testing.T) {
|
||||
var tests = []struct {
|
||||
in CveContents
|
||||
out CveContents
|
||||
}{{
|
||||
in: CveContents{
|
||||
RedHat: {Type: RedHat},
|
||||
Ubuntu: {Type: Ubuntu},
|
||||
Debian: {Type: Debian},
|
||||
},
|
||||
out: CveContents{
|
||||
RedHat: {Type: RedHat},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
actual := tt.in.Except(Ubuntu, Debian)
|
||||
if !reflect.DeepEqual(tt.out, actual) {
|
||||
t.Errorf("\nexpected: %v\n actual: %v\n", tt.out, actual)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestSourceLinks(t *testing.T) {
|
||||
type in struct {
|
||||
lang string
|
||||
cveID string
|
||||
cont CveContents
|
||||
}
|
||||
var tests = []struct {
|
||||
in in
|
||||
out []CveContentStr
|
||||
}{
|
||||
// lang: ja
|
||||
{
|
||||
in: in{
|
||||
lang: "ja",
|
||||
cveID: "CVE-2017-6074",
|
||||
cont: CveContents{
|
||||
Jvn: {
|
||||
Type: Jvn,
|
||||
SourceLink: "https://jvn.jp/vu/JVNVU93610402/",
|
||||
},
|
||||
RedHat: {
|
||||
Type: RedHat,
|
||||
SourceLink: "https://access.redhat.com/security/cve/CVE-2017-6074",
|
||||
},
|
||||
Nvd: {
|
||||
Type: Nvd,
|
||||
References: []Reference{
|
||||
{
|
||||
Link: "https://lists.apache.org/thread.html/765be3606d865de513f6df9288842c3cf58b09a987c617a535f2b99d@%3Cusers.tapestry.apache.org%3E",
|
||||
Source: "",
|
||||
RefID: "",
|
||||
Tags: []string{"Vendor Advisory"},
|
||||
},
|
||||
{
|
||||
Link: "http://yahoo.com",
|
||||
Source: "",
|
||||
RefID: "",
|
||||
Tags: []string{"Vendor"},
|
||||
},
|
||||
},
|
||||
SourceLink: "https://nvd.nist.gov/vuln/detail/CVE-2017-6074",
|
||||
},
|
||||
},
|
||||
},
|
||||
out: []CveContentStr{
|
||||
{
|
||||
Type: Nvd,
|
||||
Value: "https://lists.apache.org/thread.html/765be3606d865de513f6df9288842c3cf58b09a987c617a535f2b99d@%3Cusers.tapestry.apache.org%3E",
|
||||
},
|
||||
{
|
||||
Type: Nvd,
|
||||
Value: "https://nvd.nist.gov/vuln/detail/CVE-2017-6074",
|
||||
},
|
||||
{
|
||||
Type: RedHat,
|
||||
Value: "https://access.redhat.com/security/cve/CVE-2017-6074",
|
||||
},
|
||||
{
|
||||
Type: Jvn,
|
||||
Value: "https://jvn.jp/vu/JVNVU93610402/",
|
||||
},
|
||||
},
|
||||
},
|
||||
// lang: en
|
||||
{
|
||||
in: in{
|
||||
lang: "en",
|
||||
cveID: "CVE-2017-6074",
|
||||
cont: CveContents{
|
||||
Jvn: {
|
||||
Type: Jvn,
|
||||
SourceLink: "https://jvn.jp/vu/JVNVU93610402/",
|
||||
},
|
||||
RedHat: {
|
||||
Type: RedHat,
|
||||
SourceLink: "https://access.redhat.com/security/cve/CVE-2017-6074",
|
||||
},
|
||||
},
|
||||
},
|
||||
out: []CveContentStr{
|
||||
{
|
||||
Type: RedHat,
|
||||
Value: "https://access.redhat.com/security/cve/CVE-2017-6074",
|
||||
},
|
||||
},
|
||||
},
|
||||
// lang: empty
|
||||
{
|
||||
in: in{
|
||||
lang: "en",
|
||||
cveID: "CVE-2017-6074",
|
||||
cont: CveContents{},
|
||||
},
|
||||
out: []CveContentStr{
|
||||
{
|
||||
Type: Nvd,
|
||||
Value: "https://nvd.nist.gov/vuln/detail/CVE-2017-6074",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for i, tt := range tests {
|
||||
actual := tt.in.cont.PrimarySrcURLs(tt.in.lang, "redhat", tt.in.cveID)
|
||||
if !reflect.DeepEqual(tt.out, actual) {
|
||||
t.Errorf("\n[%d] expected: %v\n actual: %v\n", i, tt.out, actual)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,145 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
|
||||
"github.com/aquasecurity/trivy-db/pkg/db"
|
||||
trivyDBTypes "github.com/aquasecurity/trivy-db/pkg/types"
|
||||
"github.com/aquasecurity/trivy/pkg/detector/library"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"golang.org/x/xerrors"
|
||||
// "github.com/aquasecurity/go-dep-parser/pkg/types"
|
||||
)
|
||||
|
||||
// LibraryScanners is an array of LibraryScanner
|
||||
type LibraryScanners []LibraryScanner
|
||||
|
||||
// Find : find by name
|
||||
func (lss LibraryScanners) Find(path, name string) map[string]types.Library {
|
||||
filtered := map[string]types.Library{}
|
||||
for _, ls := range lss {
|
||||
for _, lib := range ls.Libs {
|
||||
if ls.Path == path && lib.Name == name {
|
||||
filtered[ls.Path] = lib
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return filtered
|
||||
}
|
||||
|
||||
// Total returns total count of pkgs
|
||||
func (lss LibraryScanners) Total() (total int) {
|
||||
for _, lib := range lss {
|
||||
total += len(lib.Libs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// LibraryScanner has libraries information
|
||||
type LibraryScanner struct {
|
||||
Path string
|
||||
Libs []types.Library
|
||||
}
|
||||
|
||||
// Scan : scan target library
|
||||
func (s LibraryScanner) Scan() ([]VulnInfo, error) {
|
||||
scanner, err := library.DriverFactory{}.NewDriver(filepath.Base(string(s.Path)))
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to new a library driver: %w", err)
|
||||
}
|
||||
var vulnerabilities = []VulnInfo{}
|
||||
for _, pkg := range s.Libs {
|
||||
tvulns, err := scanner.Detect(pkg.Name, pkg.Version)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("failed to detect %s vulnerabilities: %w", scanner.Type(), err)
|
||||
}
|
||||
if len(tvulns) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
vulns := s.convertFanalToVuln(tvulns)
|
||||
vulnerabilities = append(vulnerabilities, vulns...)
|
||||
}
|
||||
|
||||
return vulnerabilities, nil
|
||||
}
|
||||
|
||||
func (s LibraryScanner) convertFanalToVuln(tvulns []types.DetectedVulnerability) (vulns []VulnInfo) {
|
||||
for _, tvuln := range tvulns {
|
||||
vinfo, err := s.getVulnDetail(tvuln)
|
||||
if err != nil {
|
||||
util.Log.Debugf("failed to getVulnDetail. err: %s, tvuln: %#v", err, tvuln)
|
||||
continue
|
||||
}
|
||||
vulns = append(vulns, vinfo)
|
||||
}
|
||||
return vulns
|
||||
}
|
||||
|
||||
func (s LibraryScanner) getVulnDetail(tvuln types.DetectedVulnerability) (vinfo VulnInfo, err error) {
|
||||
vul, err := db.Config{}.GetVulnerability(tvuln.VulnerabilityID)
|
||||
if err != nil {
|
||||
return vinfo, err
|
||||
}
|
||||
|
||||
vinfo.CveID = tvuln.VulnerabilityID
|
||||
vinfo.CveContents = getCveContents(tvuln.VulnerabilityID, vul)
|
||||
if tvuln.FixedVersion != "" {
|
||||
vinfo.LibraryFixedIns = []LibraryFixedIn{
|
||||
{
|
||||
Key: s.GetLibraryKey(),
|
||||
Name: tvuln.PkgName,
|
||||
FixedIn: tvuln.FixedVersion,
|
||||
Path: s.Path,
|
||||
},
|
||||
}
|
||||
}
|
||||
return vinfo, nil
|
||||
}
|
||||
|
||||
func getCveContents(cveID string, vul trivyDBTypes.Vulnerability) (contents map[CveContentType]CveContent) {
|
||||
contents = map[CveContentType]CveContent{}
|
||||
refs := []Reference{}
|
||||
for _, refURL := range vul.References {
|
||||
refs = append(refs, Reference{Source: "trivy", Link: refURL})
|
||||
}
|
||||
|
||||
content := CveContent{
|
||||
Type: Trivy,
|
||||
CveID: cveID,
|
||||
Title: vul.Title,
|
||||
Summary: vul.Description,
|
||||
Cvss3Severity: string(vul.Severity),
|
||||
References: refs,
|
||||
}
|
||||
contents[Trivy] = content
|
||||
return contents
|
||||
}
|
||||
|
||||
// LibraryMap is filename and library type
|
||||
var LibraryMap = map[string]string{
|
||||
"package-lock.json": "node",
|
||||
"yarn.lock": "node",
|
||||
"Gemfile.lock": "ruby",
|
||||
"Cargo.lock": "rust",
|
||||
"composer.lock": "php",
|
||||
"Pipfile.lock": "python",
|
||||
"poetry.lock": "python",
|
||||
}
|
||||
|
||||
// GetLibraryKey returns target library key
|
||||
func (s LibraryScanner) GetLibraryKey() string {
|
||||
fileName := filepath.Base(s.Path)
|
||||
return LibraryMap[fileName]
|
||||
}
|
||||
|
||||
// LibraryFixedIn has library fixed information
|
||||
type LibraryFixedIn struct {
|
||||
Key string `json:"key,omitempty"`
|
||||
Name string `json:"name,omitempty"`
|
||||
FixedIn string `json:"fixedIn,omitempty"`
|
||||
Path string `json:"path,omitempty"`
|
||||
}
|
||||
@@ -1,96 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
)
|
||||
|
||||
func TestLibraryScanners_Find(t *testing.T) {
|
||||
type args struct {
|
||||
path string
|
||||
name string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
lss LibraryScanners
|
||||
args args
|
||||
want map[string]types.Library
|
||||
}{
|
||||
{
|
||||
name: "single file",
|
||||
lss: LibraryScanners{
|
||||
{
|
||||
Path: "/pathA",
|
||||
Libs: []types.Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
args: args{"/pathA", "libA"},
|
||||
want: map[string]types.Library{
|
||||
"/pathA": {
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "multi file",
|
||||
lss: LibraryScanners{
|
||||
{
|
||||
Path: "/pathA",
|
||||
Libs: []types.Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "/pathB",
|
||||
Libs: []types.Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.5",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
args: args{"/pathA", "libA"},
|
||||
want: map[string]types.Library{
|
||||
"/pathA": {
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "miss",
|
||||
lss: LibraryScanners{
|
||||
{
|
||||
Path: "/pathA",
|
||||
Libs: []types.Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
args: args{"/pathA", "libB"},
|
||||
want: map[string]types.Library{},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := tt.lss.Find(tt.args.path, tt.args.name); !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("LibraryScanners.Find() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,4 +0,0 @@
package models

// JSONVersion is JSON Version
const JSONVersion = 4
@@ -1,290 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// Packages is Map of Package
|
||||
// { "package-name": Package }
|
||||
type Packages map[string]Package
|
||||
|
||||
// NewPackages create Packages
|
||||
func NewPackages(packs ...Package) Packages {
|
||||
m := Packages{}
|
||||
for _, pack := range packs {
|
||||
m[pack.Name] = pack
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// MergeNewVersion merges candidate version information to the receiver struct
|
||||
func (ps Packages) MergeNewVersion(as Packages) {
|
||||
for name, pack := range ps {
|
||||
pack.NewVersion = pack.Version
|
||||
pack.NewRelease = pack.Release
|
||||
ps[name] = pack
|
||||
}
|
||||
|
||||
for _, a := range as {
|
||||
if pack, ok := ps[a.Name]; ok {
|
||||
pack.NewVersion = a.NewVersion
|
||||
pack.NewRelease = a.NewRelease
|
||||
pack.Repository = a.Repository
|
||||
ps[a.Name] = pack
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Merge returns merged map (immutable)
|
||||
func (ps Packages) Merge(other Packages) Packages {
|
||||
merged := Packages{}
|
||||
for k, v := range ps {
|
||||
merged[k] = v
|
||||
}
|
||||
for k, v := range other {
|
||||
merged[k] = v
|
||||
}
|
||||
return merged
|
||||
}
|
||||
|
||||
// FindOne search a element
|
||||
func (ps Packages) FindOne(f func(Package) bool) (string, Package, bool) {
|
||||
for key, p := range ps {
|
||||
if f(p) {
|
||||
return key, p, true
|
||||
}
|
||||
}
|
||||
return "", Package{}, false
|
||||
}
|
||||
|
||||
// FindByFQPN search a package by Fully-Qualified-Package-Name
|
||||
func (ps Packages) FindByFQPN(nameVerRelArc string) (*Package, error) {
|
||||
for _, p := range ps {
|
||||
if nameVerRelArc == p.FQPN() {
|
||||
return &p, nil
|
||||
}
|
||||
}
|
||||
return nil, xerrors.Errorf("Failed to find the package: %s", nameVerRelArc)
|
||||
}
|
||||
|
||||
// Package has installed binary packages.
|
||||
type Package struct {
|
||||
Name string `json:"name"`
|
||||
Version string `json:"version"`
|
||||
Release string `json:"release"`
|
||||
NewVersion string `json:"newVersion"`
|
||||
NewRelease string `json:"newRelease"`
|
||||
Arch string `json:"arch"`
|
||||
Repository string `json:"repository"`
|
||||
Changelog Changelog `json:"changelog"`
|
||||
AffectedProcs []AffectedProcess `json:",omitempty"`
|
||||
NeedRestartProcs []NeedRestartProcess `json:",omitempty"`
|
||||
}
|
||||
|
||||
// FQPN returns Fully-Qualified-Package-Name
|
||||
// name-version-release.arch
|
||||
func (p Package) FQPN() string {
|
||||
fqpn := p.Name
|
||||
if p.Version != "" {
|
||||
fqpn += fmt.Sprintf("-%s", p.Version)
|
||||
}
|
||||
if p.Release != "" {
|
||||
fqpn += fmt.Sprintf("-%s", p.Release)
|
||||
}
|
||||
if p.Arch != "" {
|
||||
fqpn += fmt.Sprintf(".%s", p.Arch)
|
||||
}
|
||||
return fqpn
|
||||
}
|
||||
|
||||
// FormatVer returns package version-release
|
||||
func (p Package) FormatVer() string {
|
||||
ver := p.Version
|
||||
if 0 < len(p.Release) {
|
||||
ver = fmt.Sprintf("%s-%s", ver, p.Release)
|
||||
}
|
||||
return ver
|
||||
}
|
||||
|
||||
// FormatNewVer returns package version-release
|
||||
func (p Package) FormatNewVer() string {
|
||||
ver := p.NewVersion
|
||||
if 0 < len(p.NewRelease) {
|
||||
ver = fmt.Sprintf("%s-%s", ver, p.NewRelease)
|
||||
}
|
||||
return ver
|
||||
}
|
||||
|
||||
// FormatVersionFromTo formats installed and new package version
|
||||
func (p Package) FormatVersionFromTo(stat PackageFixStatus) string {
|
||||
to := p.FormatNewVer()
|
||||
if stat.NotFixedYet {
|
||||
if stat.FixState != "" {
|
||||
to = stat.FixState
|
||||
} else {
|
||||
to = "Not Fixed Yet"
|
||||
}
|
||||
} else if p.NewVersion == "" {
|
||||
to = "Unknown"
|
||||
}
|
||||
var fixedIn string
|
||||
if stat.FixedIn != "" {
|
||||
fixedIn = fmt.Sprintf(" (FixedIn: %s)", stat.FixedIn)
|
||||
}
|
||||
return fmt.Sprintf("%s-%s -> %s%s",
|
||||
p.Name, p.FormatVer(), to, fixedIn)
|
||||
}
|
||||
|
||||
// FormatChangelog formats the changelog
|
||||
func (p Package) FormatChangelog() string {
|
||||
buf := []string{}
|
||||
packVer := fmt.Sprintf("%s-%s -> %s",
|
||||
p.Name, p.FormatVer(), p.FormatNewVer())
|
||||
var delim bytes.Buffer
|
||||
for i := 0; i < len(packVer); i++ {
|
||||
delim.WriteString("-")
|
||||
}
|
||||
|
||||
clog := p.Changelog.Contents
|
||||
if lines := strings.Split(clog, "\n"); len(lines) != 0 {
|
||||
clog = strings.Join(lines[0:len(lines)-1], "\n")
|
||||
}
|
||||
|
||||
switch p.Changelog.Method {
|
||||
case FailedToGetChangelog:
|
||||
clog = "No changelogs"
|
||||
case FailedToFindVersionInChangelog:
|
||||
clog = "Failed to parse changelogs. For details, check yourself"
|
||||
}
|
||||
buf = append(buf, packVer, delim.String(), clog)
|
||||
return strings.Join(buf, "\n")
|
||||
}
|
||||
|
||||
// Changelog has contents of changelog and how to get it.
|
||||
// Method: models.detectionMethodStr
|
||||
type Changelog struct {
|
||||
Contents string `json:"contents"`
|
||||
Method DetectionMethod `json:"method"`
|
||||
}
|
||||
|
||||
// AffectedProcess keep a processes information affected by software update
|
||||
type AffectedProcess struct {
|
||||
PID string `json:"pid,omitempty"`
|
||||
Name string `json:"name,omitempty"`
|
||||
ListenPorts []string `json:"listenPorts,omitempty"`
|
||||
ListenPortStats []PortStat `json:"listenPortStats,omitempty"`
|
||||
}
|
||||
|
||||
// PortStat has the result of parsing the port information to the address and port.
|
||||
type PortStat struct {
|
||||
BindAddress string `json:"bindAddress"`
|
||||
Port string `json:"port"`
|
||||
PortReachableTo []string `json:"portReachableTo"`
|
||||
}
|
||||
|
||||
// NewPortStat create a PortStat from ipPort str
|
||||
func NewPortStat(ipPort string) (*PortStat, error) {
|
||||
if ipPort == "" {
|
||||
return &PortStat{}, nil
|
||||
}
|
||||
sep := strings.LastIndex(ipPort, ":")
|
||||
if sep == -1 {
|
||||
return nil, xerrors.Errorf("Failed to parse IP:Port: %s", ipPort)
|
||||
}
|
||||
return &PortStat{
|
||||
BindAddress: ipPort[:sep],
|
||||
Port: ipPort[sep+1:],
|
||||
}, nil
|
||||
}
|
||||
|
||||
// HasReachablePort checks if Package.AffectedProcs has PortReachableTo
|
||||
func (p Package) HasReachablePort() bool {
|
||||
for _, ap := range p.AffectedProcs {
|
||||
for _, lp := range ap.ListenPortStats {
|
||||
if len(lp.PortReachableTo) > 0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// NeedRestartProcess keep a processes information affected by software update
|
||||
type NeedRestartProcess struct {
|
||||
PID string `json:"pid"`
|
||||
Path string `json:"path"`
|
||||
ServiceName string `json:"serviceName"`
|
||||
InitSystem string `json:"initSystem"`
|
||||
HasInit bool `json:"-"`
|
||||
}
|
||||
|
||||
// SrcPackage has installed source package information.
|
||||
// Debian based Linux has both of package and source information in dpkg.
|
||||
// OVAL database often includes a source version (Not a binary version),
|
||||
// so it is also needed to capture source version for OVAL version comparison.
|
||||
// https://github.com/future-architect/vuls/issues/504
|
||||
type SrcPackage struct {
|
||||
Name string `json:"name"`
|
||||
Version string `json:"version"`
|
||||
Arch string `json:"arch"`
|
||||
BinaryNames []string `json:"binaryNames"`
|
||||
}
|
||||
|
||||
// AddBinaryName add the name if not exists
|
||||
func (s *SrcPackage) AddBinaryName(name string) {
|
||||
found := false
|
||||
for _, n := range s.BinaryNames {
|
||||
if n == name {
|
||||
return
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
s.BinaryNames = append(s.BinaryNames, name)
|
||||
}
|
||||
}
|
||||
|
||||
// SrcPackages is Map of SrcPackage
|
||||
// { "package-name": SrcPackage }
|
||||
type SrcPackages map[string]SrcPackage
|
||||
|
||||
// FindByBinName finds by bin-package-name
|
||||
func (s SrcPackages) FindByBinName(name string) (*SrcPackage, bool) {
|
||||
for _, p := range s {
|
||||
for _, binName := range p.BinaryNames {
|
||||
if binName == name {
|
||||
return &p, true
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// raspiPackNamePattern is a regular expression pattern to detect the Raspberry Pi specific package from the package name.
|
||||
// e.g. libraspberrypi-dev, rpi-eeprom, python3-rpi.gpio, pi-bluetooth
|
||||
var raspiPackNamePattern = regexp.MustCompile(`(.*raspberry.*|^rpi.*|.*-rpi.*|^pi-.*)`)
|
||||
|
||||
// raspiPackNamePattern is a regular expression pattern to detect the Raspberry Pi specific package from the version.
|
||||
// e.g. ffmpeg 7:4.1.4-1+rpt7~deb10u1, vlc 3.0.10-0+deb10u1+rpt2
|
||||
var raspiPackVersionPattern = regexp.MustCompile(`.+\+rp(t|i)\d+`)
|
||||
|
||||
// raspiPackNameList is a package name array of Raspberry Pi specific packages that are difficult to detect with regular expressions.
|
||||
var raspiPackNameList = []string{"piclone", "pipanel", "pishutdown", "piwiz", "pixflat-icons"}
|
||||
|
||||
// IsRaspbianPackage judges whether it is a package related to Raspberry Pi from the package name and version
|
||||
func IsRaspbianPackage(name, version string) bool {
|
||||
if raspiPackNamePattern.MatchString(name) || raspiPackVersionPattern.MatchString(version) {
|
||||
return true
|
||||
}
|
||||
for _, n := range raspiPackNameList {
|
||||
if n == name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
@@ -1,430 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/k0kubun/pp"
|
||||
)
|
||||
|
||||
func TestMergeNewVersion(t *testing.T) {
|
||||
var test = struct {
|
||||
a Packages
|
||||
b Packages
|
||||
expected Packages
|
||||
}{
|
||||
Packages{
|
||||
"hoge": {
|
||||
Name: "hoge",
|
||||
},
|
||||
},
|
||||
Packages{
|
||||
"hoge": {
|
||||
Name: "hoge",
|
||||
NewVersion: "1.0.0",
|
||||
NewRelease: "release1",
|
||||
},
|
||||
},
|
||||
Packages{
|
||||
"hoge": {
|
||||
Name: "hoge",
|
||||
NewVersion: "1.0.0",
|
||||
NewRelease: "release1",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
test.a.MergeNewVersion(test.b)
|
||||
if !reflect.DeepEqual(test.a, test.expected) {
|
||||
e := pp.Sprintf("%v", test.a)
|
||||
a := pp.Sprintf("%v", test.expected)
|
||||
t.Errorf("expected %s, actual %s", e, a)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMerge(t *testing.T) {
|
||||
var test = struct {
|
||||
a Packages
|
||||
b Packages
|
||||
expected Packages
|
||||
}{
|
||||
Packages{
|
||||
"hoge": {Name: "hoge"},
|
||||
"fuga": {Name: "fuga"},
|
||||
},
|
||||
Packages{
|
||||
"hega": {Name: "hega"},
|
||||
"hage": {Name: "hage"},
|
||||
},
|
||||
Packages{
|
||||
"hoge": {Name: "hoge"},
|
||||
"fuga": {Name: "fuga"},
|
||||
"hega": {Name: "hega"},
|
||||
"hage": {Name: "hage"},
|
||||
},
|
||||
}
|
||||
|
||||
actual := test.a.Merge(test.b)
|
||||
if !reflect.DeepEqual(actual, test.expected) {
|
||||
e := pp.Sprintf("%v", test.expected)
|
||||
a := pp.Sprintf("%v", actual)
|
||||
t.Errorf("expected %s, actual %s", e, a)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAddBinaryName(t *testing.T) {
|
||||
var tests = []struct {
|
||||
in SrcPackage
|
||||
name string
|
||||
expected SrcPackage
|
||||
}{
|
||||
{
|
||||
SrcPackage{Name: "hoge"},
|
||||
"curl",
|
||||
SrcPackage{
|
||||
Name: "hoge",
|
||||
BinaryNames: []string{"curl"},
|
||||
},
|
||||
},
|
||||
{
|
||||
SrcPackage{
|
||||
Name: "hoge",
|
||||
BinaryNames: []string{"curl"},
|
||||
},
|
||||
"curl",
|
||||
SrcPackage{
|
||||
Name: "hoge",
|
||||
BinaryNames: []string{"curl"},
|
||||
},
|
||||
},
|
||||
{
|
||||
SrcPackage{
|
||||
Name: "hoge",
|
||||
BinaryNames: []string{"curl"},
|
||||
},
|
||||
"openssh",
|
||||
SrcPackage{
|
||||
Name: "hoge",
|
||||
BinaryNames: []string{"curl", "openssh"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
tt.in.AddBinaryName(tt.name)
|
||||
if !reflect.DeepEqual(tt.in, tt.expected) {
|
||||
t.Errorf("expected %#v, actual %#v", tt.in, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFindByBinName(t *testing.T) {
|
||||
var tests = []struct {
|
||||
in SrcPackages
|
||||
name string
|
||||
expected *SrcPackage
|
||||
ok bool
|
||||
}{
|
||||
{
|
||||
in: map[string]SrcPackage{
|
||||
"packA": {
|
||||
Name: "srcA",
|
||||
BinaryNames: []string{"binA"},
|
||||
Version: "1.0.0",
|
||||
},
|
||||
"packB": {
|
||||
Name: "srcB",
|
||||
BinaryNames: []string{"binB"},
|
||||
Version: "2.0.0",
|
||||
},
|
||||
},
|
||||
name: "binA",
|
||||
expected: &SrcPackage{
|
||||
Name: "srcA",
|
||||
BinaryNames: []string{"binA"},
|
||||
Version: "1.0.0",
|
||||
},
|
||||
ok: true,
|
||||
},
|
||||
{
|
||||
in: map[string]SrcPackage{
|
||||
"packA": {
|
||||
Name: "srcA",
|
||||
BinaryNames: []string{"binA"},
|
||||
Version: "1.0.0",
|
||||
},
|
||||
"packB": {
|
||||
Name: "srcB",
|
||||
BinaryNames: []string{"binB"},
|
||||
Version: "2.0.0",
|
||||
},
|
||||
},
|
||||
name: "nobin",
|
||||
expected: nil,
|
||||
ok: false,
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
act, ok := tt.in.FindByBinName(tt.name)
|
||||
if ok != tt.ok {
|
||||
t.Errorf("[%d] expected %#v, actual %#v", i, tt.in, tt.expected)
|
||||
}
|
||||
if act != nil && !reflect.DeepEqual(*tt.expected, *act) {
|
||||
t.Errorf("[%d] expected %#v, actual %#v", i, tt.in, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestPackage_FormatVersionFromTo(t *testing.T) {
|
||||
type fields struct {
|
||||
Name string
|
||||
Version string
|
||||
Release string
|
||||
NewVersion string
|
||||
NewRelease string
|
||||
Arch string
|
||||
Repository string
|
||||
Changelog Changelog
|
||||
AffectedProcs []AffectedProcess
|
||||
NeedRestartProcs []NeedRestartProcess
|
||||
}
|
||||
type args struct {
|
||||
stat PackageFixStatus
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
fields fields
|
||||
args args
|
||||
want string
|
||||
}{
|
||||
{
|
||||
name: "fixed",
|
||||
fields: fields{
|
||||
Name: "packA",
|
||||
Version: "1.0.0",
|
||||
Release: "a",
|
||||
NewVersion: "1.0.1",
|
||||
NewRelease: "b",
|
||||
},
|
||||
args: args{
|
||||
stat: PackageFixStatus{
|
||||
NotFixedYet: false,
|
||||
FixedIn: "1.0.1-b",
|
||||
},
|
||||
},
|
||||
want: "packA-1.0.0-a -> 1.0.1-b (FixedIn: 1.0.1-b)",
|
||||
},
|
||||
{
|
||||
name: "nfy",
|
||||
fields: fields{
|
||||
Name: "packA",
|
||||
Version: "1.0.0",
|
||||
Release: "a",
|
||||
},
|
||||
args: args{
|
||||
stat: PackageFixStatus{
|
||||
NotFixedYet: true,
|
||||
},
|
||||
},
|
||||
want: "packA-1.0.0-a -> Not Fixed Yet",
|
||||
},
|
||||
{
|
||||
name: "nfy",
|
||||
fields: fields{
|
||||
Name: "packA",
|
||||
Version: "1.0.0",
|
||||
Release: "a",
|
||||
},
|
||||
args: args{
|
||||
stat: PackageFixStatus{
|
||||
NotFixedYet: false,
|
||||
FixedIn: "1.0.1-b",
|
||||
},
|
||||
},
|
||||
want: "packA-1.0.0-a -> Unknown (FixedIn: 1.0.1-b)",
|
||||
},
|
||||
{
|
||||
name: "nfy2",
|
||||
fields: fields{
|
||||
Name: "packA",
|
||||
Version: "1.0.0",
|
||||
Release: "a",
|
||||
},
|
||||
args: args{
|
||||
stat: PackageFixStatus{
|
||||
NotFixedYet: true,
|
||||
FixedIn: "1.0.1-b",
|
||||
FixState: "open",
|
||||
},
|
||||
},
|
||||
want: "packA-1.0.0-a -> open (FixedIn: 1.0.1-b)",
|
||||
},
|
||||
{
|
||||
name: "nfy3",
|
||||
fields: fields{
|
||||
Name: "packA",
|
||||
Version: "1.0.0",
|
||||
Release: "a",
|
||||
},
|
||||
args: args{
|
||||
stat: PackageFixStatus{
|
||||
NotFixedYet: true,
|
||||
FixedIn: "1.0.1-b",
|
||||
FixState: "open",
|
||||
},
|
||||
},
|
||||
want: "packA-1.0.0-a -> open (FixedIn: 1.0.1-b)",
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
p := Package{
|
||||
Name: tt.fields.Name,
|
||||
Version: tt.fields.Version,
|
||||
Release: tt.fields.Release,
|
||||
NewVersion: tt.fields.NewVersion,
|
||||
NewRelease: tt.fields.NewRelease,
|
||||
Arch: tt.fields.Arch,
|
||||
Repository: tt.fields.Repository,
|
||||
Changelog: tt.fields.Changelog,
|
||||
AffectedProcs: tt.fields.AffectedProcs,
|
||||
NeedRestartProcs: tt.fields.NeedRestartProcs,
|
||||
}
|
||||
if got := p.FormatVersionFromTo(tt.args.stat); got != tt.want {
|
||||
t.Errorf("Package.FormatVersionFromTo() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_IsRaspbianPackage(t *testing.T) {
|
||||
type args struct {
|
||||
name string
|
||||
ver string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
in []args
|
||||
expect []bool
|
||||
}{
|
||||
{
|
||||
name: "nameRegExp",
|
||||
in: []args{
|
||||
{
|
||||
name: "libraspberrypi-dev",
|
||||
ver: "1.20200811-1",
|
||||
},
|
||||
{
|
||||
name: "rpi-eeprom",
|
||||
ver: "7.10-1",
|
||||
},
|
||||
{
|
||||
name: "python3-rpi.gpio",
|
||||
ver: "0.7.0-0.1~bpo10+1",
|
||||
},
|
||||
{
|
||||
name: "arping",
|
||||
ver: "2.19-6",
|
||||
},
|
||||
{
|
||||
name: "pi-bluetooth",
|
||||
ver: "0.1.14",
|
||||
},
|
||||
},
|
||||
expect: []bool{true, true, true, false, true, false},
|
||||
},
|
||||
{
|
||||
name: "verRegExp",
|
||||
in: []args{
|
||||
{
|
||||
name: "ffmpeg",
|
||||
ver: "7:4.1.6-1~deb10u1+rpt1",
|
||||
},
|
||||
{
|
||||
name: "gcc",
|
||||
ver: "4:8.3.0-1+rpi2",
|
||||
},
|
||||
},
|
||||
expect: []bool{true, true},
|
||||
},
|
||||
{
|
||||
name: "nameList",
|
||||
in: []args{
|
||||
{
|
||||
name: "piclone",
|
||||
ver: "0.16",
|
||||
},
|
||||
},
|
||||
expect: []bool{true},
|
||||
},
|
||||
{
|
||||
name: "debianPackage",
|
||||
in: []args{
|
||||
{
|
||||
name: "apt",
|
||||
ver: "1.8.2.1",
|
||||
},
|
||||
},
|
||||
expect: []bool{false},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
for i, p := range tt.in {
|
||||
ret := IsRaspbianPackage(p.name, p.ver)
|
||||
if !reflect.DeepEqual(ret, tt.expect[i]) {
|
||||
t.Errorf("[%s->%s] expected: %t, actual: %t, in: %#v", tt.name, tt.in[i].name, tt.expect[i], ret, tt.in[i])
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_parseListenPorts(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
args string
|
||||
expect PortStat
|
||||
}{{
|
||||
name: "empty",
|
||||
args: "",
|
||||
expect: PortStat{
|
||||
BindAddress: "",
|
||||
Port: "",
|
||||
},
|
||||
}, {
|
||||
name: "normal",
|
||||
args: "127.0.0.1:22",
|
||||
expect: PortStat{
|
||||
BindAddress: "127.0.0.1",
|
||||
Port: "22",
|
||||
},
|
||||
}, {
|
||||
name: "asterisk",
|
||||
args: "*:22",
|
||||
expect: PortStat{
|
||||
BindAddress: "*",
|
||||
Port: "22",
|
||||
},
|
||||
}, {
|
||||
name: "ipv6_loopback",
|
||||
args: "[::1]:22",
|
||||
expect: PortStat{
|
||||
BindAddress: "[::1]",
|
||||
Port: "22",
|
||||
},
|
||||
}}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
listenPort, err := NewPortStat(tt.args)
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error occurred: %s", err)
|
||||
} else if !reflect.DeepEqual(*listenPort, tt.expect) {
|
||||
t.Errorf("base.parseListenPorts() = %v, want %v", *listenPort, tt.expect)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,536 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/cwe"
|
||||
"github.com/future-architect/vuls/util"
|
||||
)
|
||||
|
||||
// ScanResults is a slice of ScanResult
|
||||
type ScanResults []ScanResult
|
||||
|
||||
// ScanResult has the result of scanned CVE information.
|
||||
type ScanResult struct {
|
||||
JSONVersion int `json:"jsonVersion"`
|
||||
Lang string `json:"lang"`
|
||||
ServerUUID string `json:"serverUUID"`
|
||||
ServerName string `json:"serverName"` // TOML Section key
|
||||
Family string `json:"family"`
|
||||
Release string `json:"release"`
|
||||
Container Container `json:"container"`
|
||||
Platform Platform `json:"platform"`
|
||||
IPv4Addrs []string `json:"ipv4Addrs,omitempty"` // only global unicast address (https://golang.org/pkg/net/#IP.IsGlobalUnicast)
|
||||
IPv6Addrs []string `json:"ipv6Addrs,omitempty"` // only global unicast address (https://golang.org/pkg/net/#IP.IsGlobalUnicast)
|
||||
IPSIdentifiers map[config.IPS]string `json:"ipsIdentifiers,omitempty"`
|
||||
ScannedAt time.Time `json:"scannedAt"`
|
||||
ScanMode string `json:"scanMode"`
|
||||
ScannedVersion string `json:"scannedVersion"`
|
||||
ScannedRevision string `json:"scannedRevision"`
|
||||
ScannedBy string `json:"scannedBy"`
|
||||
ScannedVia string `json:"scannedVia"`
|
||||
ScannedIPv4Addrs []string `json:"scannedIpv4Addrs,omitempty"`
|
||||
ScannedIPv6Addrs []string `json:"scannedIpv6Addrs,omitempty"`
|
||||
ReportedAt time.Time `json:"reportedAt"`
|
||||
ReportedVersion string `json:"reportedVersion"`
|
||||
ReportedRevision string `json:"reportedRevision"`
|
||||
ReportedBy string `json:"reportedBy"`
|
||||
Errors []string `json:"errors"`
|
||||
Warnings []string `json:"warnings"`
|
||||
|
||||
ScannedCves VulnInfos `json:"scannedCves"`
|
||||
RunningKernel Kernel `json:"runningKernel"`
|
||||
Packages Packages `json:"packages"`
|
||||
SrcPackages SrcPackages `json:",omitempty"`
|
||||
EnabledDnfModules []string `json:"enabledDnfModules,omitempty"` // for dnf modules
|
||||
WordPressPackages WordPressPackages `json:",omitempty"`
|
||||
LibraryScanners LibraryScanners `json:"libraries,omitempty"`
|
||||
CweDict CweDict `json:"cweDict,omitempty"`
|
||||
Optional map[string]interface{} `json:",omitempty"`
|
||||
Config struct {
|
||||
Scan config.Config `json:"scan"`
|
||||
Report config.Config `json:"report"`
|
||||
} `json:"config"`
|
||||
}
|
||||
|
||||
// CweDict is a dictionary for CWE
|
||||
type CweDict map[string]CweDictEntry
|
||||
|
||||
// Get returns the name, URL, and Top 10/Top 25 ranks and URLs for the specified cweID and lang
|
||||
func (c CweDict) Get(cweID, lang string) (name, url, top10Rank, top10URL, cweTop25Rank, cweTop25URL, sansTop25Rank, sansTop25URL string) {
|
||||
cweNum := strings.TrimPrefix(cweID, "CWE-")
|
||||
switch config.Conf.Lang {
|
||||
case "ja":
|
||||
if dict, ok := c[cweNum]; ok && dict.OwaspTopTen2017 != "" {
|
||||
top10Rank = dict.OwaspTopTen2017
|
||||
top10URL = cwe.OwaspTopTen2017GitHubURLJa[dict.OwaspTopTen2017]
|
||||
}
|
||||
if dict, ok := c[cweNum]; ok && dict.CweTopTwentyfive2019 != "" {
|
||||
cweTop25Rank = dict.CweTopTwentyfive2019
|
||||
cweTop25URL = cwe.CweTopTwentyfive2019URL
|
||||
}
|
||||
if dict, ok := c[cweNum]; ok && dict.SansTopTwentyfive != "" {
|
||||
sansTop25Rank = dict.SansTopTwentyfive
|
||||
sansTop25URL = cwe.SansTopTwentyfiveURL
|
||||
}
|
||||
if dict, ok := cwe.CweDictJa[cweNum]; ok {
|
||||
name = dict.Name
|
||||
url = fmt.Sprintf("http://jvndb.jvn.jp/ja/cwe/%s.html", cweID)
|
||||
} else {
|
||||
if dict, ok := cwe.CweDictEn[cweNum]; ok {
|
||||
name = dict.Name
|
||||
}
|
||||
url = fmt.Sprintf("https://cwe.mitre.org/data/definitions/%s.html", cweID)
|
||||
}
|
||||
default:
|
||||
if dict, ok := c[cweNum]; ok && dict.OwaspTopTen2017 != "" {
|
||||
top10Rank = dict.OwaspTopTen2017
|
||||
top10URL = cwe.OwaspTopTen2017GitHubURLEn[dict.OwaspTopTen2017]
|
||||
}
|
||||
if dict, ok := c[cweNum]; ok && dict.CweTopTwentyfive2019 != "" {
|
||||
cweTop25Rank = dict.CweTopTwentyfive2019
|
||||
cweTop25URL = cwe.CweTopTwentyfive2019URL
|
||||
}
|
||||
if dict, ok := c[cweNum]; ok && dict.SansTopTwentyfive != "" {
|
||||
sansTop25Rank = dict.SansTopTwentyfive
|
||||
sansTop25URL = cwe.SansTopTwentyfiveURL
|
||||
}
|
||||
url = fmt.Sprintf("https://cwe.mitre.org/data/definitions/%s.html", cweID)
|
||||
if dict, ok := cwe.CweDictEn[cweNum]; ok {
|
||||
name = dict.Name
|
||||
}
|
||||
}
|
||||
return
|
||||
}
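// Usage sketch (illustrative only, not part of the original source; the
// receiver and arguments are hypothetical):
//
//	name, url, _, _, _, _, _, _ := r.CweDict.Get("CWE-79", "en")
//	fmt.Printf("%s: %s\n", name, url)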
|
||||
|
||||
// CweDictEntry is an entry of CWE
|
||||
type CweDictEntry struct {
|
||||
En *cwe.Cwe `json:"en,omitempty"`
|
||||
Ja *cwe.Cwe `json:"ja,omitempty"`
|
||||
OwaspTopTen2017 string `json:"owaspTopTen2017"`
|
||||
CweTopTwentyfive2019 string `json:"cweTopTwentyfive2019"`
|
||||
SansTopTwentyfive string `json:"sansTopTwentyfive"`
|
||||
}
|
||||
|
||||
// Kernel has the release, the version, and whether a reboot is required
|
||||
type Kernel struct {
|
||||
Release string `json:"release"`
|
||||
Version string `json:"version"`
|
||||
RebootRequired bool `json:"rebootRequired"`
|
||||
}
|
||||
|
||||
// FilterByCvssOver is filter function.
|
||||
func (r ScanResult) FilterByCvssOver(over float64) ScanResult {
|
||||
filtered := r.ScannedCves.Find(func(v VulnInfo) bool {
|
||||
v2Max := v.MaxCvss2Score()
|
||||
v3Max := v.MaxCvss3Score()
|
||||
max := v2Max.Value.Score
|
||||
if max < v3Max.Value.Score {
|
||||
max = v3Max.Value.Score
|
||||
}
|
||||
if over <= max {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
r.ScannedCves = filtered
|
||||
return r
|
||||
}
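// Illustrative usage (a sketch, not from the original source): keep only the
// CVEs whose highest CVSS v2/v3 score is 7.0 or above.
//
//	r = r.FilterByCvssOver(7.0)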
|
||||
|
||||
// FilterIgnoreCves is filter function.
|
||||
func (r ScanResult) FilterIgnoreCves() ScanResult {
|
||||
ignoreCves := []string{}
|
||||
if len(r.Container.Name) == 0 {
|
||||
//TODO pass by args
|
||||
ignoreCves = config.Conf.Servers[r.ServerName].IgnoreCves
|
||||
} else {
|
||||
//TODO pass by args
|
||||
if s, ok := config.Conf.Servers[r.ServerName]; ok {
|
||||
if con, ok := s.Containers[r.Container.Name]; ok {
|
||||
ignoreCves = con.IgnoreCves
|
||||
} else {
|
||||
return r
|
||||
}
|
||||
} else {
|
||||
util.Log.Errorf("%s is not found in config.toml",
|
||||
r.ServerName)
|
||||
return r
|
||||
}
|
||||
}
|
||||
|
||||
filtered := r.ScannedCves.Find(func(v VulnInfo) bool {
|
||||
for _, c := range ignoreCves {
|
||||
if v.CveID == c {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
r.ScannedCves = filtered
|
||||
return r
|
||||
}
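// Illustrative setup (a sketch assuming the config-based lookup above; the
// server name "web01" is hypothetical):
//
//	config.Conf.Servers = map[string]config.ServerInfo{
//		"web01": {IgnoreCves: []string{"CVE-2017-0002"}},
//	}
//	r = r.FilterIgnoreCves()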
|
||||
|
||||
// FilterUnfixed is filter function.
|
||||
func (r ScanResult) FilterUnfixed(ignoreUnfixed bool) ScanResult {
|
||||
if !ignoreUnfixed {
|
||||
return r
|
||||
}
|
||||
filtered := r.ScannedCves.Find(func(v VulnInfo) bool {
|
||||
// Report cves detected by CPE because Vuls can't know 'fixed' or 'unfixed'
|
||||
if len(v.CpeURIs) != 0 {
|
||||
return true
|
||||
}
|
||||
NotFixedAll := true
|
||||
for _, p := range v.AffectedPackages {
|
||||
NotFixedAll = NotFixedAll && p.NotFixedYet
|
||||
}
|
||||
return !NotFixedAll
|
||||
})
|
||||
r.ScannedCves = filtered
|
||||
return r
|
||||
}
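// Illustrative usage (a sketch, not from the original source): with
// ignoreUnfixed=true, CVEs whose affected packages are all not-fixed-yet are
// dropped, while CPE-detected CVEs are always kept.
//
//	r = r.FilterUnfixed(true)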
|
||||
|
||||
// FilterIgnorePkgs is filter function.
|
||||
func (r ScanResult) FilterIgnorePkgs() ScanResult {
|
||||
var ignorePkgsRegexps []string
|
||||
if len(r.Container.Name) == 0 {
|
||||
//TODO pass by args
|
||||
ignorePkgsRegexps = config.Conf.Servers[r.ServerName].IgnorePkgsRegexp
|
||||
} else {
|
||||
if s, ok := config.Conf.Servers[r.ServerName]; ok {
|
||||
if con, ok := s.Containers[r.Container.Name]; ok {
|
||||
ignorePkgsRegexps = con.IgnorePkgsRegexp
|
||||
} else {
|
||||
return r
|
||||
}
|
||||
} else {
|
||||
util.Log.Errorf("%s is not found in config.toml",
|
||||
r.ServerName)
|
||||
return r
|
||||
}
|
||||
}
|
||||
|
||||
regexps := []*regexp.Regexp{}
|
||||
for _, pkgRegexp := range ignorePkgsRegexps {
|
||||
re, err := regexp.Compile(pkgRegexp)
|
||||
if err != nil {
|
||||
util.Log.Errorf("Failed to parse %s. err: %+v", pkgRegexp, err)
|
||||
continue
|
||||
} else {
|
||||
regexps = append(regexps, re)
|
||||
}
|
||||
}
|
||||
if len(regexps) == 0 {
|
||||
return r
|
||||
}
|
||||
|
||||
filtered := r.ScannedCves.Find(func(v VulnInfo) bool {
|
||||
if len(v.AffectedPackages) == 0 {
|
||||
return true
|
||||
}
|
||||
for _, p := range v.AffectedPackages {
|
||||
match := false
|
||||
for _, re := range regexps {
|
||||
if re.MatchString(p.Name) {
|
||||
match = true
|
||||
}
|
||||
}
|
||||
if !match {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
|
||||
r.ScannedCves = filtered
|
||||
return r
|
||||
}
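// Illustrative setup (a sketch; the server name and regexp are hypothetical):
//
//	config.Conf.Servers = map[string]config.ServerInfo{
//		"web01": {IgnorePkgsRegexp: []string{"^kernel"}},
//	}
//	r = r.FilterIgnorePkgs()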
|
||||
|
||||
// FilterInactiveWordPressLibs is filter function.
|
||||
func (r ScanResult) FilterInactiveWordPressLibs(detectInactive bool) ScanResult {
|
||||
if detectInactive {
|
||||
return r
|
||||
}
|
||||
|
||||
filtered := r.ScannedCves.Find(func(v VulnInfo) bool {
|
||||
if len(v.WpPackageFixStats) == 0 {
|
||||
return true
|
||||
}
|
||||
// Ignore if all libs in this vulnInfo inactive
|
||||
for _, wp := range v.WpPackageFixStats {
|
||||
if p, ok := r.WordPressPackages.Find(wp.Name); ok {
|
||||
if p.Status != Inactive {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
r.ScannedCves = filtered
|
||||
return r
|
||||
}
|
||||
|
||||
// ReportFileName returns the filename on localhost without extension
|
||||
func (r ScanResult) ReportFileName() (name string) {
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
return fmt.Sprintf("%s", r.ServerName)
|
||||
}
|
||||
return fmt.Sprintf("%s@%s", r.Container.Name, r.ServerName)
|
||||
}
|
||||
|
||||
// ReportKeyName returns the name of key on S3, Azure-Blob without extension
|
||||
func (r ScanResult) ReportKeyName() (name string) {
|
||||
timestr := r.ScannedAt.Format(time.RFC3339)
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
return fmt.Sprintf("%s/%s", timestr, r.ServerName)
|
||||
}
|
||||
return fmt.Sprintf("%s/%s@%s", timestr, r.Container.Name, r.ServerName)
|
||||
}
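// For example (illustrative values): a host scan yields
// "2021-01-02T15:04:05Z/web01" and a container scan yields
// "2021-01-02T15:04:05Z/nginx@web01".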
|
||||
|
||||
// ServerInfo returns the server name as a one-line string
|
||||
func (r ScanResult) ServerInfo() string {
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
return fmt.Sprintf("%s (%s%s)",
|
||||
r.FormatServerName(), r.Family, r.Release)
|
||||
}
|
||||
return fmt.Sprintf(
|
||||
"%s (%s%s) on %s",
|
||||
r.FormatServerName(),
|
||||
r.Family,
|
||||
r.Release,
|
||||
r.ServerName,
|
||||
)
|
||||
}
|
||||
|
||||
// ServerInfoTui returns server information for TUI sidebar
|
||||
func (r ScanResult) ServerInfoTui() string {
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
line := fmt.Sprintf("%s (%s%s)",
|
||||
r.ServerName, r.Family, r.Release)
|
||||
if len(r.Warnings) != 0 {
|
||||
line = "[Warn] " + line
|
||||
}
|
||||
if r.RunningKernel.RebootRequired {
|
||||
return "[Reboot] " + line
|
||||
}
|
||||
return line
|
||||
}
|
||||
|
||||
fmtstr := "|-- %s (%s%s)"
|
||||
if r.RunningKernel.RebootRequired {
|
||||
fmtstr = "|-- [Reboot] %s (%s%s)"
|
||||
}
|
||||
return fmt.Sprintf(fmtstr, r.Container.Name, r.Family, r.Release)
|
||||
}
|
||||
|
||||
// FormatServerName returns server and container name
|
||||
func (r ScanResult) FormatServerName() (name string) {
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
name = r.ServerName
|
||||
} else {
|
||||
name = fmt.Sprintf("%s@%s",
|
||||
r.Container.Name, r.ServerName)
|
||||
}
|
||||
if r.RunningKernel.RebootRequired {
|
||||
name = "[Reboot Required] " + name
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// FormatTextReportHeader returns header of text report
|
||||
func (r ScanResult) FormatTextReportHeader() string {
|
||||
var buf bytes.Buffer
|
||||
for i := 0; i < len(r.ServerInfo()); i++ {
|
||||
buf.WriteString("=")
|
||||
}
|
||||
|
||||
pkgs := r.FormatUpdatablePacksSummary()
|
||||
if 0 < len(r.WordPressPackages) {
|
||||
pkgs = fmt.Sprintf("%s, %d WordPress pkgs", pkgs, len(r.WordPressPackages))
|
||||
}
|
||||
if 0 < len(r.LibraryScanners) {
|
||||
pkgs = fmt.Sprintf("%s, %d libs", pkgs, r.LibraryScanners.Total())
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s\n%s\n%s, %s, %s, %s, %s\n%s\n",
|
||||
r.ServerInfo(),
|
||||
buf.String(),
|
||||
r.ScannedCves.FormatCveSummary(),
|
||||
r.ScannedCves.FormatFixedStatus(r.Packages),
|
||||
r.FormatExploitCveSummary(),
|
||||
r.FormatMetasploitCveSummary(),
|
||||
r.FormatAlertSummary(),
|
||||
pkgs)
|
||||
}
|
||||
|
||||
// FormatUpdatablePacksSummary returns a summary of updatable packages
|
||||
func (r ScanResult) FormatUpdatablePacksSummary() string {
|
||||
if !r.isDisplayUpdatableNum() {
|
||||
return fmt.Sprintf("%d installed", len(r.Packages))
|
||||
}
|
||||
|
||||
nUpdatable := 0
|
||||
for _, p := range r.Packages {
|
||||
if p.NewVersion == "" {
|
||||
continue
|
||||
}
|
||||
if p.Version != p.NewVersion || p.Release != p.NewRelease {
|
||||
nUpdatable++
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%d installed, %d updatable",
|
||||
len(r.Packages),
|
||||
nUpdatable)
|
||||
}
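// For example (illustrative values): "100 installed, 3 updatable", or just
// "100 installed" when the updatable count is not displayed for this scan mode.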
|
||||
|
||||
// FormatExploitCveSummary returns a summary of CVEs that have PoC exploits
|
||||
func (r ScanResult) FormatExploitCveSummary() string {
|
||||
nExploitCve := 0
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if 0 < len(vuln.Exploits) {
|
||||
nExploitCve++
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%d poc", nExploitCve)
|
||||
}
|
||||
|
||||
// FormatMetasploitCveSummary returns a summary of CVEs that have Metasploit modules
|
||||
func (r ScanResult) FormatMetasploitCveSummary() string {
|
||||
nMetasploitCve := 0
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if 0 < len(vuln.Metasploits) {
|
||||
nMetasploitCve++
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%d exploits", nMetasploitCve)
|
||||
}
|
||||
|
||||
// FormatAlertSummary returns a summary of CERT alerts
|
||||
func (r ScanResult) FormatAlertSummary() string {
|
||||
jaCnt := 0
|
||||
enCnt := 0
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if len(vuln.AlertDict.En) > 0 {
|
||||
enCnt += len(vuln.AlertDict.En)
|
||||
}
|
||||
if len(vuln.AlertDict.Ja) > 0 {
|
||||
jaCnt += len(vuln.AlertDict.Ja)
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("en: %d, ja: %d alerts", enCnt, jaCnt)
|
||||
}
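// For example (illustrative values): "en: 2, ja: 1 alerts".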
|
||||
|
||||
func (r ScanResult) isDisplayUpdatableNum() bool {
|
||||
if r.Family == config.FreeBSD {
|
||||
return false
|
||||
}
|
||||
|
||||
	//TODO pass by args
	mode := config.Conf.Servers[r.ServerName].Mode
|
||||
|
||||
if mode.IsOffline() {
|
||||
return false
|
||||
}
|
||||
if mode.IsFastRoot() || mode.IsDeep() {
|
||||
return true
|
||||
}
|
||||
if mode.IsFast() {
|
||||
switch r.Family {
|
||||
case config.RedHat,
|
||||
config.Oracle,
|
||||
config.Debian,
|
||||
config.Ubuntu,
|
||||
config.Raspbian:
|
||||
return false
|
||||
default:
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsContainer returns whether this ScanResult is for a container
|
||||
func (r ScanResult) IsContainer() bool {
|
||||
return 0 < len(r.Container.ContainerID)
|
||||
}
|
||||
|
||||
// IsDeepScanMode checks if the scan mode is deep scan mode.
|
||||
func (r ScanResult) IsDeepScanMode() bool {
|
||||
for _, s := range r.Config.Scan.Servers {
|
||||
if ok := s.Mode.IsDeep(); ok {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Container has Container information
|
||||
type Container struct {
|
||||
ContainerID string `json:"containerID"`
|
||||
Name string `json:"name"`
|
||||
Image string `json:"image"`
|
||||
Type string `json:"type"`
|
||||
UUID string `json:"uuid"`
|
||||
}
|
||||
|
||||
// Platform has platform information
|
||||
type Platform struct {
|
||||
Name string `json:"name"` // aws or azure or gcp or other...
|
||||
InstanceID string `json:"instanceID"`
|
||||
}
|
||||
|
||||
// RemoveRaspbianPackFromResult is for Raspberry Pi and removes the Raspberry Pi-specific packages from the ScanResult.
|
||||
func (r ScanResult) RemoveRaspbianPackFromResult() ScanResult {
|
||||
if r.Family != config.Raspbian {
|
||||
return r
|
||||
}
|
||||
|
||||
result := r
|
||||
packs := make(Packages)
|
||||
for _, pack := range r.Packages {
|
||||
if !IsRaspbianPackage(pack.Name, pack.Version) {
|
||||
packs[pack.Name] = pack
|
||||
}
|
||||
}
|
||||
srcPacks := make(SrcPackages)
|
||||
for _, pack := range r.SrcPackages {
|
||||
if !IsRaspbianPackage(pack.Name, pack.Version) {
|
||||
srcPacks[pack.Name] = pack
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
result.Packages = packs
|
||||
result.SrcPackages = srcPacks
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// ClearFields clears the given fields of ScanResult
|
||||
func (r ScanResult) ClearFields(targetTagNames []string) ScanResult {
|
||||
if len(targetTagNames) == 0 {
|
||||
return r
|
||||
}
|
||||
target := map[string]bool{}
|
||||
for _, n := range targetTagNames {
|
||||
target[strings.ToLower(n)] = true
|
||||
}
|
||||
t := reflect.ValueOf(r).Type()
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
f := t.Field(i)
|
||||
jsonValue := strings.Split(f.Tag.Get("json"), ",")[0]
|
||||
if ok := target[strings.ToLower(jsonValue)]; ok {
|
||||
vv := reflect.New(f.Type).Elem().Interface()
|
||||
reflect.ValueOf(&r).Elem().FieldByName(f.Name).Set(reflect.ValueOf(vv))
|
||||
}
|
||||
}
|
||||
return r
|
||||
}
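// Illustrative usage (a sketch; the tag name follows the json tags declared on
// ScanResult above):
//
//	r = r.ClearFields([]string{"packages"}) // zeroes r.Packages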
|
||||
@@ -1,721 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/k0kubun/pp"
|
||||
)
|
||||
|
||||
func TestFilterByCvssOver(t *testing.T) {
|
||||
type in struct {
|
||||
over float64
|
||||
rs ScanResult
|
||||
}
|
||||
var tests = []struct {
|
||||
in in
|
||||
out ScanResult
|
||||
}{
|
||||
{
|
||||
in: in{
|
||||
over: 7.0,
|
||||
rs: ScanResult{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
CveContents: NewCveContents(
|
||||
CveContent{
|
||||
Type: Nvd,
|
||||
CveID: "CVE-2017-0001",
|
||||
Cvss2Score: 7.1,
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
),
|
||||
},
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
CveContents: NewCveContents(
|
||||
CveContent{
|
||||
Type: Nvd,
|
||||
CveID: "CVE-2017-0002",
|
||||
Cvss2Score: 6.9,
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
),
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
CveContents: NewCveContents(
|
||||
CveContent{
|
||||
Type: Nvd,
|
||||
CveID: "CVE-2017-0003",
|
||||
Cvss2Score: 6.9,
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
CveContent{
|
||||
Type: Jvn,
|
||||
CveID: "CVE-2017-0003",
|
||||
Cvss2Score: 7.2,
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
out: ScanResult{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
CveContents: NewCveContents(
|
||||
CveContent{
|
||||
Type: Nvd,
|
||||
CveID: "CVE-2017-0001",
|
||||
Cvss2Score: 7.1,
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
),
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
CveContents: NewCveContents(
|
||||
CveContent{
|
||||
Type: Nvd,
|
||||
CveID: "CVE-2017-0003",
|
||||
Cvss2Score: 6.9,
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
CveContent{
|
||||
Type: Jvn,
|
||||
CveID: "CVE-2017-0003",
|
||||
Cvss2Score: 7.2,
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
// OVAL Severity
|
||||
{
|
||||
in: in{
|
||||
over: 7.0,
|
||||
rs: ScanResult{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
CveContents: NewCveContents(
|
||||
CveContent{
|
||||
Type: Ubuntu,
|
||||
CveID: "CVE-2017-0001",
|
||||
Cvss2Severity: "HIGH",
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
),
|
||||
},
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
CveContents: NewCveContents(
|
||||
CveContent{
|
||||
Type: RedHat,
|
||||
CveID: "CVE-2017-0002",
|
||||
Cvss2Severity: "CRITICAL",
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
),
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
CveContents: NewCveContents(
|
||||
CveContent{
|
||||
Type: Oracle,
|
||||
CveID: "CVE-2017-0003",
|
||||
Cvss2Severity: "IMPORTANT",
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
out: ScanResult{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
CveContents: NewCveContents(
|
||||
CveContent{
|
||||
Type: Ubuntu,
|
||||
CveID: "CVE-2017-0001",
|
||||
Cvss2Severity: "HIGH",
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
),
|
||||
},
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
CveContents: NewCveContents(
|
||||
CveContent{
|
||||
Type: RedHat,
|
||||
CveID: "CVE-2017-0002",
|
||||
Cvss2Severity: "CRITICAL",
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
),
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
CveContents: NewCveContents(
|
||||
CveContent{
|
||||
Type: Oracle,
|
||||
CveID: "CVE-2017-0003",
|
||||
Cvss2Severity: "IMPORTANT",
|
||||
LastModified: time.Time{},
|
||||
},
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
actual := tt.in.rs.FilterByCvssOver(tt.in.over)
|
||||
for k := range tt.out.ScannedCves {
|
||||
if !reflect.DeepEqual(tt.out.ScannedCves[k], actual.ScannedCves[k]) {
|
||||
o := pp.Sprintf("%v", tt.out.ScannedCves[k])
|
||||
a := pp.Sprintf("%v", actual.ScannedCves[k])
|
||||
t.Errorf("[%s] expected: %v\n actual: %v\n", k, o, a)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
func TestFilterIgnoreCveIDs(t *testing.T) {
|
||||
type in struct {
|
||||
cves []string
|
||||
rs ScanResult
|
||||
}
|
||||
var tests = []struct {
|
||||
in in
|
||||
out ScanResult
|
||||
}{
|
||||
{
|
||||
in: in{
|
||||
cves: []string{"CVE-2017-0002"},
|
||||
rs: ScanResult{
|
||||
ServerName: "name",
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
},
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
out: ScanResult{
|
||||
ServerName: "name",
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
config.Conf.Servers = map[string]config.ServerInfo{
|
||||
"name": {IgnoreCves: tt.in.cves},
|
||||
}
|
||||
actual := tt.in.rs.FilterIgnoreCves()
|
||||
for k := range tt.out.ScannedCves {
|
||||
if !reflect.DeepEqual(tt.out.ScannedCves[k], actual.ScannedCves[k]) {
|
||||
o := pp.Sprintf("%v", tt.out.ScannedCves[k])
|
||||
a := pp.Sprintf("%v", actual.ScannedCves[k])
|
||||
t.Errorf("[%s] expected: %v\n actual: %v\n", k, o, a)
|
||||
}
|
||||
}
|
||||
for k := range actual.ScannedCves {
|
||||
if !reflect.DeepEqual(tt.out.ScannedCves[k], actual.ScannedCves[k]) {
|
||||
o := pp.Sprintf("%v", tt.out.ScannedCves[k])
|
||||
a := pp.Sprintf("%v", actual.ScannedCves[k])
|
||||
t.Errorf("[%s] expected: %v\n actual: %v\n", k, o, a)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFilterIgnoreCveIDsContainer(t *testing.T) {
|
||||
type in struct {
|
||||
cves []string
|
||||
rs ScanResult
|
||||
}
|
||||
var tests = []struct {
|
||||
in in
|
||||
out ScanResult
|
||||
}{
|
||||
{
|
||||
in: in{
|
||||
cves: []string{"CVE-2017-0002"},
|
||||
rs: ScanResult{
|
||||
ServerName: "name",
|
||||
Container: Container{Name: "dockerA"},
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
},
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
out: ScanResult{
|
||||
ServerName: "name",
|
||||
Container: Container{Name: "dockerA"},
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
config.Conf.Servers = map[string]config.ServerInfo{
|
||||
"name": {
|
||||
Containers: map[string]config.ContainerSetting{
|
||||
"dockerA": {
|
||||
IgnoreCves: tt.in.cves,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
actual := tt.in.rs.FilterIgnoreCves()
|
||||
for k := range tt.out.ScannedCves {
|
||||
if !reflect.DeepEqual(tt.out.ScannedCves[k], actual.ScannedCves[k]) {
|
||||
o := pp.Sprintf("%v", tt.out.ScannedCves[k])
|
||||
a := pp.Sprintf("%v", actual.ScannedCves[k])
|
||||
t.Errorf("[%s] expected: %v\n actual: %v\n", k, o, a)
|
||||
}
|
||||
}
|
||||
for k := range actual.ScannedCves {
|
||||
if !reflect.DeepEqual(tt.out.ScannedCves[k], actual.ScannedCves[k]) {
|
||||
o := pp.Sprintf("%v", tt.out.ScannedCves[k])
|
||||
a := pp.Sprintf("%v", actual.ScannedCves[k])
|
||||
t.Errorf("[%s] expected: %v\n actual: %v\n", k, o, a)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFilterUnfixed(t *testing.T) {
|
||||
var tests = []struct {
|
||||
in ScanResult
|
||||
out ScanResult
|
||||
}{
|
||||
{
|
||||
in: ScanResult{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{
|
||||
Name: "c",
|
||||
NotFixedYet: true,
|
||||
},
|
||||
{
|
||||
Name: "d",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
out: ScanResult{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{
|
||||
Name: "c",
|
||||
NotFixedYet: true,
|
||||
},
|
||||
{
|
||||
Name: "d",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for i, tt := range tests {
|
||||
actual := tt.in.FilterUnfixed(true)
|
||||
if !reflect.DeepEqual(tt.out.ScannedCves, actual.ScannedCves) {
|
||||
o := pp.Sprintf("%v", tt.out.ScannedCves)
|
||||
a := pp.Sprintf("%v", actual.ScannedCves)
|
||||
t.Errorf("[%d] expected: %v\n actual: %v\n", i, o, a)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFilterIgnorePkgs(t *testing.T) {
|
||||
type in struct {
|
||||
ignorePkgsRegexp []string
|
||||
rs ScanResult
|
||||
}
|
||||
var tests = []struct {
|
||||
in in
|
||||
out ScanResult
|
||||
}{
|
||||
{
|
||||
in: in{
|
||||
ignorePkgsRegexp: []string{"^kernel"},
|
||||
rs: ScanResult{
|
||||
ServerName: "name",
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{Name: "kernel"},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
out: ScanResult{
|
||||
ServerName: "name",
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
ignorePkgsRegexp: []string{"^kernel"},
|
||||
rs: ScanResult{
|
||||
ServerName: "name",
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{Name: "kernel"},
|
||||
{Name: "vim"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
out: ScanResult{
|
||||
ServerName: "name",
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{Name: "kernel"},
|
||||
{Name: "vim"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
ignorePkgsRegexp: []string{"^kernel", "^vim", "^bind"},
|
||||
rs: ScanResult{
|
||||
ServerName: "name",
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{Name: "kernel"},
|
||||
{Name: "vim"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
out: ScanResult{
|
||||
ServerName: "name",
|
||||
ScannedCves: VulnInfos{},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
config.Conf.Servers = map[string]config.ServerInfo{
|
||||
"name": {IgnorePkgsRegexp: tt.in.ignorePkgsRegexp},
|
||||
}
|
||||
actual := tt.in.rs.FilterIgnorePkgs()
|
||||
for k := range tt.out.ScannedCves {
|
||||
if !reflect.DeepEqual(tt.out.ScannedCves[k], actual.ScannedCves[k]) {
|
||||
o := pp.Sprintf("%v", tt.out.ScannedCves[k])
|
||||
a := pp.Sprintf("%v", actual.ScannedCves[k])
|
||||
t.Errorf("[%s] expected: %v\n actual: %v\n", k, o, a)
|
||||
}
|
||||
}
|
||||
for k := range actual.ScannedCves {
|
||||
if !reflect.DeepEqual(tt.out.ScannedCves[k], actual.ScannedCves[k]) {
|
||||
o := pp.Sprintf("%v", tt.out.ScannedCves[k])
|
||||
a := pp.Sprintf("%v", actual.ScannedCves[k])
|
||||
t.Errorf("[%s] expected: %v\n actual: %v\n", k, o, a)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFilterIgnorePkgsContainer(t *testing.T) {
|
||||
type in struct {
|
||||
ignorePkgsRegexp []string
|
||||
rs ScanResult
|
||||
}
|
||||
var tests = []struct {
|
||||
in in
|
||||
out ScanResult
|
||||
}{
|
||||
{
|
||||
in: in{
|
||||
ignorePkgsRegexp: []string{"^kernel"},
|
||||
rs: ScanResult{
|
||||
ServerName: "name",
|
||||
Container: Container{Name: "dockerA"},
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{Name: "kernel"},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
out: ScanResult{
|
||||
ServerName: "name",
|
||||
Container: Container{Name: "dockerA"},
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
ignorePkgsRegexp: []string{"^kernel"},
|
||||
rs: ScanResult{
|
||||
ServerName: "name",
|
||||
Container: Container{Name: "dockerA"},
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{Name: "kernel"},
|
||||
{Name: "vim"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
out: ScanResult{
|
||||
ServerName: "name",
|
||||
Container: Container{Name: "dockerA"},
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{Name: "kernel"},
|
||||
{Name: "vim"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
ignorePkgsRegexp: []string{"^kernel", "^vim", "^bind"},
|
||||
rs: ScanResult{
|
||||
ServerName: "name",
|
||||
Container: Container{Name: "dockerA"},
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
AffectedPackages: PackageFixStatuses{
|
||||
{Name: "kernel"},
|
||||
{Name: "vim"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
out: ScanResult{
|
||||
ServerName: "name",
|
||||
Container: Container{Name: "dockerA"},
|
||||
ScannedCves: VulnInfos{},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
config.Conf.Servers = map[string]config.ServerInfo{
|
||||
"name": {
|
||||
Containers: map[string]config.ContainerSetting{
|
||||
"dockerA": {
|
||||
IgnorePkgsRegexp: tt.in.ignorePkgsRegexp,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
actual := tt.in.rs.FilterIgnorePkgs()
|
||||
for k := range tt.out.ScannedCves {
|
||||
if !reflect.DeepEqual(tt.out.ScannedCves[k], actual.ScannedCves[k]) {
|
||||
o := pp.Sprintf("%v", tt.out.ScannedCves[k])
|
||||
a := pp.Sprintf("%v", actual.ScannedCves[k])
|
||||
t.Errorf("[%s] expected: %v\n actual: %v\n", k, o, a)
|
||||
}
|
||||
}
|
||||
for k := range actual.ScannedCves {
|
||||
if !reflect.DeepEqual(tt.out.ScannedCves[k], actual.ScannedCves[k]) {
|
||||
o := pp.Sprintf("%v", tt.out.ScannedCves[k])
|
||||
a := pp.Sprintf("%v", actual.ScannedCves[k])
|
||||
t.Errorf("[%s] expected: %v\n actual: %v\n", k, o, a)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsDisplayUpdatableNum(t *testing.T) {
|
||||
var tests = []struct {
|
||||
mode []byte
|
||||
family string
|
||||
expected bool
|
||||
}{
|
||||
{
|
||||
mode: []byte{config.Offline},
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.FastRoot},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Deep},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: config.RedHat,
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: config.Oracle,
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: config.Debian,
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: config.Ubuntu,
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: config.Raspbian,
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: config.CentOS,
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: config.Amazon,
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: config.FreeBSD,
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: config.OpenSUSE,
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: config.Alpine,
|
||||
expected: true,
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
mode := config.ScanMode{}
|
||||
for _, m := range tt.mode {
|
||||
mode.Set(m)
|
||||
}
|
||||
config.Conf.Servers = map[string]config.ServerInfo{
|
||||
"name": {Mode: mode},
|
||||
}
|
||||
r := ScanResult{
|
||||
ServerName: "name",
|
||||
Family: tt.family,
|
||||
}
|
||||
act := r.isDisplayUpdatableNum()
|
||||
if tt.expected != act {
|
||||
t.Errorf("[%d] expected %#v, actual %#v", i, tt.expected, act)
|
||||
}
|
||||
}
|
||||
}
|
||||
models/utils.go
@@ -1,120 +0,0 @@
|
||||
// +build !scanner
|
||||
|
||||
package models
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
cvedict "github.com/kotakanbe/go-cve-dictionary/models"
|
||||
)
|
||||
|
||||
// ConvertJvnToModel converts a JVN entry to a CveContent
|
||||
func ConvertJvnToModel(cveID string, jvn *cvedict.Jvn) *CveContent {
|
||||
if jvn == nil {
|
||||
return nil
|
||||
}
|
||||
// var cpes = []Cpe{}
|
||||
// for _, c := range jvn.Cpes {
|
||||
// cpes = append(cpes, Cpe{
|
||||
// FormattedString: c.FormattedString,
|
||||
// URI: c.URI,
|
||||
// })
|
||||
// }
|
||||
|
||||
refs := []Reference{}
|
||||
for _, r := range jvn.References {
|
||||
refs = append(refs, Reference{
|
||||
Link: r.Link,
|
||||
Source: r.Source,
|
||||
})
|
||||
}
|
||||
|
||||
return &CveContent{
|
||||
Type: Jvn,
|
||||
CveID: cveID,
|
||||
Title: jvn.Title,
|
||||
Summary: jvn.Summary,
|
||||
Cvss2Score: jvn.Cvss2.BaseScore,
|
||||
Cvss2Vector: jvn.Cvss2.VectorString,
|
||||
Cvss2Severity: jvn.Cvss2.Severity,
|
||||
Cvss3Score: jvn.Cvss3.BaseScore,
|
||||
Cvss3Vector: jvn.Cvss3.VectorString,
|
||||
Cvss3Severity: jvn.Cvss3.BaseSeverity,
|
||||
SourceLink: jvn.JvnLink,
|
||||
// Cpes: cpes,
|
||||
References: refs,
|
||||
Published: jvn.PublishedDate,
|
||||
LastModified: jvn.LastModifiedDate,
|
||||
}
|
||||
}
|
||||
|
||||
// ConvertNvdJSONToModel converts an NVD entry to a CveContent, Exploits, and Mitigations
|
||||
func ConvertNvdJSONToModel(cveID string, nvd *cvedict.NvdJSON) (*CveContent, []Exploit, []Mitigation) {
|
||||
if nvd == nil {
|
||||
return nil, nil, nil
|
||||
}
|
||||
// var cpes = []Cpe{}
|
||||
// for _, c := range nvd.Cpes {
|
||||
// cpes = append(cpes, Cpe{
|
||||
// FormattedString: c.FormattedString,
|
||||
// URI: c.URI,
|
||||
// })
|
||||
// }
|
||||
|
||||
refs := []Reference{}
|
||||
exploits := []Exploit{}
|
||||
mitigations := []Mitigation{}
|
||||
for _, r := range nvd.References {
|
||||
var tags []string
|
||||
if 0 < len(r.Tags) {
|
||||
tags = strings.Split(r.Tags, ",")
|
||||
}
|
||||
refs = append(refs, Reference{
|
||||
Link: r.Link,
|
||||
Source: r.Source,
|
||||
Tags: tags,
|
||||
})
|
||||
if strings.Contains(r.Tags, "Exploit") {
|
||||
exploits = append(exploits, Exploit{
|
||||
//TODO Add const to here
|
||||
// https://github.com/vulsio/go-exploitdb/blob/master/models/exploit.go#L13-L18
|
||||
ExploitType: "nvd",
|
||||
URL: r.Link,
|
||||
})
|
||||
}
|
||||
if strings.Contains(r.Tags, "Mitigation") {
|
||||
mitigations = append(mitigations, Mitigation{
|
||||
CveContentType: Nvd,
|
||||
URL: r.Link,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
cweIDs := []string{}
|
||||
for _, cid := range nvd.Cwes {
|
||||
cweIDs = append(cweIDs, cid.CweID)
|
||||
}
|
||||
|
||||
desc := []string{}
|
||||
for _, d := range nvd.Descriptions {
|
||||
desc = append(desc, d.Value)
|
||||
}
|
||||
|
||||
return &CveContent{
|
||||
Type: Nvd,
|
||||
CveID: cveID,
|
||||
Summary: strings.Join(desc, "\n"),
|
||||
Cvss2Score: nvd.Cvss2.BaseScore,
|
||||
Cvss2Vector: nvd.Cvss2.VectorString,
|
||||
Cvss2Severity: nvd.Cvss2.Severity,
|
||||
Cvss3Score: nvd.Cvss3.BaseScore,
|
||||
Cvss3Vector: nvd.Cvss3.VectorString,
|
||||
Cvss3Severity: nvd.Cvss3.BaseSeverity,
|
||||
SourceLink: "https://nvd.nist.gov/vuln/detail/" + cveID,
|
||||
// Cpes: cpes,
|
||||
CweIDs: cweIDs,
|
||||
References: refs,
|
||||
Published: nvd.PublishedDate,
|
||||
LastModified: nvd.LastModifiedDate,
|
||||
}, exploits, mitigations
|
||||
}
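// Usage sketch (illustrative; nvdJSON is a hypothetical *cvedict.NvdJSON value):
//
//	cont, exploits, mitigations := ConvertNvdJSONToModel("CVE-2020-0001", nvdJSON)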
|
||||
@@ -1,856 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
exploitmodels "github.com/mozqnet/go-exploitdb/models"
|
||||
)
|
||||
|
||||
// VulnInfos has a map of VulnInfo
|
||||
// Key: CveID
|
||||
type VulnInfos map[string]VulnInfo
|
||||
|
||||
// Find returns the elements that match the function passed as an argument
|
||||
func (v VulnInfos) Find(f func(VulnInfo) bool) VulnInfos {
|
||||
filtered := VulnInfos{}
|
||||
for _, vv := range v {
|
||||
if f(vv) {
|
||||
filtered[vv.CveID] = vv
|
||||
}
|
||||
}
|
||||
return filtered
|
||||
}
|
||||
|
||||
// FindScoredVulns return scored vulnerabilities
|
||||
func (v VulnInfos) FindScoredVulns() VulnInfos {
|
||||
return v.Find(func(vv VulnInfo) bool {
|
||||
if 0 < vv.MaxCvss2Score().Value.Score ||
|
||||
0 < vv.MaxCvss3Score().Value.Score {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
}
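// Illustrative usage (a sketch, not from the original source): drop CVEs that
// have neither a CVSS v2 nor a CVSS v3 score.
//
//	scored := r.ScannedCves.FindScoredVulns()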
|
||||
|
||||
// ToSortedSlice returns a slice of VulnInfos sorted by score and CVE-ID
|
||||
func (v VulnInfos) ToSortedSlice() (sorted []VulnInfo) {
|
||||
for k := range v {
|
||||
sorted = append(sorted, v[k])
|
||||
}
|
||||
sort.Slice(sorted, func(i, j int) bool {
|
||||
maxI := sorted[i].MaxCvssScore()
|
||||
maxJ := sorted[j].MaxCvssScore()
|
||||
if maxI.Value.Score != maxJ.Value.Score {
|
||||
return maxJ.Value.Score < maxI.Value.Score
|
||||
}
|
||||
return sorted[i].CveID < sorted[j].CveID
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// CountGroupBySeverity summarizes the number of CVEs grouped by CVSSv2 severity
|
||||
func (v VulnInfos) CountGroupBySeverity() map[string]int {
|
||||
m := map[string]int{}
|
||||
for _, vInfo := range v {
|
||||
score := vInfo.MaxCvss2Score().Value.Score
|
||||
if score < 0.1 {
|
||||
score = vInfo.MaxCvss3Score().Value.Score
|
||||
}
|
||||
switch {
|
||||
case 7.0 <= score:
|
||||
m["High"]++
|
||||
case 4.0 <= score:
|
||||
m["Medium"]++
|
||||
case 0 < score:
|
||||
m["Low"]++
|
||||
default:
|
||||
m["Unknown"]++
|
||||
}
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// FormatCveSummary summarizes the number of CVEs grouped by CVSSv2 severity
|
||||
func (v VulnInfos) FormatCveSummary() string {
|
||||
m := v.CountGroupBySeverity()
|
||||
|
||||
if config.Conf.IgnoreUnscoredCves {
|
||||
return fmt.Sprintf("Total: %d (High:%d Medium:%d Low:%d)",
|
||||
m["High"]+m["Medium"]+m["Low"], m["High"], m["Medium"], m["Low"])
|
||||
}
|
||||
return fmt.Sprintf("Total: %d (High:%d Medium:%d Low:%d ?:%d)",
|
||||
m["High"]+m["Medium"]+m["Low"]+m["Unknown"],
|
||||
m["High"], m["Medium"], m["Low"], m["Unknown"])
|
||||
}
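// For example (illustrative values): "Total: 4 (High:1 Medium:2 Low:1 ?:0)".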
|
||||
|
||||
// FormatFixedStatus summarizes how many of the detected CVEs are fixed.
|
||||
func (v VulnInfos) FormatFixedStatus(packs Packages) string {
|
||||
total, fixed := 0, 0
|
||||
for _, vInfo := range v {
|
||||
if len(vInfo.CpeURIs) != 0 {
|
||||
continue
|
||||
}
|
||||
total++
|
||||
if vInfo.PatchStatus(packs) == "fixed" {
|
||||
fixed++
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%d/%d Fixed", fixed, total)
|
||||
}
|
||||
|
||||
// PackageFixStatuses is a list of PackageStatus
|
||||
type PackageFixStatuses []PackageFixStatus
|
||||
|
||||
// Names returns a slice of package names
|
||||
func (ps PackageFixStatuses) Names() (names []string) {
|
||||
for _, p := range ps {
|
||||
names = append(names, p.Name)
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
// Store inserts the given pkg if it is missing, or updates it if it already exists
|
||||
func (ps PackageFixStatuses) Store(pkg PackageFixStatus) PackageFixStatuses {
|
||||
for i, p := range ps {
|
||||
if p.Name == pkg.Name {
|
||||
ps[i] = pkg
|
||||
return ps
|
||||
}
|
||||
}
|
||||
ps = append(ps, pkg)
|
||||
return ps
|
||||
}
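// Illustrative usage (a sketch, not from the original source):
//
//	ps = ps.Store(PackageFixStatus{Name: "openssl", NotFixedYet: true})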
|
||||
|
||||
// Sort by Name
|
||||
func (ps PackageFixStatuses) Sort() {
|
||||
sort.Slice(ps, func(i, j int) bool {
|
||||
return ps[i].Name < ps[j].Name
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// PackageFixStatus has name and other status about the package
|
||||
type PackageFixStatus struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
NotFixedYet bool `json:"notFixedYet,omitempty"`
|
||||
FixState string `json:"fixState,omitempty"`
|
||||
FixedIn string `json:"fixedIn,omitempty"`
|
||||
}
|
||||
|
||||
// VulnInfo has vulnerability information and insecure packages
|
||||
type VulnInfo struct {
|
||||
CveID string `json:"cveID,omitempty"`
|
||||
Confidences Confidences `json:"confidences,omitempty"`
|
||||
AffectedPackages PackageFixStatuses `json:"affectedPackages,omitempty"`
|
||||
DistroAdvisories DistroAdvisories `json:"distroAdvisories,omitempty"` // for Amazon, RHEL, FreeBSD
|
||||
CveContents CveContents `json:"cveContents,omitempty"`
|
||||
Exploits []Exploit `json:"exploits,omitempty"`
|
||||
Metasploits []Metasploit `json:"metasploits,omitempty"`
|
||||
Mitigations []Mitigation `json:"mitigations,omitempty"`
|
||||
AlertDict AlertDict `json:"alertDict,omitempty"`
|
||||
CpeURIs []string `json:"cpeURIs,omitempty"` // CpeURIs related to this CVE defined in config.toml
|
||||
GitHubSecurityAlerts GitHubSecurityAlerts `json:"gitHubSecurityAlerts,omitempty"`
|
||||
WpPackageFixStats WpPackageFixStats `json:"wpPackageFixStats,omitempty"`
|
||||
LibraryFixedIns LibraryFixedIns `json:"libraryFixedIns,omitempty"`
|
||||
|
||||
VulnType string `json:"vulnType,omitempty"`
|
||||
}
|
||||
|
||||
// Alert has CERT alert information
|
||||
type Alert struct {
|
||||
URL string `json:"url,omitempty"`
|
||||
Title string `json:"title,omitempty"`
|
||||
Team string `json:"team,omitempty"`
|
||||
}
|
||||
|
||||
// GitHubSecurityAlerts is a list of GitHubSecurityAlert
|
||||
type GitHubSecurityAlerts []GitHubSecurityAlert
|
||||
|
||||
// Add adds the given alert to the slice and returns the slice (immutable)
|
||||
func (g GitHubSecurityAlerts) Add(alert GitHubSecurityAlert) GitHubSecurityAlerts {
|
||||
for _, a := range g {
|
||||
if a.PackageName == alert.PackageName {
|
||||
return g
|
||||
}
|
||||
}
|
||||
return append(g, alert)
|
||||
}
|
||||
|
||||
// Names returns a slice of lib names
|
||||
func (g GitHubSecurityAlerts) Names() (names []string) {
|
||||
for _, a := range g {
|
||||
names = append(names, a.PackageName)
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
// GitHubSecurityAlert has detected CVE-ID, PackageName, Status fetched via GitHub API
|
||||
type GitHubSecurityAlert struct {
|
||||
PackageName string `json:"packageName"`
|
||||
FixedIn string `json:"fixedIn"`
|
||||
AffectedRange string `json:"affectedRange"`
|
||||
Dismissed bool `json:"dismissed"`
|
||||
DismissedAt time.Time `json:"dismissedAt"`
|
||||
DismissReason string `json:"dismissReason"`
|
||||
}
|
||||
|
||||
// LibraryFixedIns is a list of Library's FixedIn
|
||||
type LibraryFixedIns []LibraryFixedIn
|
||||
|
||||
// Names returns a slice of names
|
||||
func (lfs LibraryFixedIns) Names() (names []string) {
|
||||
for _, lf := range lfs {
|
||||
names = append(names, lf.Name)
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
// WpPackageFixStats is a list of WpPackageFixStatus
|
||||
type WpPackageFixStats []WpPackageFixStatus
|
||||
|
||||
// Names returns a slice of names
|
||||
func (ws WpPackageFixStats) Names() (names []string) {
|
||||
for _, w := range ws {
|
||||
names = append(names, w.Name)
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
// WpPackages has a list of WpPackage
|
||||
type WpPackages []WpPackage
|
||||
|
||||
// Add adds the given pkg to the slice and returns the slice (immutable)
|
||||
func (g WpPackages) Add(pkg WpPackage) WpPackages {
|
||||
for _, a := range g {
|
||||
if a.Name == pkg.Name {
|
||||
return g
|
||||
}
|
||||
}
|
||||
return append(g, pkg)
|
||||
}
|
||||
|
||||
// Titles returns title (TUI)
|
||||
func (v VulnInfo) Titles(lang, myFamily string) (values []CveContentStr) {
|
||||
if lang == "ja" {
|
||||
if cont, found := v.CveContents[Jvn]; found && 0 < len(cont.Title) {
|
||||
values = append(values, CveContentStr{Jvn, cont.Title})
|
||||
}
|
||||
}
|
||||
|
||||
// RedHat API has one line title.
|
||||
if cont, found := v.CveContents[RedHatAPI]; found && 0 < len(cont.Title) {
|
||||
values = append(values, CveContentStr{RedHatAPI, cont.Title})
|
||||
}
|
||||
|
||||
order := CveContentTypes{Trivy, Nvd, NewCveContentType(myFamily)}
|
||||
order = append(order, AllCveContetTypes.Except(append(order, Jvn)...)...)
|
||||
for _, ctype := range order {
|
||||
// Only JVN has a meaningful title, so return the first 100 chars of the summary instead
|
||||
if cont, found := v.CveContents[ctype]; found && 0 < len(cont.Summary) {
|
||||
summary := strings.Replace(cont.Summary, "\n", " ", -1)
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: summary,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, adv := range v.DistroAdvisories {
|
||||
values = append(values, CveContentStr{
|
||||
Type: "Vendor",
|
||||
Value: strings.Replace(adv.Description, "\n", " ", -1),
|
||||
})
|
||||
}
|
||||
|
||||
if len(values) == 0 {
|
||||
values = []CveContentStr{{
|
||||
Type: Unknown,
|
||||
Value: "-",
|
||||
}}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Summaries returns summaries
|
||||
func (v VulnInfo) Summaries(lang, myFamily string) (values []CveContentStr) {
|
||||
if lang == "ja" {
|
||||
if cont, found := v.CveContents[Jvn]; found && 0 < len(cont.Summary) {
|
||||
summary := cont.Title
|
||||
summary += "\n" + strings.Replace(
|
||||
strings.Replace(cont.Summary, "\n", " ", -1), "\r", " ", -1)
|
||||
values = append(values, CveContentStr{Jvn, summary})
|
||||
}
|
||||
}
|
||||
|
||||
order := CveContentTypes{Trivy, NewCveContentType(myFamily), Nvd}
|
||||
order = append(order, AllCveContetTypes.Except(append(order, Jvn)...)...)
|
||||
for _, ctype := range order {
|
||||
if cont, found := v.CveContents[ctype]; found && 0 < len(cont.Summary) {
|
||||
summary := strings.Replace(cont.Summary, "\n", " ", -1)
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: summary,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, adv := range v.DistroAdvisories {
|
||||
values = append(values, CveContentStr{
|
||||
Type: "Vendor",
|
||||
Value: adv.Description,
|
||||
})
|
||||
}
|
||||
|
||||
if v, ok := v.CveContents[WpScan]; ok {
|
||||
values = append(values, CveContentStr{
|
||||
Type: "WPVDB",
|
||||
Value: v.Title,
|
||||
})
|
||||
}
|
||||
|
||||
if len(values) == 0 {
|
||||
return []CveContentStr{{
|
||||
Type: Unknown,
|
||||
Value: "-",
|
||||
}}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Cvss2Scores returns CVSS V2 Scores
|
||||
func (v VulnInfo) Cvss2Scores(myFamily string) (values []CveContentCvss) {
|
||||
order := []CveContentType{Nvd, RedHatAPI, RedHat, Jvn}
|
||||
if myFamily != config.RedHat && myFamily != config.CentOS {
|
||||
order = append(order, NewCveContentType(myFamily))
|
||||
}
|
||||
for _, ctype := range order {
|
||||
if cont, found := v.CveContents[ctype]; found {
|
||||
if cont.Cvss2Score == 0 || cont.Cvss2Severity == "" {
|
||||
continue
|
||||
}
|
||||
// https://nvd.nist.gov/vuln-metrics/cvss
|
||||
values = append(values, CveContentCvss{
|
||||
Type: ctype,
|
||||
Value: Cvss{
|
||||
Type: CVSS2,
|
||||
Score: cont.Cvss2Score,
|
||||
Vector: cont.Cvss2Vector,
|
||||
Severity: strings.ToUpper(cont.Cvss2Severity),
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, adv := range v.DistroAdvisories {
|
||||
if adv.Severity != "" {
|
||||
values = append(values, CveContentCvss{
|
||||
Type: "Advisory",
|
||||
Value: Cvss{
|
||||
Type: CVSS2,
|
||||
Score: severityToV2ScoreRoughly(adv.Severity),
|
||||
CalculatedBySeverity: true,
|
||||
Vector: "-",
|
||||
Severity: strings.ToUpper(adv.Severity),
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// An OVAL entry in Ubuntu and Debian has only severity (CVSS score isn't included).
|
||||
// Show severity and dummy score calculated roughly.
|
||||
order = append(order, AllCveContetTypes.Except(order...)...)
|
||||
for _, ctype := range order {
|
||||
if cont, found := v.CveContents[ctype]; found &&
|
||||
cont.Cvss2Score == 0 &&
|
||||
cont.Cvss3Score == 0 &&
|
||||
cont.Cvss2Severity != "" {
|
||||
|
||||
values = append(values, CveContentCvss{
|
||||
Type: cont.Type,
|
||||
Value: Cvss{
|
||||
Type: CVSS2,
|
||||
Score: severityToV2ScoreRoughly(cont.Cvss2Severity),
|
||||
CalculatedBySeverity: true,
|
||||
Vector: "-",
|
||||
Severity: strings.ToUpper(cont.Cvss2Severity),
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}

// Cvss3Scores returns CVSS V3 Score
func (v VulnInfo) Cvss3Scores() (values []CveContentCvss) {
    order := []CveContentType{Nvd, RedHatAPI, RedHat, Jvn}
    for _, ctype := range order {
        if cont, found := v.CveContents[ctype]; found {
            // https://nvd.nist.gov/vuln-metrics/cvss
            values = append(values, CveContentCvss{
                Type: ctype,
                Value: Cvss{
                    Type:     CVSS3,
                    Score:    cont.Cvss3Score,
                    Vector:   cont.Cvss3Vector,
                    Severity: strings.ToUpper(cont.Cvss3Severity),
                },
            })
        }
    }

    if cont, found := v.CveContents[Trivy]; found && cont.Cvss3Severity != "" {
        values = append(values, CveContentCvss{
            Type: Trivy,
            Value: Cvss{
                Type:     CVSS3,
                Score:    severityToV2ScoreRoughly(cont.Cvss3Severity),
                Severity: strings.ToUpper(cont.Cvss3Severity),
            },
        })
    }

    return
}

// MaxCvss3Score returns Max CVSS V3 Score
func (v VulnInfo) MaxCvss3Score() CveContentCvss {
    order := []CveContentType{Nvd, RedHat, RedHatAPI, Jvn}
    max := 0.0
    value := CveContentCvss{
        Type:  Unknown,
        Value: Cvss{Type: CVSS3},
    }
    for _, ctype := range order {
        if cont, found := v.CveContents[ctype]; found && max < cont.Cvss3Score {
            // https://nvd.nist.gov/vuln-metrics/cvss
            value = CveContentCvss{
                Type: ctype,
                Value: Cvss{
                    Type:     CVSS3,
                    Score:    cont.Cvss3Score,
                    Vector:   cont.Cvss3Vector,
                    Severity: strings.ToUpper(cont.Cvss3Severity),
                },
            }
            max = cont.Cvss3Score
        }
    }
    return value
}

// MaxCvssScore returns max CVSS Score
// If there is no CVSS Score, return Severity as a numerical value.
func (v VulnInfo) MaxCvssScore() CveContentCvss {
    v3Max := v.MaxCvss3Score()
    v2Max := v.MaxCvss2Score()
    max := v3Max
    if max.Type == Unknown {
        return v2Max
    }

    if max.Value.Score < v2Max.Value.Score && !v2Max.Value.CalculatedBySeverity {
        max = v2Max
    }
    return max
}
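
// Illustrative note (added, not in the original source): MaxCvssScore starts
// from the v3 maximum and only switches to v2 when no v3 entry exists, or when
// a *measured* (not severity-derived) v2 score is higher, e.g.:
//
//  v3 = 7.5, v2 = 9.8 (measured)                 -> returns the 9.8 v2 entry
//  v3 = 7.5, v2 = 8.9 (CalculatedBySeverity)     -> keeps the 7.5 v3 entry
//  no v3 data at all                             -> returns MaxCvss2Score()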

// MaxCvss2Score returns Max CVSS V2 Score
func (v VulnInfo) MaxCvss2Score() CveContentCvss {
    order := []CveContentType{Nvd, RedHat, RedHatAPI, Jvn}
    max := 0.0
    value := CveContentCvss{
        Type:  Unknown,
        Value: Cvss{Type: CVSS2},
    }
    for _, ctype := range order {
        if cont, found := v.CveContents[ctype]; found && max < cont.Cvss2Score {
            // https://nvd.nist.gov/vuln-metrics/cvss
            value = CveContentCvss{
                Type: ctype,
                Value: Cvss{
                    Type:     CVSS2,
                    Score:    cont.Cvss2Score,
                    Vector:   cont.Cvss2Vector,
                    Severity: strings.ToUpper(cont.Cvss2Severity),
                },
            }
            max = cont.Cvss2Score
        }
    }
    if 0 < max {
        return value
    }

    // If the CVSS score isn't on NVD, RedHat or JVN, use the OVAL and advisory severity.
    // Convert the severity to a rough CVSS score, then return the max severity.
    // Only Ubuntu, RedHat and Oracle have severity data in OVAL.
    order = []CveContentType{Ubuntu, RedHat, Oracle}
    for _, ctype := range order {
        if cont, found := v.CveContents[ctype]; found && 0 < len(cont.Cvss2Severity) {
            score := severityToV2ScoreRoughly(cont.Cvss2Severity)
            if max < score {
                value = CveContentCvss{
                    Type: ctype,
                    Value: Cvss{
                        Type:                 CVSS2,
                        Score:                score,
                        CalculatedBySeverity: true,
                        Vector:               cont.Cvss2Vector,
                        Severity:             strings.ToUpper(cont.Cvss2Severity),
                    },
                }
            }
            max = score
        }
    }

    // Only RedHat, Oracle and Amazon have severity data in advisories.
    for _, adv := range v.DistroAdvisories {
        if adv.Severity != "" {
            score := severityToV2ScoreRoughly(adv.Severity)
            if max < score {
                value = CveContentCvss{
                    Type: "Vendor",
                    Value: Cvss{
                        Type:                 CVSS2,
                        Score:                score,
                        CalculatedBySeverity: true,
                        Vector:               "-",
                        Severity:             adv.Severity,
                    },
                }
            }
        }
    }
    return value
}

// AttackVector returns the attack vector string
func (v VulnInfo) AttackVector() string {
    for _, cnt := range v.CveContents {
        if strings.HasPrefix(cnt.Cvss2Vector, "AV:N") ||
            strings.Contains(cnt.Cvss3Vector, "AV:N") {
            return "AV:N"
        } else if strings.HasPrefix(cnt.Cvss2Vector, "AV:A") ||
            strings.Contains(cnt.Cvss3Vector, "AV:A") {
            return "AV:A"
        } else if strings.HasPrefix(cnt.Cvss2Vector, "AV:L") ||
            strings.Contains(cnt.Cvss3Vector, "AV:L") {
            return "AV:L"
        } else if strings.Contains(cnt.Cvss3Vector, "AV:P") {
            // no AV:P in CVSS v2
            return "AV:P"
        }
    }
    if cont, found := v.CveContents[DebianSecurityTracker]; found {
        if attackRange, found := cont.Optional["attack range"]; found {
            return attackRange
        }
    }
    return ""
}
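
// Illustrative sketch (added, not in the original source): a CVSS v2 vector is
// matched by prefix, a v3 vector by substring, e.g.:
//
//  Cvss2Vector "AV:N/AC:L/Au:N/C:P/I:P/A:P"        -> "AV:N"
//  Cvss3Vector "CVSS:3.1/AV:L/AC:L/PR:L/UI:N/..."  -> "AV:L"
//  only Debian Security Tracker data with Optional["attack range"] = "remote" -> "remote"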

// PatchStatus returns fixed or unfixed string
func (v VulnInfo) PatchStatus(packs Packages) string {
    // Vuls doesn't know the patch status of CPE-detected vulnerabilities
    if len(v.CpeURIs) != 0 {
        return ""
    }
    for _, p := range v.AffectedPackages {
        if p.NotFixedYet {
            return "unfixed"
        }

        // Fast and offline modes cannot get the candidate version.
        // The package can be considered 'fixed' if not-fixed-yet == true and
        // the fixed-in version (information in the OVAL) is not empty.
        if p.FixedIn != "" {
            continue
        }

        // fast and offline modes don't have the new version
        if pack, ok := packs[p.Name]; ok {
            if pack.NewVersion == "" {
                return "unknown"
            }
        }
    }
    return "fixed"
}
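
// Illustrative decision table (added, not in the original source):
//
//  CPE-based detection                  -> ""        (patch status unknown for CPE)
//  any affected package NotFixedYet     -> "unfixed"
//  FixedIn empty and NewVersion empty   -> "unknown" (fast/offline scan)
//  otherwise                            -> "fixed"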

// CveContentCvss has CVSS information
type CveContentCvss struct {
    Type  CveContentType `json:"type"`
    Value Cvss           `json:"value"`
}

// CvssType represents the type of CVSS
type CvssType string

const (
    // CVSS2 means CVSS version 2
    CVSS2 CvssType = "2"

    // CVSS3 means CVSS version 3
    CVSS3 CvssType = "3"
)

// Cvss has CVSS Score
type Cvss struct {
    Type                 CvssType `json:"type"`
    Score                float64  `json:"score"`
    CalculatedBySeverity bool     `json:"calculatedBySeverity"`
    Vector               string   `json:"vector"`
    Severity             string   `json:"severity"`
}

// Format formats the CVSS Score and Vector
func (c Cvss) Format() string {
    if c.Score == 0 || c.Vector == "" {
        return c.Severity
    }
    switch c.Type {
    case CVSS2:
        return fmt.Sprintf("%3.1f/%s %s", c.Score, c.Vector, c.Severity)
    case CVSS3:
        return fmt.Sprintf("%3.1f/%s %s", c.Score, c.Vector, c.Severity)
    }
    return ""
}
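
// Illustrative output (added, not in the original source):
//
//  Cvss{Type: CVSS3, Score: 9.8, Vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", Severity: "CRITICAL"}.Format()
//      -> "9.8/CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H CRITICAL"
//  Cvss{Severity: "HIGH"}.Format() // no score or vector
//      -> "HIGH"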

// Amazon Linux Security Advisory
// Critical, Important, Medium, Low
// https://alas.aws.amazon.com/
//
// RedHat, Oracle OVAL
// Critical, Important, Moderate, Low
// https://access.redhat.com/security/updates/classification
//
// Ubuntu OVAL
// Critical, High, Medium, Low
// https://wiki.ubuntu.com/Bugs/Importance
// https://people.canonical.com/~ubuntu-security/cve/priority.html
func severityToV2ScoreRoughly(severity string) float64 {
    switch strings.ToUpper(severity) {
    case "CRITICAL":
        return 10.0
    case "IMPORTANT", "HIGH":
        return 8.9
    case "MODERATE", "MEDIUM":
        return 6.9
    case "LOW":
        return 3.9
    }
    return 0
}
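
// Note (added, not in the original source): the returned values sit at the top
// of the usual CVSS v2 bands, so a vendor "High" sorts just below a measured
// 9.0+ score, e.g.:
//
//  severityToV2ScoreRoughly("Important")  // 8.9
//  severityToV2ScoreRoughly("medium")     // 6.9 (case-insensitive)
//  severityToV2ScoreRoughly("negligible") // 0 (unmapped severities)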

// FormatMaxCvssScore returns Max CVSS Score
func (v VulnInfo) FormatMaxCvssScore() string {
    max := v.MaxCvssScore()
    return fmt.Sprintf("%3.1f %s (%s)",
        max.Value.Score,
        strings.ToUpper(max.Value.Severity),
        max.Type)
}
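
// Illustrative output (added, not in the original source; the exact source
// label depends on the CveContentType string value):
//
//  v.FormatMaxCvssScore() // e.g. "9.8 CRITICAL (nvd)"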

// DistroAdvisories is a list of DistroAdvisory
type DistroAdvisories []DistroAdvisory

// AppendIfMissing appends if missing
func (advs *DistroAdvisories) AppendIfMissing(adv *DistroAdvisory) bool {
    for _, a := range *advs {
        if a.AdvisoryID == adv.AdvisoryID {
            return false
        }
    }
    *advs = append(*advs, *adv)
    return true
}

// DistroAdvisory has Amazon Linux, RHEL, FreeBSD Security Advisory information.
type DistroAdvisory struct {
    AdvisoryID  string    `json:"advisoryID"`
    Severity    string    `json:"severity"`
    Issued      time.Time `json:"issued"`
    Updated     time.Time `json:"updated"`
    Description string    `json:"description"`
}

// Format the distro advisory information
func (p DistroAdvisory) Format() string {
    if p.AdvisoryID == "" {
        return ""
    }

    var delim bytes.Buffer
    for i := 0; i < len(p.AdvisoryID); i++ {
        delim.WriteString("-")
    }
    buf := []string{p.AdvisoryID, delim.String(), p.Description}
    return strings.Join(buf, "\n")
}
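
// Illustrative output (added, not in the original source): the advisory ID is
// underlined with a dash ruler of the same length, e.g.:
//
//  DistroAdvisory{AdvisoryID: "RHSA-2020:1234", Description: "..."}.Format()
//      ->
//  RHSA-2020:1234
//  --------------
//  ...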

// Exploit :
type Exploit struct {
    ExploitType  exploitmodels.ExploitType `json:"exploitType"`
    ID           string                    `json:"id"`
    URL          string                    `json:"url"`
    Description  string                    `json:"description"`
    DocumentURL  *string                   `json:"documentURL,omitempty"`
    ShellCodeURL *string                   `json:"shellCodeURL,omitempty"`
    BinaryURL    *string                   `json:"binaryURL,omitempty"`
}

// Metasploit :
type Metasploit struct {
    Name        string   `json:"name"`
    Title       string   `json:"title"`
    Description string   `json:"description,omitempty"`
    URLs        []string `json:",omitempty"`
}

// Mitigation has a link and content
type Mitigation struct {
    CveContentType CveContentType `json:"cveContentType,omitempty"`
    Mitigation     string         `json:"mitigation,omitempty"`
    URL            string         `json:"url,omitempty"`
}

// AlertDict has JPCERT and USCERT alert data for the target CVE
type AlertDict struct {
    Ja []Alert `json:"ja"`
    En []Alert `json:"en"`
}

// FormatSource returns which source has this alert
func (a AlertDict) FormatSource() string {
    s := []string{}
    if len(a.En) != 0 {
        s = append(s, "USCERT")
    }
    if len(a.Ja) != 0 {
        s = append(s, "JPCERT")
    }
    return strings.Join(s, "/")
}
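
// Illustrative output (added, not in the original source):
//
//  AlertDict{En: []Alert{{...}}}.FormatSource()                     -> "USCERT"
//  AlertDict{Ja: []Alert{{...}}, En: []Alert{{...}}}.FormatSource() -> "USCERT/JPCERT"
//  AlertDict{}.FormatSource()                                       -> ""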

// Confidences is a list of Confidence
type Confidences []Confidence

// AppendIfMissing appends confidence to the list if missing
func (cs *Confidences) AppendIfMissing(confidence Confidence) {
    for _, c := range *cs {
        if c.DetectionMethod == confidence.DetectionMethod {
            return
        }
    }
    *cs = append(*cs, confidence)
}

// SortByConfident sorts Confidences
func (cs Confidences) SortByConfident() Confidences {
    sort.Slice(cs, func(i, j int) bool {
        return cs[i].SortOrder < cs[j].SortOrder
    })
    return cs
}

// Confidence is a ranking how confident the CVE-ID was detected correctly
// Score: 0 - 100
type Confidence struct {
    Score           int             `json:"score"`
    DetectionMethod DetectionMethod `json:"detectionMethod"`
    SortOrder       int             `json:"-"`
}

func (c Confidence) String() string {
    return fmt.Sprintf("%d / %s", c.Score, c.DetectionMethod)
}
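
// Illustrative output (added, not in the original source), using the
// predefined values declared below:
//
//  OvalMatch.String()             -> "100 / OvalMatch"
//  ChangelogLenientMatch.String() -> "50 / ChangelogLenientMatch"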

// DetectionMethod indicates
// - How to detect the CveID
// - How to get the changelog difference between installed and candidate version
type DetectionMethod string

const (
    // CpeNameMatchStr is a String representation of CpeNameMatch
    CpeNameMatchStr = "CpeNameMatch"

    // YumUpdateSecurityMatchStr is a String representation of YumUpdateSecurityMatch
    YumUpdateSecurityMatchStr = "YumUpdateSecurityMatch"

    // PkgAuditMatchStr is a String representation of PkgAuditMatch
    PkgAuditMatchStr = "PkgAuditMatch"

    // OvalMatchStr is a String representation of OvalMatch
    OvalMatchStr = "OvalMatch"

    // RedHatAPIStr is a String representation of RedHatAPIMatch
    RedHatAPIStr = "RedHatAPIMatch"

    // DebianSecurityTrackerMatchStr is a String representation of DebianSecurityTrackerMatch
    DebianSecurityTrackerMatchStr = "DebianSecurityTrackerMatch"

    // TrivyMatchStr is a String representation of Trivy
    TrivyMatchStr = "TrivyMatch"

    // ChangelogExactMatchStr is a String representation of ChangelogExactMatch
    ChangelogExactMatchStr = "ChangelogExactMatch"

    // ChangelogLenientMatchStr is a String representation of ChangelogLenientMatch
    ChangelogLenientMatchStr = "ChangelogLenientMatch"

    // GitHubMatchStr is a String representation of GitHubMatch
    GitHubMatchStr = "GitHubMatch"

    // WpScanMatchStr is a String representation of WordPress VulnDB scanning
    WpScanMatchStr = "WpScanMatch"

    // FailedToGetChangelog is a String representation of FailedToGetChangelog
    FailedToGetChangelog = "FailedToGetChangelog"

    // FailedToFindVersionInChangelog is a String representation of FailedToFindVersionInChangelog
    FailedToFindVersionInChangelog = "FailedToFindVersionInChangelog"
)

var (
    // CpeNameMatch is a ranking how confident the CVE-ID was detected correctly
    CpeNameMatch = Confidence{100, CpeNameMatchStr, 1}

    // YumUpdateSecurityMatch is a ranking how confident the CVE-ID was detected correctly
    YumUpdateSecurityMatch = Confidence{100, YumUpdateSecurityMatchStr, 2}

    // PkgAuditMatch is a ranking how confident the CVE-ID was detected correctly
    PkgAuditMatch = Confidence{100, PkgAuditMatchStr, 2}

    // OvalMatch is a ranking how confident the CVE-ID was detected correctly
    OvalMatch = Confidence{100, OvalMatchStr, 0}

    // RedHatAPIMatch is a ranking how confident the CVE-ID was detected correctly
    RedHatAPIMatch = Confidence{100, RedHatAPIStr, 0}

    // DebianSecurityTrackerMatch is a ranking how confident the CVE-ID was detected correctly
    DebianSecurityTrackerMatch = Confidence{100, DebianSecurityTrackerMatchStr, 0}

    // TrivyMatch is a ranking how confident the CVE-ID was detected correctly
    TrivyMatch = Confidence{100, TrivyMatchStr, 0}

    // ChangelogExactMatch is a ranking how confident the CVE-ID was detected correctly
    ChangelogExactMatch = Confidence{95, ChangelogExactMatchStr, 3}

    // ChangelogLenientMatch is a ranking how confident the CVE-ID was detected correctly
    ChangelogLenientMatch = Confidence{50, ChangelogLenientMatchStr, 4}

    // GitHubMatch is a ranking how confident the CVE-ID was detected correctly
    GitHubMatch = Confidence{97, GitHubMatchStr, 2}

    // WpScanMatch is a ranking how confident the CVE-ID was detected correctly
    WpScanMatch = Confidence{100, WpScanMatchStr, 0}
)
@@ -1,71 +0,0 @@

package models

// WordPressPackages has Core version, plugins and themes.
type WordPressPackages []WpPackage

// CoreVersion returns the core version of the installed WordPress
func (w WordPressPackages) CoreVersion() string {
    for _, p := range w {
        if p.Type == WPCore {
            return p.Version
        }
    }
    return ""
}

// Plugins returns a slice of plugins of the installed WordPress
func (w WordPressPackages) Plugins() (ps []WpPackage) {
    for _, p := range w {
        if p.Type == WPPlugin {
            ps = append(ps, p)
        }
    }
    return
}

// Themes returns a slice of themes of the installed WordPress
func (w WordPressPackages) Themes() (ps []WpPackage) {
    for _, p := range w {
        if p.Type == WPTheme {
            ps = append(ps, p)
        }
    }
    return
}

// Find searches by specified name
func (w WordPressPackages) Find(name string) (ps *WpPackage, found bool) {
    for _, p := range w {
        if p.Name == name {
            return &p, true
        }
    }
    return nil, false
}
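
// Illustrative usage (added, not in the original source; "akismet" is just an
// example plugin name):
//
//  if pkg, found := w.Find("akismet"); found {
//      fmt.Println(pkg.Version) // version of the matching package, if installed
//  }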

const (
    // WPCore is a type `core` in WPPackage struct
    WPCore = "core"
    // WPPlugin is a type `plugin` in WPPackage struct
    WPPlugin = "plugin"
    // WPTheme is a type `theme` in WPPackage struct
    WPTheme = "theme"

    // Inactive is an inactive status in WPPackage struct
    Inactive = "inactive"
)

// WpPackage has the details of a plugin or theme
type WpPackage struct {
    Name    string `json:"name,omitempty"`
    Status  string `json:"status,omitempty"` // active, inactive or must-use
    Update  string `json:"update,omitempty"` // available or none
    Version string `json:"version,omitempty"`
    Type    string `json:"type,omitempty"` // core, plugin, theme
}

// WpPackageFixStatus is used in Vulninfo.WordPress
type WpPackageFixStatus struct {
    Name    string `json:"name,omitempty"`
    FixedIn string `json:"fixedIn,omitempty"`
}
@@ -1 +0,0 @@

package msf
75  msf/msf.go
@@ -1,75 +0,0 @@

// +build !scanner

package msf

import (
    "fmt"
    "net/http"

    cnf "github.com/future-architect/vuls/config"
    "github.com/future-architect/vuls/models"
    "github.com/parnurzeal/gorequest"
    "github.com/takuzoo3868/go-msfdb/db"
    metasploitmodels "github.com/takuzoo3868/go-msfdb/models"
    "golang.org/x/xerrors"
)

// FillWithMetasploit fills each scanned CVE with the Metasploit modules that reference it
func FillWithMetasploit(driver db.DB, r *models.ScanResult) (nMetasploitCve int, err error) {
    if driver == nil {
        return 0, nil
    }
    for cveID, vuln := range r.ScannedCves {
        if cveID == "" {
            continue
        }
        ms := driver.GetModuleByCveID(cveID)
        if len(ms) == 0 {
            continue
        }
        modules := ConvertToModels(ms)
        vuln.Metasploits = modules
        r.ScannedCves[cveID] = vuln
        nMetasploitCve++
    }

    return nMetasploitCve, nil
}

// ConvertToModels converts go-msfdb models to vuls models
func ConvertToModels(ms []*metasploitmodels.Metasploit) (modules []models.Metasploit) {
    for _, m := range ms {
        var links []string
        if 0 < len(m.References) {
            for _, u := range m.References {
                links = append(links, u.Link)
            }
        }
        module := models.Metasploit{
            Name:        m.Name,
            Title:       m.Title,
            Description: m.Description,
            URLs:        links,
        }
        modules = append(modules, module)
    }
    return modules
}
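
// Illustrative flow (added, not in the original source): for a scan result
// that contains a CVE with known Metasploit modules, e.g. CVE-2020-0796,
// FillWithMetasploit looks the CVE up in the go-msfdb database and attaches
// the matching modules:
//
//  n, err := FillWithMetasploit(driver, &result)
//  // result.ScannedCves["CVE-2020-0796"].Metasploits now holds the module
//  // name, title, description and reference URLs; n counts CVEs that matched.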

// CheckHTTPHealth does a health check of the Metasploit HTTP server
func CheckHTTPHealth() error {
    if !cnf.Conf.Metasploit.IsFetchViaHTTP() {
        return nil
    }

    url := fmt.Sprintf("%s/health", cnf.Conf.Metasploit.URL)
    var errs []error
    var resp *http.Response
    resp, _, errs = gorequest.New().Get(url).End()
    // resp, _, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
    // resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
    if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
        return xerrors.Errorf("Failed to connect to metasploit server. url: %s, errs: %w", url, errs)
    }
    return nil
}