Compare commits
126 Commits
| SHA1 |
|---|
| 43c05d06fc |
| a3f7d1d7e7 |
| bb4a1ca6c2 |
| 57cce640e1 |
| 1eb5d36668 |
| 6bc4850596 |
| 24005ae7ae |
| 7aa296bb57 |
| 3829ed2f8e |
| 2b7294a504 |
| 0c6a892893 |
| 89d94ad85a |
| ffdb78962f |
| 321dae37ce |
| a31797af0b |
| 32999cf432 |
| 88218f5d92 |
| 15761933ac |
| 0b62842f0e |
| 6bceddeeda |
| 2dcbff8cd5 |
| 8659668177 |
| e07b6a9160 |
| aac5ef1438 |
| d780a73297 |
| 9ef8cee36e |
| 77808a2c05 |
| 177e553d12 |
| 40f8272a28 |
| a7eb1141ae |
| c73ed7f32f |
| f047a6fe0c |
| 7f15a86d6a |
| da1e515253 |
| 591786fde6 |
| 47e6ea249d |
| 4a72295de7 |
| 9ed5f2cac5 |
| 3e67f04fe4 |
| b9416ae062 |
| b4e49e093e |
| 020f6ac609 |
| 7e71cbdd46 |
| 1003f62212 |
| 9b18e1f9f0 |
| 24f790f474 |
| fb8749fc5e |
| 96c3592db1 |
| d65421cf46 |
| c52ba448cd |
| 21adce463b |
| f24240bf90 |
| ff83cadd6e |
| e8c09282d9 |
| 5f4d68cde4 |
| 9077a83ea8 |
| 543dc99ecd |
| f0b3a8b1db |
| 0b9ec05181 |
| 0bf12412d6 |
| 0ea4d58c63 |
| 5755b00576 |
| 1c8e074c9d |
| 0e0e5ce4be |
| 23dfe53885 |
| 8e6351a9e4 |
| 3086e2760f |
| b8db2e0b74 |
| 43b46cb324 |
| d0559c7719 |
| 231c63cf62 |
| 2a9aebe059 |
| 4e535d792f |
| 4b487503d4 |
| 0095c40e69 |
| 82c1abfd3a |
| 40988401bd |
| e8e3f4d138 |
| 7eb77f5b51 |
| e115235299 |
| 151d4b2d30 |
| e553f8b4c5 |
| 47652ef0fb |
| ab0e950800 |
| a7b0ce1c85 |
| dc9c0edece |
| 17ae386d1e |
| 2d369d0cfe |
| c36e645d9b |
| 40039c07e2 |
| a692cec0ef |
| e7ca491a94 |
| 23f3e2fc11 |
| 27b3e17b79 |
| 740781af56 |
| 36c9c229b8 |
| 183fdcbdef |
| a2a697900a |
| 6fef4db8a0 |
| e879ff1e9e |
| 9bfe0627ae |
| 0179f4299a |
| 56017e57a0 |
| cda91e0906 |
| 5d47adb5c9 |
| 54e73c2f54 |
| 2d075079f1 |
| 2a8ee4b22b |
| 1ec31d7be9 |
| 02286b0c59 |
| 1d0c5dea9f |
| 1c4a12c4b7 |
| 3f2ac45d71 |
| 518f4dc039 |
| 2cdeef4ffe |
| 03579126fd |
| e3c27e1817 |
| aeaf308679 |
| f5e47bea40 |
| 50cf13a7f2 |
| abd8041772 |
| 847c6438e7 |
| ef8309df27 |
| 0dff6cf983 |
| 4c04acbd9e |
| 1c4f231572 |
12 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,12 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "gomod" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "weekly"
    target-branch: "master"
45 .github/workflows/docker-publish.yml vendored Normal file
@@ -0,0 +1,45 @@
name: Publish Docker image

on:
  push:
    branches:
      - 'master'
    tags:
      - '*'

jobs:
  docker:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v3
        with:
          images: vuls/vuls
          tags: |
            type=ref,event=tag

      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          push: true
          tags: |
            vuls/vuls:latest
            ${{ steps.meta.outputs.tags }}
          secrets: |
            "github_token=${{ secrets.GITHUB_TOKEN }}"
2 .github/workflows/golangci.yml vendored
@@ -16,7 +16,7 @@ jobs:
        uses: golangci/golangci-lint-action@v2
        with:
          # Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
          version: v1.32
          version: v1.42
          args: --timeout=10m

          # Optional: working directory, useful for monorepos
2 .github/workflows/goreleaser.yml vendored
@@ -19,7 +19,7 @@ jobs:
        name: Set up Go
        uses: actions/setup-go@v2
        with:
          go-version: 1.15
          go-version: 1.16
      -
        name: Run GoReleaser
        uses: goreleaser/goreleaser-action@v2
2 .github/workflows/test.yml vendored
@@ -11,7 +11,7 @@ jobs:
      - name: Set up Go 1.x
        uses: actions/setup-go@v2
        with:
          go-version: 1.15.x
          go-version: 1.16.x
        id: go

      - name: Check out code into the Go module directory
22 .github/workflows/tidy.yml vendored
@@ -1,22 +0,0 @@
name: go-mod-tidy-pr

on:
  schedule:
    - cron: "0 0 * * 1" # Weekly build

jobs:
  go-mod-tidy-pr:
    name: go-mod-tidy-pr

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      - name: Run go-mod-tidy-pr
        uses: sue445/go-mod-tidy-pr@master
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          git_user_name: kotakanbe
          git_user_email: kotakanbe@gmail.com
          go_version: 1.15.6
9 .gitignore vendored
@@ -1,6 +1,6 @@
.vscode
*.txt
*.json
*.swp
*.sqlite3*
*.db
tags
@@ -9,9 +9,12 @@ coverage.out
issues/
vendor/
log/
results/
*config.toml
results
config.toml
!setup/docker/*
.DS_Store
dist/
.idea
vuls.*
vuls
!cmd/vuls
3 .gitmodules vendored Normal file
@@ -0,0 +1,3 @@
[submodule "integration"]
	path = integration
	url = https://github.com/vulsio/integration
@@ -1,14 +1,44 @@
name: golang-ci

linters-settings:
  errcheck:
  revive:
    # see https://github.com/mgechev/revive#available-rules for details.
    ignore-generated-header: true
    severity: warning
    confidence: 0.8
    rules:
      - name: blank-imports
      - name: context-as-argument
      - name: context-keys-type
      - name: dot-imports
      - name: error-return
      - name: error-strings
      - name: error-naming
      - name: exported
      - name: if-return
      - name: increment-decrement
      - name: var-naming
      - name: var-declaration
      - name: package-comments
      - name: range
      - name: receiver-naming
      - name: time-naming
      - name: unexported-return
      - name: indent-error-flow
      - name: errorf
      - name: empty-block
      - name: superfluous-else
      - name: unused-parameter
      - name: unreachable-code
      - name: redefines-builtin-id
  # errcheck:
  #exclude: /path/to/file.txt

linters:
  disable-all: true
  enable:
    - goimports
    - golint
    - revive
    - govet
    - misspell
    - errcheck
@@ -31,7 +31,8 @@ builds:
    main: ./cmd/scanner/main.go
    flags:
      - -a
      - -tags=scanner
    tags:
      - scanner
    ldflags:
      - -s -w -X github.com/future-architect/vuls/config.Version={{.Version}} -X github.com/future-architect/vuls/config.Revision={{.Commit}}-{{ .CommitDate }}
    binary: vuls-scanner
@@ -46,6 +47,8 @@ builds:
      - amd64
      - arm
      - arm64
    tags:
      - scanner
    main: ./contrib/trivy/cmd/main.go
    binary: trivy-to-vuls

@@ -61,7 +64,8 @@ builds:
      - arm64
    flags:
      - -a
      - -tags=scanner
    tags:
      - scanner
    main: ./contrib/future-vuls/cmd/main.go
    binary: future-vuls

@@ -74,7 +78,6 @@ archives:
    format: tar.gz
    files:
      - LICENSE
      - NOTICE
      - README*
      - CHANGELOG.md

@@ -85,7 +88,6 @@ archives:
    format: tar.gz
    files:
      - LICENSE
      - NOTICE
      - README*
      - CHANGELOG.md

@@ -96,7 +98,6 @@ archives:
    format: tar.gz
    files:
      - LICENSE
      - NOTICE
      - README*
      - CHANGELOG.md

@@ -107,7 +108,6 @@ archives:
    format: tar.gz
    files:
      - LICENSE
      - NOTICE
      - README*
      - CHANGELOG.md
snapshot:
30 .revive.toml Normal file
@@ -0,0 +1,30 @@
ignoreGeneratedHeader = false
severity = "warning"
confidence = 0.8
errorCode = 0
warningCode = 0

[rule.blank-imports]
[rule.context-as-argument]
[rule.context-keys-type]
[rule.dot-imports]
[rule.error-return]
[rule.error-strings]
[rule.error-naming]
[rule.exported]
[rule.if-return]
[rule.increment-decrement]
[rule.var-naming]
[rule.var-declaration]
[rule.package-comments]
[rule.range]
[rule.receiver-naming]
[rule.time-naming]
[rule.unexported-return]
[rule.indent-error-flow]
[rule.errorf]
[rule.empty-block]
[rule.superfluous-else]
[rule.unused-parameter]
[rule.unreachable-code]
[rule.redefines-builtin-id]
@@ -10,10 +10,7 @@ ENV REPOSITORY github.com/future-architect/vuls
COPY . $GOPATH/src/$REPOSITORY
RUN cd $GOPATH/src/$REPOSITORY && make install


FROM alpine:3.11

MAINTAINER hikachan sadayuki-matsuno
FROM alpine:3.14

ENV LOGDIR /var/log/vuls
ENV WORKDIR /vuls
@@ -22,6 +19,7 @@ RUN apk add --no-cache \
    openssh-client \
    ca-certificates \
    git \
    nmap \
    && mkdir -p $WORKDIR $LOGDIR

COPY --from=builder /go/bin/vuls /usr/local/bin/
190 GNUmakefile
@@ -17,14 +17,13 @@ PKGS = $(shell go list ./...)
VERSION := $(shell git describe --tags --abbrev=0)
REVISION := $(shell git rev-parse --short HEAD)
BUILDTIME := $(shell date "+%Y%m%d_%H%M%S")
LDFLAGS := -X 'github.com/future-architect/vuls/config.Version=$(VERSION)' \
	-X 'github.com/future-architect/vuls/config.Revision=build-$(BUILDTIME)_$(REVISION)'
LDFLAGS := -X 'github.com/future-architect/vuls/config.Version=$(VERSION)' -X 'github.com/future-architect/vuls/config.Revision=build-$(BUILDTIME)_$(REVISION)'
GO := GO111MODULE=on go
CGO_UNABLED := CGO_ENABLED=0 go
GO_OFF := GO111MODULE=off go


all: build
all: b

build: ./cmd/vuls/main.go pretest fmt
	$(GO) build -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/vuls
@@ -32,22 +31,25 @@ build: ./cmd/vuls/main.go pretest fmt
b: ./cmd/vuls/main.go
	$(GO) build -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/vuls

install: ./cmd/vuls/main.go pretest fmt
install: ./cmd/vuls/main.go
	$(GO) install -ldflags "$(LDFLAGS)" ./cmd/vuls

build-scanner: ./cmd/scanner/main.go pretest fmt
build-scanner: ./cmd/scanner/main.go
	$(CGO_UNABLED) build -tags=scanner -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/scanner

install-scanner: ./cmd/scanner/main.go pretest fmt
install-scanner: ./cmd/scanner/main.go
	$(CGO_UNABLED) install -tags=scanner -ldflags "$(LDFLAGS)" ./cmd/scanner

lint:
	$(GO_OFF) get -u golang.org/x/lint/golint
	golint $(PKGS)
	$(GO_OFF) get -u github.com/mgechev/revive
	revive -config ./.revive.toml -formatter plain $(PKGS)

vet:
	echo $(PKGS) | xargs env $(GO) vet || exit;

golangci:
	golangci-lint run

fmt:
	gofmt -s -w $(SRCS)

@@ -57,7 +59,7 @@ mlint:
fmtcheck:
	$(foreach file,$(SRCS),gofmt -s -d $(file);)

pretest: lint vet fmtcheck
pretest: lint vet fmtcheck golangci

test:
	$(GO) test -cover -v ./... || exit;
@@ -68,15 +70,179 @@ unused:
cov:
	@ go get -v github.com/axw/gocov/gocov
	@ go get golang.org/x/tools/cmd/cover
	gocov test | gocov report
	gocov test -v ./... | gocov report

clean:
	echo $(PKGS) | xargs go clean || exit;

# trivy-to-vuls
build-trivy-to-vuls: pretest fmt
	$(GO) build -o trivy-to-vuls contrib/trivy/cmd/*.go
	$(GO) build -a -ldflags "$(LDFLAGS)" -o trivy-to-vuls contrib/trivy/cmd/*.go

# future-vuls
build-future-vuls: pretest fmt
	$(GO) build -o future-vuls contrib/future-vuls/cmd/*.go
	$(GO) build -a -ldflags "$(LDFLAGS)" -o future-vuls contrib/future-vuls/cmd/*.go


# integration-test
BASE_DIR := '${PWD}/integration/results'
# $(shell mkdir -p ${BASE_DIR})
NOW=$(shell date --iso-8601=seconds)
NOW_JSON_DIR := '${BASE_DIR}/$(NOW)'
ONE_SEC_AFTER=$(shell date -d '+1 second' --iso-8601=seconds)
ONE_SEC_AFTER_JSON_DIR := '${BASE_DIR}/$(ONE_SEC_AFTER)'
LIBS := 'bundler' 'pip' 'pipenv' 'poetry' 'composer' 'npm' 'yarn' 'cargo' 'gomod' 'gobinary' 'jar' 'pom' 'nuget-lock' 'nuget-config' 'nvd_exact' 'nvd_rough' 'nvd_vendor_product' 'nvd_match_no_jvn' 'jvn_vendor_product' 'jvn_vendor_product_nover'

diff:
	# git clone git@github.com:vulsio/vulsctl.git
	# cd vulsctl/docker
	# ./update-all.sh
	# cd /path/to/vuls
	# vim integration/int-config.toml
	# ln -s vuls vuls.new
	# ln -s oldvuls vuls.old
	# make int
	# (ex. test 10 times: for i in `seq 10`; do make int ARGS=-quiet ; done)
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
	mv ${BASE_DIR} /tmp/${NOW}
endif
	mkdir -p ${NOW_JSON_DIR}
	sleep 1
	./vuls.old scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp ${BASE_DIR}/current/*.json ${NOW_JSON_DIR}
	- cp integration/data/results/*.json ${NOW_JSON_DIR}
	./vuls.old report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${NOW}

	mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
	sleep 1
	./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp ${BASE_DIR}/current/*.json ${ONE_SEC_AFTER_JSON_DIR}
	- cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
	./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${ONE_SEC_AFTER}

	$(call sed-d)
	- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
	echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
	$(call count-cve)

diff-redis:
	# docker network create redis-nw
	# docker run --name redis -d --network redis-nw -p 127.0.0.1:6379:6379 redis
	# git clone git@github.com:vulsio/vulsctl.git
	# cd vulsctl/docker
	# ./update-all-redis.sh
	# (or export DOCKER_NETWORK=redis-nw; cd /home/ubuntu/vulsctl/docker; ./update-all.sh --dbtype redis --dbpath "redis://redis/0")
	# vim integration/int-redis-config.toml
	# ln -s vuls vuls.new
	# ln -s oldvuls vuls.old
	# make int-redis
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
	mv ${BASE_DIR} /tmp/${NOW}
endif
	mkdir -p ${NOW_JSON_DIR}
	sleep 1
	./vuls.old scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp -f ${BASE_DIR}/current/*.json ${NOW_JSON_DIR}
	- cp integration/data/results/*.json ${NOW_JSON_DIR}
	./vuls.old report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${NOW}

	mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
	sleep 1
	./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp -f ${BASE_DIR}/current/*.json ${ONE_SEC_AFTER_JSON_DIR}
	- cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
	./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${ONE_SEC_AFTER}

	$(call sed-d)
	- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
	echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
	$(call count-cve)

diff-rdb-redis:
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
	mv ${BASE_DIR} /tmp/${NOW}
endif
	mkdir -p ${NOW_JSON_DIR}
	sleep 1
	# new vs new
	./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp -f ${BASE_DIR}/current/*.json ${NOW_JSON_DIR}
	cp integration/data/results/*.json ${NOW_JSON_DIR}
	./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${NOW}

	mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
	sleep 1
	./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
	cp -f ${BASE_DIR}/current/*.json ${ONE_SEC_AFTER_JSON_DIR}
	cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
	./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${ONE_SEC_AFTER}

	$(call sed-d)
	- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
	echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
	$(call count-cve)

head= $(shell git rev-parse HEAD)
prev= $(shell git rev-parse HEAD^)
branch=$(shell git rev-parse --abbrev-ref HEAD)
build-integration:
	git stash

	# buld HEAD
	git checkout ${head}
	make build
	mv -f ./vuls ./vuls.${head}

	# HEAD^
	git checkout ${prev}
	make build
	mv -f ./vuls ./vuls.${prev}

	# master
	git checkout master
	make build
	mv -f ./vuls ./vuls.master

	# working tree
	git checkout ${branch}
	git stash apply stash@\{0\}
	make build

	# update integration data
	git submodule update --remote

	# for integration testing, vuls.new and vuls.old needed.
	# ex)
	# $ ln -s ./vuls ./vuls.new
	# $ ln -s ./vuls.${head} ./vuls.old
	# or
	# $ ln -s ./vuls.${prev} ./vuls.old
	# then
	# $ make diff
	# $ make diff-redis
	# $ make diff-rdb-redis


define sed-d
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/scannedAt/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/scannedAt/d' {} \;
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/"Type":/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/"Type":/d' {} \;
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/"SQLite3Path":/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/"SQLite3Path":/d' {} \;
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/reportedRevision/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/reportedRevision/d' {} \;
	find ${NOW_JSON_DIR} -type f -exec sed -i -e '/scannedRevision/d' {} \;
	find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/scannedRevision/d' {} \;
endef

define count-cve
	for jsonfile in ${NOW_JSON_DIR}/*.json ; do \
		echo $$jsonfile; cat $$jsonfile | jq ".scannedCves | length" ; \
	done
	for jsonfile in ${ONE_SEC_AFTER_JSON_DIR}/*.json ; do \
		echo $$jsonfile; cat $$jsonfile | jq ".scannedCves | length" ; \
	done
endef
153 LICENSE
@@ -1,21 +1,23 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

Preamble

The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The GNU General Public License is a free, copyleft license for
software and other kinds of works.

The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.

When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
@@ -24,34 +26,44 @@ them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.

Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.

A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.

The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.

An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.

Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.

Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.

The precise terms and conditions for copying, distribution and
modification follow.
@@ -60,7 +72,7 @@ modification follow.

0. Definitions.

"This License" refers to version 3 of the GNU Affero General Public License.
"This License" refers to version 3 of the GNU General Public License.

"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
@@ -537,45 +549,35 @@ to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.

13. Remote Network Interaction; Use with the GNU General Public License.

Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
13. Use with the GNU Affero General Public License.

Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.

14. Revised Versions of this License.

The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.

If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.

@@ -629,33 +631,44 @@ to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
Vuls - Vulnerability Scanner
Copyright (C) 2016 Future Corporation , Japan.

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
GNU General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:

Vuls Copyright (C) 2016 Future Corporation , Japan.
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".

You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.

The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
23 README.md
@@ -50,7 +50,7 @@ Vuls is a tool created to solve the problems listed above. It has the following

[Supports major Linux/FreeBSD](https://vuls.io/docs/en/supported-os.html)

- Alpine, Amazon Linux, CentOS, Debian, Oracle Linux, Raspbian, RHEL, SUSE Enterprise Linux, and Ubuntu
- Alpine, Amazon Linux, CentOS, Alma Linux, Rocky Linux, Debian, Oracle Linux, Raspbian, RHEL, SUSE Enterprise Linux, and Ubuntu
- FreeBSD
- Cloud, on-premise, Running Docker Container

@@ -71,6 +71,7 @@ Vuls is a tool created to solve the problems listed above. It has the following
  - [Alpine-secdb](https://git.alpinelinux.org/cgit/alpine-secdb/)
  - [Red Hat Security Advisories](https://access.redhat.com/security/security-updates/)
  - [Debian Security Bug Tracker](https://security-tracker.debian.org/tracker/)
  - [Ubuntu CVE Tracker](https://people.canonical.com/~ubuntu-security/cve/)

- Commands(yum, zypper, pkg-audit)
  - RHSA / ALAS / ELSA / FreeBSD-SA
@@ -79,11 +80,16 @@ Vuls is a tool created to solve the problems listed above. It has the following
- PoC, Exploit
  - [Exploit Database](https://www.exploit-db.com/)
  - [Metasploit-Framework modules](https://www.rapid7.com/db/?q=&type=metasploit)
  - [qazbnm456/awesome-cve-poc](https://github.com/qazbnm456/awesome-cve-poc)
  - [nomi-sec/PoC-in-GitHub](https://github.com/nomi-sec/PoC-in-GitHub)

- CERT
  - [US-CERT](https://www.us-cert.gov/ncas/alerts)
  - [JPCERT](http://www.jpcert.or.jp/at/2019.html)

- CISA(Cybersecurity & Infrastructure Security Agency)
  - [Known Exploited Vulnerabilities Catalog](https://www.cisa.gov/known-exploited-vulnerabilities-catalog)

- Libraries
  - [Node.js Security Working Group](https://github.com/nodejs/security-wg)
  - [Ruby Advisory Database](https://github.com/rubysec/ruby-advisory-db)
@@ -100,15 +106,15 @@ Vuls is a tool created to solve the problems listed above. It has the following

- Scan without root privilege, no dependencies
- Almost no load on the scan target server
- Offline mode scan with no internet access. (CentOS, Debian, Oracle Linux, Red Hat, and Ubuntu)
- Offline mode scan with no internet access. (CentOS, Alma Linux, Rocky Linux, Debian, Oracle Linux, Red Hat, and Ubuntu)

[Fast Root Scan](https://vuls.io/docs/en/architecture-fast-root-scan.html)

- Scan with root privilege
- Almost no load on the scan target server
- Detect processes affected by update using yum-ps (Amazon Linux, CentOS, Oracle Linux, and RedHat)
- Detect processes affected by update using yum-ps (Amazon Linux, CentOS, Alma Linux, Rocky Linux, Oracle Linux, and RedHat)
- Detect processes which updated before but not restarting yet using checkrestart of debian-goodies (Debian and Ubuntu)
- Offline mode scan with no internet access. (CentOS, Debian, Oracle Linux, Red Hat, and Ubuntu)
- Offline mode scan with no internet access. (CentOS, Alma Linux, Rocky Linux, Debian, Oracle Linux, Red Hat, and Ubuntu)

### [Remote, Local scan mode, Server mode](https://vuls.io/docs/en/architecture-remote-local.html)

@@ -183,11 +189,14 @@ see [vulsdoc](https://vuls.io/docs/en/how-to-contribute.html)

----

## Stargazers over time
## Sponsors

[](https://starcharts.herokuapp.com/future-architect/vuls)
| | |
| ------------- | ------------- |
| <a href="https://www.tines.com/?utm_source=oss&utm_medium=sponsorship&utm_campaign=vuls"><img src="img/sponsor/tines.png" align="left" width="600px" ></a> | Tines is no-code automation for security teams. Build powerful, reliable workflows without a development team. |
| <a href="https://www.sakura.ad.jp/"><img src="https://vuls.io/img/icons/sakura.svg" align="left" width="600px" ></a> | SAKURA internet Inc. is an Internet company founded in 1996. We provide cloud computing services such as "Sakura's Shared Server", "Sakura's VPS", and "Sakura's Cloud" to meet the needs of a wide range of customers, from individuals and corporations to the education and public sectors, using its own data centers in Japan. Based on the philosophy of "changing what you want to do into what you can do," we offer DX solutions for all fields. |

-----;
----

## License
9 SECURITY.md Normal file
@@ -0,0 +1,9 @@
# Security Policy

## Supported Versions

Only the latest version is supported.

## Reporting a Vulnerability

Email kotakanbe@gmail.com
12 cache/bolt.go vendored
@@ -5,8 +5,8 @@ import (
	"time"

	"github.com/boltdb/bolt"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/util"
	"github.com/sirupsen/logrus"
	"golang.org/x/xerrors"
)

@@ -14,12 +14,12 @@ import (
// boltdb is used to store a cache of Changelogs of Ubuntu/Debian
type Bolt struct {
	Path string
	Log  *logrus.Entry
	Log  logging.Logger
	db   *bolt.DB
}

// SetupBolt opens a boltdb and creates a meta bucket if not exists.
func SetupBolt(path string, l *logrus.Entry) error {
func SetupBolt(path string, l logging.Logger) error {
	l.Infof("Open boltDB: %s", path)
	db, err := bolt.Open(path, 0600, nil)
	if err != nil {
@@ -47,7 +47,7 @@ func (b Bolt) Close() error {
	return b.db.Close()
}

// CreateBucketIfNotExists creates a buket that is specified by arg.
// CreateBucketIfNotExists creates a bucket that is specified by arg.
func (b *Bolt) createBucketIfNotExists(name string) error {
	return b.db.Update(func(tx *bolt.Tx) error {
		_, err := tx.CreateBucketIfNotExists([]byte(name))
@@ -93,7 +93,7 @@ func (b Bolt) RefreshMeta(meta Meta) error {
	})
}

// EnsureBuckets puts a Meta information and create a buket that holds changelogs.
// EnsureBuckets puts a Meta information and create a bucket that holds changelogs.
func (b Bolt) EnsureBuckets(meta Meta) error {
	jsonBytes, err := json.Marshal(meta)
	if err != nil {
@@ -159,7 +159,7 @@ func (b Bolt) GetChangelog(servername, packName string) (changelog string, err e
	return
}

// PutChangelog put the changelgo of specified packName into the Bucket
// PutChangelog put the changelog of specified packName into the Bucket
func (b Bolt) PutChangelog(servername, packName, changelog string) error {
	return b.db.Update(func(tx *bolt.Tx) error {
		bkt := tx.Bucket([]byte(servername))
8 cache/bolt_test.go vendored
@@ -7,8 +7,8 @@ import (

	"github.com/boltdb/bolt"
	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
	"github.com/sirupsen/logrus"
)

const path = "/tmp/vuls-test-cache-11111111.db"
@@ -29,7 +29,7 @@ var meta = Meta{
}

func TestSetupBolt(t *testing.T) {
	log := logrus.NewEntry(&logrus.Logger{})
	log := logging.NewNormalLogger()
	err := SetupBolt(path, log)
	if err != nil {
		t.Errorf("Failed to setup bolt: %s", err)
@@ -57,7 +57,7 @@ func TestSetupBolt(t *testing.T) {
}

func TestEnsureBuckets(t *testing.T) {
	log := logrus.NewEntry(&logrus.Logger{})
	log := logging.NewNormalLogger()
	if err := SetupBolt(path, log); err != nil {
		t.Errorf("Failed to setup bolt: %s", err)
	}
@@ -98,7 +98,7 @@ func TestEnsureBuckets(t *testing.T) {

func TestPutGetChangelog(t *testing.T) {
	clog := "changelog-text"
	log := logrus.NewEntry(&logrus.Logger{})
	log := logging.NewNormalLogger()
	if err := SetupBolt(path, log); err != nil {
		t.Errorf("Failed to setup bolt: %s", err)
	}
30 config/awsconf.go Normal file
@@ -0,0 +1,30 @@
package config

// AWSConf is aws config
type AWSConf struct {
	// AWS profile to use
	Profile string `json:"profile"`

	// AWS region to use
	Region string `json:"region"`

	// S3 bucket name
	S3Bucket string `json:"s3Bucket"`

	// /bucket/path/to/results
	S3ResultsDir string `json:"s3ResultsDir"`

	// The Server-side encryption algorithm used when storing the reports in S3 (e.g., AES256, aws:kms).
	S3ServerSideEncryption string `json:"s3ServerSideEncryption"`

	Enabled bool `toml:"-" json:"-"`
}

// Validate configuration
func (c *AWSConf) Validate() (errs []error) {
	// TODO
	if !c.Enabled {
		return
	}
	return
}
46 config/azureconf.go Normal file
@@ -0,0 +1,46 @@
package config

import (
	"os"

	"golang.org/x/xerrors"
)

// AzureConf is azure config
type AzureConf struct {
	// Azure account name to use. AZURE_STORAGE_ACCOUNT environment variable is used if not specified
	AccountName string `json:"accountName"`

	// Azure account key to use. AZURE_STORAGE_ACCESS_KEY environment variable is used if not specified
	AccountKey string `json:"-"`

	// Azure storage container name
	ContainerName string `json:"containerName"`

	Enabled bool `toml:"-" json:"-"`
}

const (
	azureAccount = "AZURE_STORAGE_ACCOUNT"
	azureKey     = "AZURE_STORAGE_ACCESS_KEY"
)

// Validate configuration
func (c *AzureConf) Validate() (errs []error) {
	if !c.Enabled {
		return
	}

	// overwrite if env var is not empty
	if os.Getenv(azureAccount) != "" {
		c.AccountName = os.Getenv(azureAccount)
	}
	if os.Getenv(azureKey) != "" {
		c.AccountKey = os.Getenv(azureKey)
	}

	if c.ContainerName == "" {
		errs = append(errs, xerrors.Errorf("Azure storage container name is required"))
	}
	return
}
@@ -9,11 +9,12 @@ import (
type ChatWorkConf struct {
	APIToken string `json:"-"`
	Room     string `json:"-"`
	Enabled  bool   `toml:"-" json:"-"`
}

// Validate validates configuration
func (c *ChatWorkConf) Validate() (errs []error) {
	if !Conf.ToChatWork {
	if !c.Enabled {
		return
	}
	if len(c.Room) == 0 {
338
config/config.go
338
config/config.go
@@ -3,12 +3,12 @@ package config
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
@@ -23,97 +23,81 @@ var Conf Config
|
||||
|
||||
//Config is struct of Configuration
|
||||
type Config struct {
|
||||
Debug bool `json:"debug,omitempty"`
|
||||
DebugSQL bool `json:"debugSQL,omitempty"`
|
||||
Lang string `json:"lang,omitempty"`
|
||||
logging.LogOpts
|
||||
|
||||
// scan, report
|
||||
HTTPProxy string `valid:"url" json:"httpProxy,omitempty"`
|
||||
LogDir string `json:"logDir,omitempty"`
|
||||
ResultsDir string `json:"resultsDir,omitempty"`
|
||||
Pipe bool `json:"pipe,omitempty"`
|
||||
Quiet bool `json:"quiet,omitempty"`
|
||||
NoProgress bool `json:"noProgress,omitempty"`
|
||||
SSHNative bool `json:"sshNative,omitempty"`
|
||||
Vvv bool `json:"vvv,omitempty"`
|
||||
|
||||
Default ServerInfo `json:"default,omitempty"`
|
||||
Servers map[string]ServerInfo `json:"servers,omitempty"`
|
||||
CvssScoreOver float64 `json:"cvssScoreOver,omitempty"`
|
||||
Default ServerInfo `json:"default,omitempty"`
|
||||
Servers map[string]ServerInfo `json:"servers,omitempty"`
|
||||
|
||||
IgnoreUnscoredCves bool `json:"ignoreUnscoredCves,omitempty"`
|
||||
IgnoreUnfixed bool `json:"ignoreUnfixed,omitempty"`
|
||||
IgnoreGitHubDismissed bool `json:"ignore_git_hub_dismissed,omitempty"`
|
||||
|
||||
CacheDBPath string `json:"cacheDBPath,omitempty"`
|
||||
TrivyCacheDBDir string `json:"trivyCacheDBDir,omitempty"`
|
||||
ScanOpts
|
||||
|
||||
// report
|
||||
CveDict GoCveDictConf `json:"cveDict,omitempty"`
|
||||
OvalDict GovalDictConf `json:"ovalDict,omitempty"`
|
||||
Gost GostConf `json:"gost,omitempty"`
|
||||
Exploit ExploitConf `json:"exploit,omitempty"`
|
||||
Metasploit MetasploitConf `json:"metasploit,omitempty"`
|
||||
KEVuln KEVulnConf `json:"kevuln,omitempty"`
|
||||
|
||||
Slack SlackConf `json:"-"`
|
||||
EMail SMTPConf `json:"-"`
|
||||
HTTP HTTPConf `json:"-"`
|
||||
Syslog SyslogConf `json:"-"`
|
||||
AWS AWSConf `json:"-"`
|
||||
Azure AzureConf `json:"-"`
|
||||
ChatWork ChatWorkConf `json:"-"`
|
||||
Telegram TelegramConf `json:"-"`
|
||||
Slack SlackConf `json:"-"`
|
||||
EMail SMTPConf `json:"-"`
|
||||
HTTP HTTPConf `json:"-"`
|
||||
Syslog SyslogConf `json:"-"`
|
||||
AWS AWSConf `json:"-"`
|
||||
Azure AzureConf `json:"-"`
|
||||
ChatWork ChatWorkConf `json:"-"`
|
||||
GoogleChat GoogleChatConf `json:"-"`
|
||||
Telegram TelegramConf `json:"-"`
|
||||
WpScan WpScanConf `json:"-"`
|
||||
Saas SaasConf `json:"-"`
|
||||
|
||||
WpScan WpScanConf `json:"WpScan,omitempty"`
|
||||
ReportOpts
|
||||
}
|
||||
|
||||
Saas SaasConf `json:"-"`
|
||||
DetectIPS bool `json:"detectIps,omitempty"`
|
||||
// ReportConf is an interface to Validate Report Config
|
||||
type ReportConf interface {
|
||||
Validate() []error
|
||||
}
|
||||
|
||||
RefreshCve bool `json:"refreshCve,omitempty"`
|
||||
ToSlack bool `json:"toSlack,omitempty"`
|
||||
ToChatWork bool `json:"toChatWork,omitempty"`
|
||||
ToTelegram bool `json:"ToTelegram,omitempty"`
|
||||
ToEmail bool `json:"toEmail,omitempty"`
|
||||
ToSyslog bool `json:"toSyslog,omitempty"`
|
||||
ToLocalFile bool `json:"toLocalFile,omitempty"`
|
||||
ToS3 bool `json:"toS3,omitempty"`
|
||||
ToAzureBlob bool `json:"toAzureBlob,omitempty"`
|
||||
ToHTTP bool `json:"toHTTP,omitempty"`
|
||||
FormatJSON bool `json:"formatJSON,omitempty"`
|
||||
FormatOneEMail bool `json:"formatOneEMail,omitempty"`
|
||||
FormatOneLineText bool `json:"formatOneLineText,omitempty"`
|
||||
FormatList bool `json:"formatList,omitempty"`
|
||||
FormatFullText bool `json:"formatFullText,omitempty"`
|
||||
FormatCsvList bool `json:"formatCsvList,omitempty"`
|
||||
GZIP bool `json:"gzip,omitempty"`
|
||||
Diff bool `json:"diff,omitempty"`
|
||||
// ScanOpts is options for scan
|
||||
type ScanOpts struct {
|
||||
Vvv bool `json:"vvv,omitempty"`
|
||||
}
|
||||
|
||||
// ReportOpts is options for report
|
||||
type ReportOpts struct {
|
||||
CvssScoreOver float64 `json:"cvssScoreOver,omitempty"`
|
||||
ConfidenceScoreOver int `json:"confidenceScoreOver,omitempty"`
|
||||
TrivyCacheDBDir string `json:"trivyCacheDBDir,omitempty"`
|
||||
NoProgress bool `json:"noProgress,omitempty"`
|
||||
RefreshCve bool `json:"refreshCve,omitempty"`
|
||||
IgnoreUnfixed bool `json:"ignoreUnfixed,omitempty"`
|
||||
IgnoreUnscoredCves bool `json:"ignoreUnscoredCves,omitempty"`
|
||||
DiffPlus bool `json:"diffPlus,omitempty"`
|
||||
DiffMinus bool `json:"diffMinus,omitempty"`
|
||||
Diff bool `json:"diff,omitempty"`
|
||||
Lang string `json:"lang,omitempty"`
|
||||
}
|
||||
|
||||
// ValidateOnConfigtest validates
|
||||
func (c Config) ValidateOnConfigtest() bool {
|
||||
errs := c.checkSSHKeyExist()
|
||||
|
||||
if runtime.GOOS == "windows" && !c.SSHNative {
|
||||
errs = append(errs, xerrors.New("-ssh-native-insecure is needed on windows"))
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
if _, err := govalidator.ValidateStruct(c); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
log.Error(err)
|
||||
logging.Log.Error(err)
|
||||
}
|
||||
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
// ValidateOnScan validates configuration
|
||||
func (c Config) ValidateOnScan() bool {
|
||||
errs := c.checkSSHKeyExist()
|
||||
|
||||
if runtime.GOOS == "windows" && !c.SSHNative {
|
||||
errs = append(errs, xerrors.New("-ssh-native-insecure is needed on windows"))
|
||||
}
|
||||
|
||||
if len(c.ResultsDir) != 0 {
|
||||
if ok, _ := govalidator.IsFilePath(c.ResultsDir); !ok {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
@@ -121,29 +105,28 @@ func (c Config) ValidateOnScan() bool {
|
||||
}
|
||||
}
|
||||
|
||||
if len(c.CacheDBPath) != 0 {
|
||||
if ok, _ := govalidator.IsFilePath(c.CacheDBPath); !ok {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"Cache DB path must be a *Absolute* file path. -cache-dbpath: %s",
|
||||
c.CacheDBPath))
|
||||
}
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
if _, err := govalidator.ValidateStruct(c); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
log.Error(err)
|
||||
for _, server := range c.Servers {
|
||||
if !server.Module.IsScanPort() {
|
||||
continue
|
||||
}
|
||||
if es := server.PortScan.Validate(); 0 < len(es) {
|
||||
errs = append(errs, es...)
|
||||
}
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
logging.Log.Error(err)
|
||||
}
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
func (c Config) checkSSHKeyExist() (errs []error) {
|
||||
for serverName, v := range c.Servers {
|
||||
if v.Type == ServerTypePseudo {
|
||||
if v.Type == constant.ServerTypePseudo {
|
||||
continue
|
||||
}
|
||||
if v.KeyPath != "" {
|
||||
@@ -156,39 +139,8 @@ func (c Config) checkSSHKeyExist() (errs []error) {
|
||||
return errs
|
||||
}
|
||||
|
||||
// ValidateOnReportDB validates configuration
|
||||
func (c Config) ValidateOnReportDB() bool {
|
||||
errs := []error{}
|
||||
|
||||
if err := validateDB("cvedb", c.CveDict.Type, c.CveDict.SQLite3Path, c.CveDict.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if err := validateDB("ovaldb", c.OvalDict.Type, c.OvalDict.SQLite3Path, c.OvalDict.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if err := validateDB("gostdb", c.Gost.Type, c.Gost.SQLite3Path, c.Gost.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if err := validateDB("exploitdb", c.Exploit.Type, c.Exploit.SQLite3Path, c.Exploit.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if err := validateDB("msfdb", c.Metasploit.Type, c.Metasploit.SQLite3Path, c.Metasploit.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
log.Error(err)
|
||||
}
|
||||
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
// ValidateOnReport validates configuration
|
||||
func (c Config) ValidateOnReport() bool {
|
||||
func (c *Config) ValidateOnReport() bool {
|
||||
errs := []error{}
|
||||
|
||||
if len(c.ResultsDir) != 0 {
|
||||
@@ -203,54 +155,40 @@ func (c Config) ValidateOnReport() bool {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
if mailerrs := c.EMail.Validate(); 0 < len(mailerrs) {
|
||||
errs = append(errs, mailerrs...)
|
||||
}
|
||||
|
||||
if slackerrs := c.Slack.Validate(); 0 < len(slackerrs) {
|
||||
errs = append(errs, slackerrs...)
|
||||
}
|
||||
|
||||
if chatworkerrs := c.ChatWork.Validate(); 0 < len(chatworkerrs) {
|
||||
errs = append(errs, chatworkerrs...)
|
||||
}
|
||||
|
||||
if telegramerrs := c.Telegram.Validate(); 0 < len(telegramerrs) {
|
||||
errs = append(errs, telegramerrs...)
|
||||
}
|
||||
|
||||
if syslogerrs := c.Syslog.Validate(); 0 < len(syslogerrs) {
|
||||
errs = append(errs, syslogerrs...)
|
||||
}
|
||||
|
||||
if httperrs := c.HTTP.Validate(); 0 < len(httperrs) {
|
||||
errs = append(errs, httperrs...)
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
log.Error(err)
|
||||
}
|
||||
|
||||
return len(errs) == 0
|
||||
}
|
||||
|
||||
// ValidateOnTui validates configuration
|
||||
func (c Config) ValidateOnTui() bool {
|
||||
errs := []error{}
|
||||
|
||||
if len(c.ResultsDir) != 0 {
|
||||
if ok, _ := govalidator.IsFilePath(c.ResultsDir); !ok {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"JSON base directory must be a *Absolute* file path. -results-dir: %s", c.ResultsDir))
|
||||
for _, rc := range []ReportConf{
|
||||
&c.EMail,
|
||||
&c.Slack,
|
||||
&c.ChatWork,
|
||||
&c.GoogleChat,
|
||||
&c.Telegram,
|
||||
&c.Syslog,
|
||||
&c.HTTP,
|
||||
&c.AWS,
|
||||
&c.Azure,
|
||||
} {
|
||||
if es := rc.Validate(); 0 < len(es) {
|
||||
errs = append(errs, es...)
|
||||
}
|
||||
}
|
||||
|
||||
if err := validateDB("cvedb", c.CveDict.Type, c.CveDict.SQLite3Path, c.CveDict.URL); err != nil {
|
||||
errs = append(errs, err)
|
||||
for _, cnf := range []VulnDictInterface{
|
||||
&Conf.CveDict,
|
||||
&Conf.OvalDict,
|
||||
&Conf.Gost,
|
||||
&Conf.Exploit,
|
||||
&Conf.Metasploit,
|
||||
&Conf.KEVuln,
|
||||
} {
|
||||
if err := cnf.Validate(); err != nil {
|
||||
errs = append(errs, xerrors.Errorf("Failed to validate %s: %+v", cnf.GetName(), err))
|
||||
}
|
||||
if err := cnf.CheckHTTPHealth(); err != nil {
|
||||
errs = append(errs, xerrors.Errorf("Run %s as server mode before reporting: %+v", cnf.GetName(), err))
|
||||
}
|
||||
}
|
||||
|
||||
for _, err := range errs {
|
||||
log.Error(err)
|
||||
logging.Log.Error(err)
|
||||
}
|
||||
|
||||
return len(errs) == 0
|
||||
@@ -260,83 +198,11 @@ func (c Config) ValidateOnTui() bool {
|
||||
func (c Config) ValidateOnSaaS() bool {
|
||||
saaserrs := c.Saas.Validate()
|
||||
for _, err := range saaserrs {
|
||||
log.Error("Failed to validate SaaS conf: %+w", err)
|
||||
logging.Log.Error("Failed to validate SaaS conf: %+w", err)
|
||||
}
|
||||
return len(saaserrs) == 0
|
||||
}
|
||||
|
||||
// validateDB validates configuration
|
||||
func validateDB(dictionaryDBName, dbType, dbPath, dbURL string) error {
|
||||
log.Infof("-%s-type: %s, -%s-url: %s, -%s-path: %s",
|
||||
dictionaryDBName, dbType, dictionaryDBName, dbURL, dictionaryDBName, dbPath)
|
||||
|
||||
switch dbType {
|
||||
case "sqlite3":
|
||||
if dbURL != "" {
|
||||
return xerrors.Errorf("To use SQLite3, specify -%s-type=sqlite3 and -%s-path. To use as http server mode, specify -%s-type=http and -%s-url",
|
||||
dictionaryDBName, dictionaryDBName, dictionaryDBName, dictionaryDBName)
|
||||
}
|
||||
if ok, _ := govalidator.IsFilePath(dbPath); !ok {
|
||||
return xerrors.Errorf("SQLite3 path must be a *Absolute* file path. -%s-path: %s",
|
||||
dictionaryDBName, dbPath)
|
||||
}
|
||||
case "mysql":
|
||||
if dbURL == "" {
|
||||
return xerrors.Errorf(`MySQL connection string is needed. -%s-url="user:pass@tcp(localhost:3306)/dbname"`,
|
||||
dictionaryDBName)
|
||||
}
|
||||
case "postgres":
|
||||
if dbURL == "" {
|
||||
return xerrors.Errorf(`PostgreSQL connection string is needed. -%s-url="host=myhost user=user dbname=dbname sslmode=disable password=password"`,
|
||||
dictionaryDBName)
|
||||
}
|
||||
case "redis":
|
||||
if dbURL == "" {
|
||||
return xerrors.Errorf(`Redis connection string is needed. -%s-url="redis://localhost/0"`,
|
||||
dictionaryDBName)
|
||||
}
|
||||
case "http":
|
||||
if dbURL == "" {
|
||||
return xerrors.Errorf(`URL is needed. -%s-url="http://localhost:1323"`,
|
||||
dictionaryDBName)
|
||||
}
|
||||
default:
|
||||
return xerrors.Errorf("%s type must be either 'sqlite3', 'mysql', 'postgres', 'redis' or 'http'. -%s-type: %s",
|
||||
dictionaryDBName, dictionaryDBName, dbType)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
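Editor's note: the hunk above replaces the long chain of per-reporter and per-dictionary checks with two table-driven loops over interfaces. The following self-contained sketch is not part of this diff; it only illustrates the loop-over-interfaces pattern, and the ReportConf interface and dummyConf type here are stand-ins invented for the example.

package main

import "fmt"

// ReportConf stands in for any reporter config with a Validate method.
type ReportConf interface {
	Validate() []error
}

type dummyConf struct{ name string }

func (d dummyConf) Validate() []error {
	if d.name == "" {
		return []error{fmt.Errorf("name must not be empty")}
	}
	return nil
}

func main() {
	errs := []error{}
	// Collect errors from every reporter through the shared interface.
	for _, rc := range []ReportConf{dummyConf{}, dummyConf{name: "slack"}} {
		if es := rc.Validate(); 0 < len(es) {
			errs = append(errs, es...)
		}
	}
	fmt.Println(len(errs) == 0) // false: the empty dummyConf fails validation
}

The real ValidateOnReport walks the concrete conf structs listed above in exactly this way.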
// AWSConf is aws config
type AWSConf struct {
	// AWS profile to use
	Profile string `json:"profile"`

	// AWS region to use
	Region string `json:"region"`

	// S3 bucket name
	S3Bucket string `json:"s3Bucket"`

	// /bucket/path/to/results
	S3ResultsDir string `json:"s3ResultsDir"`

	// The Server-side encryption algorithm used when storing the reports in S3 (e.g., AES256, aws:kms).
	S3ServerSideEncryption string `json:"s3ServerSideEncryption"`
}

// AzureConf is azure config
type AzureConf struct {
	// Azure account name to use. AZURE_STORAGE_ACCOUNT environment variable is used if not specified
	AccountName string `json:"accountName"`

	// Azure account key to use. AZURE_STORAGE_ACCESS_KEY environment variable is used if not specified
	AccountKey string `json:"-"`

	// Azure storage container name
	ContainerName string `json:"containerName"`
}

// WpScanConf is wpscan.com config
type WpScanConf struct {
	Token string `toml:"token,omitempty" json:"-"`
@@ -352,7 +218,6 @@ type ServerInfo struct {
	Port string `toml:"port,omitempty" json:"port,omitempty"`
	SSHConfigPath string `toml:"sshConfigPath,omitempty" json:"sshConfigPath,omitempty"`
	KeyPath string `toml:"keyPath,omitempty" json:"keyPath,omitempty"`
	KeyPassword string `json:"-" toml:"-"`
	CpeNames []string `toml:"cpeNames,omitempty" json:"cpeNames,omitempty"`
	ScanMode []string `toml:"scanMode,omitempty" json:"scanMode,omitempty"`
	ScanModules []string `toml:"scanModules,omitempty" json:"scanModules,omitempty"`
@@ -367,16 +232,18 @@ type ServerInfo struct {
	GitHubRepos map[string]GitHubConf `toml:"githubs" json:"githubs,omitempty"` // key: owner/repo
	UUIDs map[string]string `toml:"uuids,omitempty" json:"uuids,omitempty"`
	Memo string `toml:"memo,omitempty" json:"memo,omitempty"`
	Enablerepo []string `toml:"enablerepo,omitempty" json:"enablerepo,omitempty"` // For CentOS, RHEL, Amazon
	Enablerepo []string `toml:"enablerepo,omitempty" json:"enablerepo,omitempty"` // For CentOS, Alma, Rocky, RHEL, Amazon
	Optional map[string]interface{} `toml:"optional,omitempty" json:"optional,omitempty"` // Optional key-value set that will be outputted to JSON
	Lockfiles []string `toml:"lockfiles,omitempty" json:"lockfiles,omitempty"` // ie) path/to/package-lock.json
	FindLock bool `toml:"findLock,omitempty" json:"findLock,omitempty"`
	Type string `toml:"type,omitempty" json:"type,omitempty"` // "pseudo" or ""
	IgnoredJSONKeys []string `toml:"ignoredJSONKeys,omitempty" json:"ignoredJSONKeys,omitempty"`
	IPv4Addrs []string `toml:"-" json:"ipv4Addrs,omitempty"`
	IPv6Addrs []string `toml:"-" json:"ipv6Addrs,omitempty"`
	IPSIdentifiers map[IPS]string `toml:"-" json:"ipsIdentifiers,omitempty"`
	WordPress *WordPressConf `toml:"wordpress,omitempty" json:"wordpress,omitempty"`
	PortScan *PortScanConf `toml:"portscan,omitempty" json:"portscan,omitempty"`

	IPv4Addrs []string `toml:"-" json:"ipv4Addrs,omitempty"`
	IPv6Addrs []string `toml:"-" json:"ipv6Addrs,omitempty"`
	IPSIdentifiers map[string]string `toml:"-" json:"ipsIdentifiers,omitempty"`

	// internal use
	LogMsgAnsiColor string `toml:"-" json:"-"` // DebugLog Color
@@ -408,7 +275,8 @@ func (cnf WordPressConf) IsZero() bool {

// GitHubConf is used for GitHub Security Alerts
type GitHubConf struct {
	Token string `json:"-"`
	Token string `json:"-"`
	IgnoreGitHubDismissed bool `json:"ignoreGitHubDismissed,omitempty"`
}

// GetServerName returns ServerName if this serverInfo is about host.
@@ -432,11 +300,8 @@ func (l Distro) String() string {

// MajorVersion returns Major version
func (l Distro) MajorVersion() (int, error) {
	if l.Family == Amazon {
		if isAmazonLinux1(l.Release) {
			return 1, nil
		}
		return 2, nil
	if l.Family == constant.Amazon {
		return strconv.Atoi(getAmazonLinuxVersion(l.Release))
	}
	if 0 < len(l.Release) {
		return strconv.Atoi(strings.Split(l.Release, ".")[0])
@@ -460,8 +325,3 @@ type Container struct {
	Name  string
	Image string
}

// VulnSrcConf is an interface of vulnsrc
type VulnSrcConf interface {
	CheckHTTPHealth() error
}

@@ -2,6 +2,8 @@ package config

import (
	"testing"

	. "github.com/future-architect/vuls/constant"
)

func TestSyslogConfValidate(t *testing.T) {
@@ -55,7 +57,7 @@ func TestSyslogConfValidate(t *testing.T) {
	}

	for i, tt := range tests {
		Conf.ToSyslog = true
		tt.conf.Enabled = true
		errs := tt.conf.Validate()
		if len(errs) != tt.expectedErrLength {
			t.Errorf("test: %d, expected %d, actual %d", i, tt.expectedErrLength, len(errs))
@@ -68,6 +70,13 @@ func TestDistro_MajorVersion(t *testing.T) {
		in  Distro
		out int
	}{
		{
			in: Distro{
				Family:  Amazon,
				Release: "2022 (Amazon Linux)",
			},
			out: 2022,
		},
		{
			in: Distro{
				Family: Amazon,

@@ -1,74 +0,0 @@
package config

import (
	"fmt"
	"os"
	"path/filepath"
	"time"

	"github.com/parnurzeal/gorequest"
	"golang.org/x/xerrors"
)

// ExploitConf is exploit config
type ExploitConf struct {
	// DB type for exploit dictionary (sqlite3, mysql, postgres or redis)
	Type string

	// http://exploit-dictionary.com:1324 or DB connection string
	URL string `json:"-"`

	// /path/to/exploit.sqlite3
	SQLite3Path string `json:"-"`
}

func (cnf *ExploitConf) setDefault() {
	if cnf.Type == "" {
		cnf.Type = "sqlite3"
	}
	if cnf.URL == "" && cnf.SQLite3Path == "" {
		wd, _ := os.Getwd()
		cnf.SQLite3Path = filepath.Join(wd, "go-exploitdb.sqlite3")
	}
}

const exploitDBType = "EXPLOITDB_TYPE"
const exploitDBURL = "EXPLOITDB_URL"
const exploitDBPATH = "EXPLOITDB_SQLITE3_PATH"

// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *ExploitConf) Init() {
	if os.Getenv(exploitDBType) != "" {
		cnf.Type = os.Getenv(exploitDBType)
	}
	if os.Getenv(exploitDBURL) != "" {
		cnf.URL = os.Getenv(exploitDBURL)
	}
	if os.Getenv(exploitDBPATH) != "" {
		cnf.SQLite3Path = os.Getenv(exploitDBPATH)
	}
	cnf.setDefault()
}

// IsFetchViaHTTP returns wether fetch via http
func (cnf *ExploitConf) IsFetchViaHTTP() bool {
	return Conf.Exploit.Type == "http"
}

// CheckHTTPHealth do health check
func (cnf *ExploitConf) CheckHTTPHealth() error {
	if !cnf.IsFetchViaHTTP() {
		return nil
	}

	url := fmt.Sprintf("%s/health", cnf.URL)
	resp, _, errs := gorequest.New().Timeout(10 * time.Second).Get(url).End()
	// resp, _, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
	// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
	if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
		return xerrors.Errorf("Failed to connect to exploit server. url: %s, errs: %s", url, errs)
	}
	return nil
}

@@ -1,74 +0,0 @@
package config

import (
	"fmt"
	"os"
	"path/filepath"
	"time"

	"github.com/parnurzeal/gorequest"
	"golang.org/x/xerrors"
)

// GoCveDictConf is go-cve-dictionary config
type GoCveDictConf struct {
	// DB type of CVE dictionary (sqlite3, mysql, postgres or redis)
	Type string

	// http://cve-dictionary.com:1323 or DB connection string
	URL string `json:"-"`

	// /path/to/cve.sqlite3
	SQLite3Path string `json:"-"`
}

func (cnf *GoCveDictConf) setDefault() {
	if cnf.Type == "" {
		cnf.Type = "sqlite3"
	}
	if cnf.URL == "" && cnf.SQLite3Path == "" {
		wd, _ := os.Getwd()
		cnf.SQLite3Path = filepath.Join(wd, "cve.sqlite3")
	}
}

const cveDBType = "CVEDB_TYPE"
const cveDBURL = "CVEDB_URL"
const cveDBPATH = "CVEDB_SQLITE3_PATH"

// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *GoCveDictConf) Init() {
	if os.Getenv(cveDBType) != "" {
		cnf.Type = os.Getenv(cveDBType)
	}
	if os.Getenv(cveDBURL) != "" {
		cnf.URL = os.Getenv(cveDBURL)
	}
	if os.Getenv(cveDBPATH) != "" {
		cnf.SQLite3Path = os.Getenv(cveDBPATH)
	}
	cnf.setDefault()
}

// IsFetchViaHTTP returns wether fetch via http
func (cnf *GoCveDictConf) IsFetchViaHTTP() bool {
	return Conf.CveDict.Type == "http"
}

// CheckHTTPHealth checks http server status
func (cnf *GoCveDictConf) CheckHTTPHealth() error {
	if !cnf.IsFetchViaHTTP() {
		return nil
	}

	url := fmt.Sprintf("%s/health", cnf.URL)
	resp, _, errs := gorequest.New().Timeout(10 * time.Second).SetDebug(Conf.Debug).Get(url).End()
	// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
	if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
		return xerrors.Errorf("Failed to request to CVE server. url: %s, errs: %s",
			url, errs)
	}
	return nil
}

32 config/googlechatconf.go Normal file
@@ -0,0 +1,32 @@
package config

import (
	"github.com/asaskevich/govalidator"
	"golang.org/x/xerrors"
)

// GoogleChatConf is GoogleChat config
type GoogleChatConf struct {
	WebHookURL string `valid:"url" json:"-" toml:"webHookURL,omitempty"`
	SkipIfNoCve bool `valid:"type(bool)" json:"-" toml:"skipIfNoCve"`
	ServerNameRegexp string `valid:"type(string)" json:"-" toml:"serverNameRegexp,omitempty"`
	Enabled bool `valid:"type(bool)" json:"-" toml:"-"`
}

// Validate validates configuration
func (c *GoogleChatConf) Validate() (errs []error) {
	if !c.Enabled {
		return
	}
	if len(c.WebHookURL) == 0 {
		errs = append(errs, xerrors.New("googleChatConf.webHookURL must not be empty"))
	}
	if !govalidator.IsRegex(c.ServerNameRegexp) {
		errs = append(errs, xerrors.New("googleChatConf.serverNameRegexp must be regex"))
	}
	_, err := govalidator.ValidateStruct(c)
	if err != nil {
		errs = append(errs, err)
	}
	return
}

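Editor's note: an illustrative fragment, not part of this diff, showing how the new GoogleChatConf validation might be exercised. It assumes fmt is imported; the webhook URL and regexp values are made up for the example.

	// Hypothetical values: a syntactically valid webhook URL and server-name regexp.
	c := GoogleChatConf{
		WebHookURL:       "https://chat.googleapis.com/v1/spaces/XXX/messages?key=YYY",
		ServerNameRegexp: "^(web|db)-[0-9]+$",
		Enabled:          true,
	}
	// Errors are printed only when a check fails; a disabled conf returns nothing at all.
	for _, err := range c.Validate() {
		fmt.Println(err)
	}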
@@ -1,74 +0,0 @@
package config

import (
	"fmt"
	"os"
	"path/filepath"
	"time"

	"github.com/parnurzeal/gorequest"
	"golang.org/x/xerrors"
)

// GostConf is gost config
type GostConf struct {
	// DB type for gost dictionary (sqlite3, mysql, postgres or redis)
	Type string

	// http://gost-dictionary.com:1324 or DB connection string
	URL string `json:"-"`

	// /path/to/gost.sqlite3
	SQLite3Path string `json:"-"`
}

func (cnf *GostConf) setDefault() {
	if cnf.Type == "" {
		cnf.Type = "sqlite3"
	}
	if cnf.URL == "" && cnf.SQLite3Path == "" {
		wd, _ := os.Getwd()
		cnf.SQLite3Path = filepath.Join(wd, "gost.sqlite3")
	}
}

const gostDBType = "GOSTDB_TYPE"
const gostDBURL = "GOSTDB_URL"
const gostDBPATH = "GOSTDB_SQLITE3_PATH"

// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *GostConf) Init() {
	if os.Getenv(gostDBType) != "" {
		cnf.Type = os.Getenv(gostDBType)
	}
	if os.Getenv(gostDBURL) != "" {
		cnf.URL = os.Getenv(gostDBURL)
	}
	if os.Getenv(gostDBPATH) != "" {
		cnf.SQLite3Path = os.Getenv(gostDBPATH)
	}
	cnf.setDefault()
}

// IsFetchViaHTTP returns wether fetch via http
func (cnf *GostConf) IsFetchViaHTTP() bool {
	return Conf.Gost.Type == "http"
}

// CheckHTTPHealth do health check
func (cnf *GostConf) CheckHTTPHealth() error {
	if !cnf.IsFetchViaHTTP() {
		return nil
	}

	url := fmt.Sprintf("%s/health", cnf.URL)
	resp, _, errs := gorequest.New().Timeout(10 * time.Second).Get(url).End()
	// resp, _, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
	// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
	if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
		return xerrors.Errorf("Failed to connect to gost server. url: %s, errs: %s", url, errs)
	}
	return nil
}

@@ -1,76 +0,0 @@
package config

import (
	"fmt"
	"os"
	"path/filepath"
	"time"

	"github.com/parnurzeal/gorequest"
	"golang.org/x/xerrors"
)

// GovalDictConf is goval-dictionary config
type GovalDictConf struct {

	// DB type of OVAL dictionary (sqlite3, mysql, postgres or redis)
	Type string

	// http://goval-dictionary.com:1324 or DB connection string
	URL string `json:"-"`

	// /path/to/oval.sqlite3
	SQLite3Path string `json:"-"`
}

func (cnf *GovalDictConf) setDefault() {
	if cnf.Type == "" {
		cnf.Type = "sqlite3"
	}
	if cnf.URL == "" && cnf.SQLite3Path == "" {
		wd, _ := os.Getwd()
		cnf.SQLite3Path = filepath.Join(wd, "oval.sqlite3")
	}
}

const govalType = "OVALDB_TYPE"
const govalURL = "OVALDB_URL"
const govalPATH = "OVALDB_SQLITE3_PATH"

// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *GovalDictConf) Init() {
	if os.Getenv(govalType) != "" {
		cnf.Type = os.Getenv(govalType)
	}
	if os.Getenv(govalURL) != "" {
		cnf.URL = os.Getenv(govalURL)
	}
	if os.Getenv(govalPATH) != "" {
		cnf.SQLite3Path = os.Getenv(govalPATH)
	}
	cnf.setDefault()
}

// IsFetchViaHTTP returns wether fetch via http
func (cnf *GovalDictConf) IsFetchViaHTTP() bool {
	return Conf.OvalDict.Type == "http"
}

// CheckHTTPHealth do health check
func (cnf *GovalDictConf) CheckHTTPHealth() error {
	if !cnf.IsFetchViaHTTP() {
		return nil
	}

	url := fmt.Sprintf("%s/health", cnf.URL)
	resp, _, errs := gorequest.New().Timeout(10 * time.Second).Get(url).End()
	// resp, _, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
	// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
	if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
		return xerrors.Errorf("Failed to request to OVAL server. url: %s, errs: %s",
			url, errs)
	}
	return nil
}

@@ -8,31 +8,25 @@ import (

// HTTPConf is HTTP config
type HTTPConf struct {
	URL string `valid:"url" json:"-"`
	URL string `valid:"url" json:"-"`
	Enabled bool `toml:"-" json:"-"`
}

const httpKey = "VULS_HTTP_URL"

// Validate validates configuration
func (c *HTTPConf) Validate() (errs []error) {
	if !Conf.ToHTTP {
	if !c.Enabled {
		return nil
	}

	// overwrite if env var is not empty
	if os.Getenv(httpKey) != "" {
		c.URL = os.Getenv(httpKey)
	}

	if _, err := govalidator.ValidateStruct(c); err != nil {
		errs = append(errs, err)
	}
	return errs
}

const httpKey = "VULS_HTTP_URL"

// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (c *HTTPConf) Init(toml HTTPConf) {
	if os.Getenv(httpKey) != "" {
		c.URL = os.Getenv(httpKey)
	}
	if toml.URL != "" {
		c.URL = toml.URL
	}
}

@@ -1,9 +0,0 @@
package config

// IPS is
type IPS string

const (
	// DeepSecurity is
	DeepSecurity IPS = "deepsecurity"
)
@@ -7,6 +7,6 @@ type JSONLoader struct {
}

// Load load the configuration JSON file specified by path arg.
func (c JSONLoader) Load(path, sudoPass, keyPass string) (err error) {
func (c JSONLoader) Load(_, _, _ string) (err error) {
	return xerrors.New("Not implement yet")
}

@@ -1,73 +0,0 @@
package config

import (
	"fmt"
	"os"
	"path/filepath"

	"github.com/parnurzeal/gorequest"
	"golang.org/x/xerrors"
)

// MetasploitConf is metasploit config
type MetasploitConf struct {
	// DB type for metasploit dictionary (sqlite3, mysql, postgres or redis)
	Type string

	// http://metasploit-dictionary.com:1324 or DB connection string
	URL string `json:"-"`

	// /path/to/metasploit.sqlite3
	SQLite3Path string `json:"-"`
}

func (cnf *MetasploitConf) setDefault() {
	if cnf.Type == "" {
		cnf.Type = "sqlite3"
	}
	if cnf.URL == "" && cnf.SQLite3Path == "" {
		wd, _ := os.Getwd()
		cnf.SQLite3Path = filepath.Join(wd, "go-msfdb.sqlite3")
	}
}

const metasploitDBType = "METASPLOITDB_TYPE"
const metasploitDBURL = "METASPLOITDB_URL"
const metasploitDBPATH = "METASPLOITDB_SQLITE3_PATH"

// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *MetasploitConf) Init() {
	if os.Getenv(metasploitDBType) != "" {
		cnf.Type = os.Getenv(metasploitDBType)
	}
	if os.Getenv(metasploitDBURL) != "" {
		cnf.URL = os.Getenv(metasploitDBURL)
	}
	if os.Getenv(metasploitDBPATH) != "" {
		cnf.SQLite3Path = os.Getenv(metasploitDBPATH)
	}
	cnf.setDefault()
}

// IsFetchViaHTTP returns wether fetch via http
func (cnf *MetasploitConf) IsFetchViaHTTP() bool {
	return Conf.Metasploit.Type == "http"
}

// CheckHTTPHealth do health check
func (cnf *MetasploitConf) CheckHTTPHealth() error {
	if !cnf.IsFetchViaHTTP() {
		return nil
	}

	url := fmt.Sprintf("%s/health", cnf.URL)
	resp, _, errs := gorequest.New().Get(url).End()
	// resp, _, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
	// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
	if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
		return xerrors.Errorf("Failed to connect to metasploit server. url: %s, errs: %s", url, errs)
	}
	return nil
}

107 config/os.go
@@ -4,59 +4,8 @@ import (
	"fmt"
	"strings"
	"time"
)

const (
	// RedHat is
	RedHat = "redhat"

	// Debian is
	Debian = "debian"

	// Ubuntu is
	Ubuntu = "ubuntu"

	// CentOS is
	CentOS = "centos"

	// Fedora is
	// Fedora = "fedora"

	// Amazon is
	Amazon = "amazon"

	// Oracle is
	Oracle = "oracle"

	// FreeBSD is
	FreeBSD = "freebsd"

	// Raspbian is
	Raspbian = "raspbian"

	// Windows is
	Windows = "windows"

	// OpenSUSE is
	OpenSUSE = "opensuse"

	// OpenSUSELeap is
	OpenSUSELeap = "opensuse.leap"

	// SUSEEnterpriseServer is
	SUSEEnterpriseServer = "suse.linux.enterprise.server"

	// SUSEEnterpriseDesktop is
	SUSEEnterpriseDesktop = "suse.linux.enterprise.desktop"

	// SUSEOpenstackCloud is
	SUSEOpenstackCloud = "suse.openstack.cloud"

	// Alpine is
	Alpine = "alpine"

	// ServerTypePseudo is used for ServerInfo.Type, r.Family
	ServerTypePseudo = "pseudo"
	"github.com/future-architect/vuls/constant"
)

// EOL has End-of-Life information
@@ -89,16 +38,13 @@ func (e EOL) IsExtendedSuppportEnded(now time.Time) bool {
// https://github.com/aquasecurity/trivy/blob/master/pkg/detector/ospkg/redhat/redhat.go#L20
func GetEOL(family, release string) (eol EOL, found bool) {
	switch family {
	case Amazon:
		rel := "2"
		if isAmazonLinux1(release) {
			rel = "1"
		}
	case constant.Amazon:
		eol, found = map[string]EOL{
			"1": {StandardSupportUntil: time.Date(2023, 6, 30, 23, 59, 59, 0, time.UTC)},
			"2": {},
		}[rel]
	case RedHat:
			"1":    {StandardSupportUntil: time.Date(2023, 6, 30, 23, 59, 59, 0, time.UTC)},
			"2":    {},
			"2022": {},
		}[getAmazonLinuxVersion(release)]
	case constant.RedHat:
		// https://access.redhat.com/support/policy/updates/errata
		eol, found = map[string]EOL{
			"3": {Ended: true},
@@ -115,7 +61,7 @@ func GetEOL(family, release string) (eol EOL, found bool) {
				StandardSupportUntil: time.Date(2029, 5, 31, 23, 59, 59, 0, time.UTC),
			},
		}[major(release)]
	case CentOS:
	case constant.CentOS:
		// https://en.wikipedia.org/wiki/CentOS#End-of-support_schedule
		// TODO Stream
		eol, found = map[string]EOL{
@@ -126,7 +72,15 @@ func GetEOL(family, release string) (eol EOL, found bool) {
			"7": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
			"8": {StandardSupportUntil: time.Date(2021, 12, 31, 23, 59, 59, 0, time.UTC)},
		}[major(release)]
	case Oracle:
	case constant.Alma:
		eol, found = map[string]EOL{
			"8": {StandardSupportUntil: time.Date(2029, 12, 31, 23, 59, 59, 0, time.UTC)},
		}[major(release)]
	case constant.Rocky:
		eol, found = map[string]EOL{
			"8": {StandardSupportUntil: time.Date(2029, 5, 31, 23, 59, 59, 0, time.UTC)},
		}[major(release)]
	case constant.Oracle:
		eol, found = map[string]EOL{
			// Source:
			// https://www.oracle.com/a/ocom/docs/elsp-lifetime-069338.pdf
@@ -145,7 +99,7 @@ func GetEOL(family, release string) (eol EOL, found bool) {
				StandardSupportUntil: time.Date(2029, 7, 1, 23, 59, 59, 0, time.UTC),
			},
		}[major(release)]
	case Debian:
	case constant.Debian:
		eol, found = map[string]EOL{
			// https://wiki.debian.org/LTS
			"6": {Ended: true},
@@ -153,11 +107,12 @@ func GetEOL(family, release string) (eol EOL, found bool) {
			"8":  {Ended: true},
			"9":  {StandardSupportUntil: time.Date(2022, 6, 30, 23, 59, 59, 0, time.UTC)},
			"10": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
			"11": {StandardSupportUntil: time.Date(2026, 6, 30, 23, 59, 59, 0, time.UTC)},
		}[major(release)]
	case Raspbian:
	case constant.Raspbian:
		// Not found
		eol, found = map[string]EOL{}[major(release)]
	case Ubuntu:
	case constant.Ubuntu:
		// https://wiki.ubuntu.com/Releases
		eol, found = map[string]EOL{
			"14.10": {Ended: true},
@@ -182,16 +137,19 @@ func GetEOL(family, release string) (eol EOL, found bool) {
			"20.04": {
				StandardSupportUntil: time.Date(2025, 4, 1, 23, 59, 59, 0, time.UTC),
			},
			"20.10": {
				StandardSupportUntil: time.Date(2021, 7, 22, 23, 59, 59, 0, time.UTC),
			},
			"21.04": {
				StandardSupportUntil: time.Date(2022, 1, 1, 23, 59, 59, 0, time.UTC),
				StandardSupportUntil: time.Date(2022, 1, 22, 23, 59, 59, 0, time.UTC),
			},
			"21.10": {
				StandardSupportUntil: time.Date(2022, 7, 1, 23, 59, 59, 0, time.UTC),
			},
		}[release]
	case SUSEEnterpriseServer:
	case constant.SUSEEnterpriseServer:
		//TODO
	case Alpine:
	case constant.Alpine:
		// https://github.com/aquasecurity/trivy/blob/master/pkg/detector/ospkg/alpine/alpine.go#L19
		// https://wiki.alpinelinux.org/wiki/Alpine_Linux:Releases
		eol, found = map[string]EOL{
@@ -218,7 +176,7 @@ func GetEOL(family, release string) (eol EOL, found bool) {
			"3.12": {StandardSupportUntil: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC)},
			"3.13": {StandardSupportUntil: time.Date(2022, 11, 1, 23, 59, 59, 0, time.UTC)},
		}[majorDotMinor(release)]
	case FreeBSD:
	case constant.FreeBSD:
		// https://www.freebsd.org/security/
		eol, found = map[string]EOL{
			"7": {Ended: true},
@@ -227,6 +185,7 @@ func GetEOL(family, release string) (eol EOL, found bool) {
			"10": {Ended: true},
			"11": {StandardSupportUntil: time.Date(2021, 9, 30, 23, 59, 59, 0, time.UTC)},
			"12": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
			"13": {StandardSupportUntil: time.Date(2026, 1, 31, 23, 59, 59, 0, time.UTC)},
		}[major(release)]
	}
	return
@@ -244,6 +203,10 @@ func majorDotMinor(osVer string) (majorDotMinor string) {
	return fmt.Sprintf("%s.%s", ss[0], ss[1])
}

func isAmazonLinux1(osRelease string) bool {
	return len(strings.Fields(osRelease)) == 1
func getAmazonLinuxVersion(osRelease string) string {
	ss := strings.Fields(osRelease)
	if len(ss) == 1 {
		return "1"
	}
	return ss[0]
}

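Editor's note: a small illustrative fragment, not part of this diff, of how the new getAmazonLinuxVersion helper behaves, assuming fmt is imported. A single-field release string is treated as Amazon Linux 1; otherwise the first field is taken as the version, which is what lets the "2022" EOL entry above match.

	fmt.Println(getAmazonLinuxVersion("2018.03"))             // "1"
	fmt.Println(getAmazonLinuxVersion("2 (Karoo)"))           // "2"
	fmt.Println(getAmazonLinuxVersion("2022 (Amazon Linux)")) // "2022"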
@@ -3,6 +3,8 @@ package config
import (
	"testing"
	"time"

	. "github.com/future-architect/vuls/constant"
)

func TestEOL_IsStandardSupportEnded(t *testing.T) {
@@ -43,6 +45,14 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
			extEnded: false,
			found:    true,
		},
		{
			name:     "amazon linux 2022 supported",
			fields:   fields{family: Amazon, release: "2022 (Amazon Linux)"},
			now:      time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
			stdEnded: false,
			extEnded: false,
			found:    true,
		},
		//RHEL
		{
			name: "RHEL7 supported",
@@ -109,6 +119,56 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
			extEnded: false,
			found:    false,
		},
		// Alma
		{
			name:     "Alma Linux 8 supported",
			fields:   fields{family: Alma, release: "8"},
			now:      time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
			stdEnded: false,
			extEnded: false,
			found:    true,
		},
		{
			name:     "Alma Linux 8 EOL",
			fields:   fields{family: Alma, release: "8"},
			now:      time.Date(2029, 2, 1, 0, 0, 0, 0, time.UTC),
			stdEnded: false,
			extEnded: false,
			found:    true,
		},
		{
			name:     "Alma Linux 9 Not Found",
			fields:   fields{family: Alma, release: "9"},
			now:      time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
			stdEnded: false,
			extEnded: false,
			found:    false,
		},
		// Rocky
		{
			name:     "Rocky Linux 8 supported",
			fields:   fields{family: Rocky, release: "8"},
			now:      time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
			stdEnded: false,
			extEnded: false,
			found:    true,
		},
		{
			name:     "Rocky Linux 8 EOL",
			fields:   fields{family: Rocky, release: "8"},
			now:      time.Date(2026, 2, 1, 0, 0, 0, 0, time.UTC),
			stdEnded: false,
			extEnded: false,
			found:    true,
		},
		{
			name:     "Rocky Linux 9 Not Found",
			fields:   fields{family: Rocky, release: "9"},
			now:      time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
			stdEnded: false,
			extEnded: false,
			found:    false,
		},
		//Oracle
		{
			name: "Oracle Linux 7 supported",
@@ -191,6 +251,14 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
			stdEnded: false,
			extEnded: false,
		},
		{
			name:     "Ubuntu 20.10 supported",
			fields:   fields{family: Ubuntu, release: "20.10"},
			now:      time.Date(2021, 5, 1, 23, 59, 59, 0, time.UTC),
			found:    true,
			stdEnded: false,
			extEnded: false,
		},
		{
			name:   "Ubuntu 21.04 supported",
			fields: fields{family: Ubuntu, release: "21.04"},
@@ -230,6 +298,14 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
			now:      time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
			stdEnded: false,
			extEnded: false,
			found:    true,
		},
		{
			name:     "Debian 12 is not supported yet",
			fields:   fields{family: Debian, release: "12"},
			now:      time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
			stdEnded: false,
			extEnded: false,
			found:    false,
		},
		//alpine
@@ -298,6 +374,14 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
			extEnded: false,
			found:    true,
		},
		{
			name:     "freebsd 13 supported",
			fields:   fields{family: FreeBSD, release: "13"},
			now:      time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
			stdEnded: false,
			extEnded: false,
			found:    true,
		},
		{
			name:   "freebsd 10 eol",
			fields: fields{family: FreeBSD, release: "10"},

222 config/portscan.go Normal file
@@ -0,0 +1,222 @@
package config

import (
	"os"
	"os/exec"
	"strconv"
	"strings"

	"github.com/asaskevich/govalidator"
	"golang.org/x/xerrors"
)

// PortScanConf is the setting for using an external port scanner
type PortScanConf struct {
	IsUseExternalScanner bool `toml:"-" json:"-"`

	// Path to external scanner
	ScannerBinPath string `toml:"scannerBinPath,omitempty" json:"scannerBinPath,omitempty"`

	// set user has privileged
	HasPrivileged bool `toml:"hasPrivileged,omitempty" json:"hasPrivileged,omitempty"`

	// set the ScanTechniques for ScannerBinPath
	ScanTechniques []string `toml:"scanTechniques,omitempty" json:"scanTechniques,omitempty"`

	// set the FIREWALL/IDS EVASION AND SPOOFING(Use given port number)
	SourcePort string `toml:"sourcePort,omitempty" json:"sourcePort,omitempty"`
}

// ScanTechnique is implemented to represent the supported ScanTechniques in an Enum.
type ScanTechnique int

const (
	// NotSupportTechnique is a ScanTechnique that is currently not supported.
	NotSupportTechnique ScanTechnique = iota
	// TCPSYN is SYN scan
	TCPSYN
	// TCPConnect is TCP connect scan
	TCPConnect
	// TCPACK is ACK scan
	TCPACK
	// TCPWindow is Window scan
	TCPWindow
	// TCPMaimon is Maimon scan
	TCPMaimon
	// TCPNull is Null scan
	TCPNull
	// TCPFIN is FIN scan
	TCPFIN
	// TCPXmas is Xmas scan
	TCPXmas
)

var scanTechniqueMap = map[ScanTechnique]string{
	TCPSYN:     "sS",
	TCPConnect: "sT",
	TCPACK:     "sA",
	TCPWindow:  "sW",
	TCPMaimon:  "sM",
	TCPNull:    "sN",
	TCPFIN:     "sF",
	TCPXmas:    "sX",
}

func (s ScanTechnique) String() string {
	switch s {
	case TCPSYN:
		return "TCPSYN"
	case TCPConnect:
		return "TCPConnect"
	case TCPACK:
		return "TCPACK"
	case TCPWindow:
		return "TCPWindow"
	case TCPMaimon:
		return "TCPMaimon"
	case TCPNull:
		return "TCPNull"
	case TCPFIN:
		return "TCPFIN"
	case TCPXmas:
		return "TCPXmas"
	default:
		return "NotSupportTechnique"
	}
}

// GetScanTechniques converts ScanTechniques loaded from config.toml to []scanTechniques.
func (c *PortScanConf) GetScanTechniques() []ScanTechnique {
	if len(c.ScanTechniques) == 0 {
		return []ScanTechnique{}
	}

	scanTechniques := []ScanTechnique{}
	for _, technique := range c.ScanTechniques {
		findScanTechniqueFlag := false
		for key, value := range scanTechniqueMap {
			if strings.EqualFold(value, technique) {
				scanTechniques = append(scanTechniques, key)
				findScanTechniqueFlag = true
				break
			}
		}

		if !findScanTechniqueFlag {
			scanTechniques = append(scanTechniques, NotSupportTechnique)
		}
	}

	if len(scanTechniques) == 0 {
		return []ScanTechnique{NotSupportTechnique}
	}
	return scanTechniques
}

// Validate validates configuration
func (c *PortScanConf) Validate() (errs []error) {
	if !c.IsUseExternalScanner {
		if c.IsZero() {
			return
		}
		errs = append(errs, xerrors.New("To enable the PortScan option, ScannerBinPath must be set."))
	}

	if _, err := os.Stat(c.ScannerBinPath); err != nil {
		errs = append(errs, xerrors.Errorf(
			"scanner is not found. ScannerBinPath: %s not exists", c.ScannerBinPath))
	}

	scanTechniques := c.GetScanTechniques()
	for _, scanTechnique := range scanTechniques {
		if scanTechnique == NotSupportTechnique {
			errs = append(errs, xerrors.New("There is an unsupported option in ScanTechniques."))
		}
	}

	// It does not currently support multiple ScanTechniques.
	// But if it supports UDP scanning, it will need to accept multiple ScanTechniques.
	if len(scanTechniques) > 1 {
		errs = append(errs, xerrors.New("Currently multiple ScanTechniques are not supported."))
	}

	if c.HasPrivileged {
		if os.Geteuid() != 0 {
			output, err := exec.Command("getcap", c.ScannerBinPath).Output()
			if err != nil {
				errs = append(errs, xerrors.Errorf("Failed to check capability of %s. error message: %w", c.ScannerBinPath, err))
			} else {
				parseOutput := strings.SplitN(string(output), "=", 2)
				if len(parseOutput) != 2 {
					errs = append(errs, xerrors.Errorf("Failed to parse getcap outputs. please execute this command: `$ getcap %s`. If the following string (`/usr/bin/nmap = ... `) is not displayed, you need to set the capability with the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", c.ScannerBinPath, c.ScannerBinPath))
				} else {
					parseCapability := strings.Split(strings.TrimSpace(parseOutput[1]), "+")
					capabilities := strings.Split(parseCapability[0], ",")
					for _, needCap := range []string{"cap_net_bind_service", "cap_net_admin", "cap_net_raw"} {
						existCapFlag := false
						for _, cap := range capabilities {
							if needCap == cap {
								existCapFlag = true
								break
							}
						}

						if existCapFlag {
							continue
						}

						errs = append(errs, xerrors.Errorf("Not enough capability to execute. needs: ['cap_net_bind_service', 'cap_net_admin', 'cap_net_raw'], actual: %s. To fix this, run the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", capabilities, c.ScannerBinPath))
						break
					}

					if parseCapability[1] != "eip" {
						errs = append(errs, xerrors.Errorf("Capability(`cap_net_bind_service,cap_net_admin,cap_net_raw`) must belong to the following capability set(need: eip, actual: %s). To fix this, run the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", parseCapability[1], c.ScannerBinPath))
					}
				}
			}
		}
	}

	if !c.HasPrivileged {
		for _, scanTechnique := range scanTechniques {
			if scanTechnique != TCPConnect && scanTechnique != NotSupportTechnique {
				errs = append(errs, xerrors.New("If not privileged, only TCPConnect Scan(-sT) can be used."))
				break
			}
		}
	}

	if c.SourcePort != "" {
		for _, scanTechnique := range scanTechniques {
			if scanTechnique == TCPConnect {
				errs = append(errs, xerrors.New("SourcePort Option(-g/--source-port) is incompatible with the default TCPConnect Scan(-sT)."))
				break
			}
		}

		portNumber, err := strconv.Atoi(c.SourcePort)
		if err != nil {
			errs = append(errs, xerrors.Errorf("SourcePort conversion failed. %w", err))
		} else {
			if portNumber < 0 || 65535 < portNumber {
				errs = append(errs, xerrors.Errorf("SourcePort(%s) must be between 0 and 65535.", c.SourcePort))
			}

			if portNumber == 0 {
				errs = append(errs, xerrors.New("SourcePort(0) may not work on all systems."))
			}
		}
	}

	_, err := govalidator.ValidateStruct(c)
	if err != nil {
		errs = append(errs, err)
	}

	return
}

// IsZero return whether this struct is not specified in config.toml
func (c PortScanConf) IsZero() bool {
	return c.ScannerBinPath == "" && !c.HasPrivileged && len(c.ScanTechniques) == 0 && c.SourcePort == ""
}
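Editor's note: a rough usage sketch, not part of this diff, for the new PortScanConf. Nmap-style flags ("sS", "sT", ...) from config.toml are mapped to the ScanTechnique enum and Validate reports anything unsupported; the nmap path here is an assumption and should be adjusted to the local system.

	conf := PortScanConf{
		IsUseExternalScanner: true,
		ScannerBinPath:       "/usr/bin/nmap", // assumed location of the external scanner
		HasPrivileged:        true,
		ScanTechniques:       []string{"sS"},
	}
	fmt.Println(conf.GetScanTechniques()) // [TCPSYN]
	for _, err := range conf.Validate() { // e.g. complains if the binary or capabilities are missing
		fmt.Println(err)
	}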
69 config/portscan_test.go Normal file
@@ -0,0 +1,69 @@
package config

import (
	"reflect"
	"testing"
)

func TestPortScanConf_getScanTechniques(t *testing.T) {
	tests := []struct {
		name       string
		techniques []string
		want       []ScanTechnique
	}{
		{
			name:       "nil",
			techniques: []string{},
			want:       []ScanTechnique{},
		},
		{
			name:       "single",
			techniques: []string{"sS"},
			want:       []ScanTechnique{TCPSYN},
		},
		{
			name:       "multiple",
			techniques: []string{"sS", "sT"},
			want:       []ScanTechnique{TCPSYN, TCPConnect},
		},
		{
			name:       "unknown",
			techniques: []string{"sU"},
			want:       []ScanTechnique{NotSupportTechnique},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			c := PortScanConf{ScanTechniques: tt.techniques}
			if got := c.GetScanTechniques(); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("PortScanConf.getScanTechniques() = %v, want %v", got, tt.want)
			}
		})
	}
}

func TestPortScanConf_IsZero(t *testing.T) {
	tests := []struct {
		name string
		conf PortScanConf
		want bool
	}{
		{
			name: "not zero",
			conf: PortScanConf{ScannerBinPath: "/usr/bin/nmap"},
			want: false,
		},
		{
			name: "zero",
			conf: PortScanConf{},
			want: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := tt.conf.IsZero(); got != tt.want {
				t.Errorf("PortScanConf.IsZero() = %v, want %v", got, tt.want)
			}
		})
	}
}
@@ -84,7 +84,7 @@ func (s ScanMode) String() string {
	return ss + " mode"
}

func setScanMode(server *ServerInfo, d ServerInfo) error {
func setScanMode(server *ServerInfo) error {
	if len(server.ScanMode) == 0 {
		server.ScanMode = Conf.Default.ScanMode
	}

@@ -16,11 +16,12 @@ type SlackConf struct {
	AuthUser    string   `json:"-" toml:"authUser,omitempty"`
	NotifyUsers []string `toml:"notifyUsers,omitempty" json:"-"`
	Text        string   `json:"-"`
	Enabled     bool     `toml:"-" json:"-"`
}

// Validate validates configuration
func (c *SlackConf) Validate() (errs []error) {
	if !Conf.ToSlack {
	if !c.Enabled {
		return
	}

@@ -15,6 +15,7 @@ type SMTPConf struct {
	To            []string `toml:"to,omitempty" json:"-"`
	Cc            []string `toml:"cc,omitempty" json:"-"`
	SubjectPrefix string   `toml:"subjectPrefix,omitempty" json:"-"`
	Enabled       bool     `toml:"-" json:"-"`
}

func checkEmails(emails []string) (errs []error) {
@@ -31,10 +32,9 @@ func checkEmails(emails []string) (errs []error) {

// Validate SMTP configuration
func (c *SMTPConf) Validate() (errs []error) {
	if !Conf.ToEmail {
	if !c.Enabled {
		return
	}
	// Check Emails fromat
	emails := []string{}
	emails = append(emails, c.From)
	emails = append(emails, c.To...)
@@ -44,10 +44,10 @@ func (c *SMTPConf) Validate() (errs []error) {
		errs = append(errs, emailErrs...)
	}

	if len(c.SMTPAddr) == 0 {
	if c.SMTPAddr == "" {
		errs = append(errs, xerrors.New("email.smtpAddr must not be empty"))
	}
	if len(c.SMTPPort) == 0 {
	if c.SMTPPort == "" {
		errs = append(errs, xerrors.New("email.smtpPort must not be empty"))
	}
	if len(c.To) == 0 {

@@ -17,11 +17,12 @@ type SyslogConf struct {
	Facility string `json:"-"`
	Tag      string `json:"-"`
	Verbose  bool   `json:"-"`
	Enabled  bool   `toml:"-" json:"-"`
}

// Validate validates configuration
func (c *SyslogConf) Validate() (errs []error) {
	if !Conf.ToSyslog {
	if !c.Enabled {
		return nil
	}
	// If protocol is empty, it will connect to the local syslog server.

@@ -7,13 +7,14 @@ import (

// TelegramConf is Telegram config
type TelegramConf struct {
	Token  string `json:"-"`
	ChatID string `json:"-"`
	Token   string `json:"-"`
	ChatID  string `json:"-"`
	Enabled bool   `toml:"-" json:"-"`
}

// Validate validates configuration
func (c *TelegramConf) Validate() (errs []error) {
	if !Conf.ToTelegram {
	if !c.Enabled {
		return
	}
	if len(c.ChatID) == 0 {

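Editor's note: the Slack, email, syslog and Telegram hunks above all make the same change: Validate is gated by a per-config Enabled flag instead of the global Conf.ToSlack/ToEmail/ToSyslog/ToTelegram flags. A tiny illustrative fragment, not part of this diff and assuming fmt is imported:

	slack := SlackConf{Enabled: false}
	fmt.Println(len(slack.Validate())) // 0: a disabled reporter is skipped entirely
	slack.Enabled = true               // once enabled, the usual field checks run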
@@ -5,6 +5,7 @@ import (
	"strings"

	"github.com/BurntSushi/toml"
	"github.com/future-architect/vuls/constant"
	"github.com/knqyf263/go-cpe/naming"
	"golang.org/x/xerrors"
)
@@ -14,32 +15,31 @@ type TOMLLoader struct {
}

// Load load the configuration TOML file specified by path arg.
func (c TOMLLoader) Load(pathToToml, keyPass string) error {
func (c TOMLLoader) Load(pathToToml, _ string) error {
	// util.Log.Infof("Loading config: %s", pathToToml)
	if _, err := toml.DecodeFile(pathToToml, &Conf); err != nil {
		return err
	}
	if keyPass != "" {
		Conf.Default.KeyPassword = keyPass
	}

	Conf.CveDict.Init()
	Conf.OvalDict.Init()
	Conf.Gost.Init()
	Conf.Exploit.Init()
	Conf.Metasploit.Init()
	for _, cnf := range []VulnDictInterface{
		&Conf.CveDict,
		&Conf.OvalDict,
		&Conf.Gost,
		&Conf.Exploit,
		&Conf.Metasploit,
		&Conf.KEVuln,
	} {
		cnf.Init()
	}

	index := 0
	for name, server := range Conf.Servers {
		server.ServerName = name
		if 0 < len(server.KeyPassword) {
			return xerrors.Errorf("[Deprecated] KEYPASSWORD IN CONFIG FILE ARE UNSECURE. REMOVE THEM IMMEDIATELY FOR A SECURITY REASONS. THEY WILL BE REMOVED IN A FUTURE RELEASE: %s", name)
		}

		if err := setDefaultIfEmpty(&server, Conf.Default); err != nil {
		if err := setDefaultIfEmpty(&server); err != nil {
			return xerrors.Errorf("Failed to set default value to config. server: %s, err: %w", name, err)
		}

		if err := setScanMode(&server, Conf.Default); err != nil {
		if err := setScanMode(&server); err != nil {
			return xerrors.Errorf("Failed to set ScanMode: %w", err)
		}

@@ -126,6 +126,10 @@ func (c TOMLLoader) Load(pathToToml, keyPass string) error {
			}
		}

		if server.PortScan.ScannerBinPath != "" {
			server.PortScan.IsUseExternalScanner = true
		}

		server.LogMsgAnsiColor = Colors[index%len(Colors)]
		index++

@@ -134,8 +138,8 @@ func (c TOMLLoader) Load(pathToToml, keyPass string) error {
	return nil
}

func setDefaultIfEmpty(server *ServerInfo, d ServerInfo) error {
	if server.Type != ServerTypePseudo {
func setDefaultIfEmpty(server *ServerInfo) error {
	if server.Type != constant.ServerTypePseudo {
		if len(server.Host) == 0 {
			return xerrors.Errorf("server.host is empty")
		}
@@ -166,10 +170,6 @@ func setDefaultIfEmpty(server *ServerInfo, d ServerInfo) error {
		if server.KeyPath == "" {
			server.KeyPath = Conf.Default.KeyPath
		}

		if server.KeyPassword == "" {
			server.KeyPassword = Conf.Default.KeyPassword
		}
	}

	if len(server.Lockfiles) == 0 {
@@ -208,6 +208,13 @@ func setDefaultIfEmpty(server *ServerInfo, d ServerInfo) error {
		}
	}

	if server.PortScan == nil {
		server.PortScan = Conf.Default.PortScan
		if server.PortScan == nil {
			server.PortScan = &PortScanConf{}
		}
	}

	if len(server.IgnoredJSONKeys) == 0 {
		server.IgnoredJSONKeys = Conf.Default.IgnoredJSONKeys
	}

303
config/vulnDictConf.go
Normal file
303
config/vulnDictConf.go
Normal file
@@ -0,0 +1,303 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// VulnDictInterface is an interface of vulnsrc
|
||||
type VulnDictInterface interface {
|
||||
Init()
|
||||
Validate() error
|
||||
IsFetchViaHTTP() bool
|
||||
CheckHTTPHealth() error
|
||||
GetName() string
|
||||
GetType() string
|
||||
GetURL() string
|
||||
GetSQLite3Path() string
|
||||
GetDebugSQL() bool
|
||||
}
|
||||
|
||||
// VulnDict is a base struct of vuln dicts
|
||||
type VulnDict struct {
|
||||
Name string
|
||||
|
||||
// DB type of CVE dictionary (sqlite3, mysql, postgres or redis)
|
||||
Type string
|
||||
|
||||
// http://cve-dictionary.com:1323 or DB connection string
|
||||
URL string `json:"-"`
|
||||
|
||||
// /path/to/cve.sqlite3
|
||||
SQLite3Path string
|
||||
|
||||
DebugSQL bool
|
||||
}
|
||||
|
||||
// GetType returns type
|
||||
func (cnf VulnDict) GetType() string {
|
||||
return cnf.Type
|
||||
}
|
||||
|
||||
// GetName returns name
|
||||
func (cnf VulnDict) GetName() string {
|
||||
return cnf.Name
|
||||
}
|
||||
|
||||
// GetURL returns url
|
||||
func (cnf VulnDict) GetURL() string {
|
||||
return cnf.URL
|
||||
}
|
||||
|
||||
// GetSQLite3Path return the path of SQLite3
|
||||
func (cnf VulnDict) GetSQLite3Path() string {
|
||||
return cnf.SQLite3Path
|
||||
}
|
||||
|
||||
// GetDebugSQL return debugSQL flag
|
||||
func (cnf VulnDict) GetDebugSQL() bool {
|
||||
return cnf.DebugSQL
|
||||
}
|
||||
|
||||
// Validate settings
|
||||
func (cnf VulnDict) Validate() error {
|
||||
logging.Log.Infof("%s.type=%s, %s.url=%s, %s.SQLite3Path=%s",
|
||||
cnf.Name, cnf.Type, cnf.Name, cnf.URL, cnf.Name, cnf.SQLite3Path)
|
||||
|
||||
switch cnf.Type {
|
||||
case "sqlite3":
|
||||
if cnf.URL != "" {
|
||||
return xerrors.Errorf("To use SQLite3, specify %s.type=sqlite3 and %s.SQLite3Path. To use as HTTP server mode, specify %s.type=http and %s.url",
|
||||
cnf.Name, cnf.Name, cnf.Name, cnf.Name)
|
||||
}
|
||||
if ok, _ := govalidator.IsFilePath(cnf.SQLite3Path); !ok {
|
||||
return xerrors.Errorf("SQLite3 path must be a *Absolute* file path. %s.SQLite3Path: %s",
|
||||
cnf.Name, cnf.SQLite3Path)
|
||||
}
|
||||
if _, err := os.Stat(cnf.SQLite3Path); os.IsNotExist(err) {
|
||||
logging.Log.Warnf("%s.SQLite3Path=%s file not found", cnf.Name, cnf.SQLite3Path)
|
||||
}
|
||||
case "mysql":
|
||||
if cnf.URL == "" {
|
||||
return xerrors.Errorf(`MySQL connection string is needed. %s.url="user:pass@tcp(localhost:3306)/dbname"`, cnf.Name)
|
||||
}
|
||||
case "postgres":
|
||||
if cnf.URL == "" {
|
||||
return xerrors.Errorf(`PostgreSQL connection string is needed. %s.url="host=myhost user=user dbname=dbname sslmode=disable password=password"`, cnf.Name)
|
||||
}
|
||||
case "redis":
|
||||
if cnf.URL == "" {
|
||||
return xerrors.Errorf(`Redis connection string is needed. %s.url="redis://localhost/0"`, cnf.Name)
|
||||
}
|
||||
case "http":
|
||||
if cnf.URL == "" {
|
||||
return xerrors.Errorf(`URL is needed. -%s-url="http://localhost:1323"`, cnf.Name)
|
||||
}
|
||||
default:
|
||||
return xerrors.Errorf("%s.type must be either 'sqlite3', 'mysql', 'postgres', 'redis' or 'http'. %s.type: %s", cnf.Name, cnf.Name, cnf.Type)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Init the struct
|
||||
func (cnf VulnDict) Init() {}
|
||||
|
||||
func (cnf *VulnDict) setDefault(sqlite3Name string) {
|
||||
if cnf.Type == "" {
|
||||
cnf.Type = "sqlite3"
|
||||
}
|
||||
if cnf.URL == "" && cnf.SQLite3Path == "" {
|
||||
wd, _ := os.Getwd()
|
||||
cnf.SQLite3Path = filepath.Join(wd, sqlite3Name)
|
||||
}
|
||||
}
|
||||
|
||||
// IsFetchViaHTTP returns if fetch via HTTP
|
||||
func (cnf VulnDict) IsFetchViaHTTP() bool {
|
||||
return cnf.Type == "http"
|
||||
}
|
||||
|
||||
// CheckHTTPHealth checks http server status
|
||||
func (cnf VulnDict) CheckHTTPHealth() error {
|
||||
if !cnf.IsFetchViaHTTP() {
|
||||
return nil
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("%s/health", cnf.URL)
|
||||
resp, _, errs := gorequest.New().Timeout(10 * time.Second).SetDebug(Conf.Debug).Get(url).End()
|
||||
// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
return xerrors.Errorf("Failed to request to CVE server. url: %s, errs: %s",
|
||||
url, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GovalDictConf is goval-dictionary config
|
||||
type GovalDictConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const govalType = "OVALDB_TYPE"
|
||||
const govalURL = "OVALDB_URL"
|
||||
const govalPATH = "OVALDB_SQLITE3_PATH"
|
||||
|
||||
// Init set options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *GovalDictConf) Init() {
|
||||
cnf.Name = "ovalDict"
|
||||
if os.Getenv(govalType) != "" {
|
||||
cnf.Type = os.Getenv(govalType)
|
||||
}
|
||||
if os.Getenv(govalURL) != "" {
|
||||
cnf.URL = os.Getenv(govalURL)
|
||||
}
|
||||
if os.Getenv(govalPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(govalPATH)
|
||||
}
|
||||
cnf.setDefault("oval.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// ExploitConf is exploit config
|
||||
type ExploitConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const exploitDBType = "EXPLOITDB_TYPE"
|
||||
const exploitDBURL = "EXPLOITDB_URL"
|
||||
const exploitDBPATH = "EXPLOITDB_SQLITE3_PATH"
|
||||
|
||||
// Init set options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *ExploitConf) Init() {
|
||||
cnf.Name = "exploit"
|
||||
if os.Getenv(exploitDBType) != "" {
|
||||
cnf.Type = os.Getenv(exploitDBType)
|
||||
}
|
||||
if os.Getenv(exploitDBURL) != "" {
|
||||
cnf.URL = os.Getenv(exploitDBURL)
|
||||
}
|
||||
if os.Getenv(exploitDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(exploitDBPATH)
|
||||
}
|
||||
cnf.setDefault("go-exploitdb.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// GoCveDictConf is GoCveDict config
|
||||
type GoCveDictConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const cveDBType = "CVEDB_TYPE"
|
||||
const cveDBURL = "CVEDB_URL"
|
||||
const cveDBPATH = "CVEDB_SQLITE3_PATH"
|
||||
|
||||
// Init set options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *GoCveDictConf) Init() {
|
||||
cnf.Name = "cveDict"
|
||||
if os.Getenv(cveDBType) != "" {
|
||||
cnf.Type = os.Getenv(cveDBType)
|
||||
}
|
||||
if os.Getenv(cveDBURL) != "" {
|
||||
cnf.URL = os.Getenv(cveDBURL)
|
||||
}
|
||||
if os.Getenv(cveDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(cveDBPATH)
|
||||
}
|
||||
cnf.setDefault("cve.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// GostConf is gost config
|
||||
type GostConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const gostDBType = "GOSTDB_TYPE"
|
||||
const gostDBURL = "GOSTDB_URL"
|
||||
const gostDBPATH = "GOSTDB_SQLITE3_PATH"
|
||||
|
||||
// Init set options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *GostConf) Init() {
|
||||
cnf.Name = "gost"
|
||||
if os.Getenv(gostDBType) != "" {
|
||||
cnf.Type = os.Getenv(gostDBType)
|
||||
}
|
||||
if os.Getenv(gostDBURL) != "" {
|
||||
cnf.URL = os.Getenv(gostDBURL)
|
||||
}
|
||||
if os.Getenv(gostDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(gostDBPATH)
|
||||
}
|
||||
cnf.setDefault("gost.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// MetasploitConf is go-msfdb config
|
||||
type MetasploitConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const metasploitDBType = "METASPLOITDB_TYPE"
|
||||
const metasploitDBURL = "METASPLOITDB_URL"
|
||||
const metasploitDBPATH = "METASPLOITDB_SQLITE3_PATH"
|
||||
|
||||
// Init set options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *MetasploitConf) Init() {
|
||||
cnf.Name = "metasploit"
|
||||
if os.Getenv(metasploitDBType) != "" {
|
||||
cnf.Type = os.Getenv(metasploitDBType)
|
||||
}
|
||||
if os.Getenv(metasploitDBURL) != "" {
|
||||
cnf.URL = os.Getenv(metasploitDBURL)
|
||||
}
|
||||
if os.Getenv(metasploitDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(metasploitDBPATH)
|
||||
}
|
||||
cnf.setDefault("go-msfdb.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// KEVulnConf is go-kev config
|
||||
type KEVulnConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const kevulnDBType = "KEVULN_TYPE"
|
||||
const kevulnDBURL = "KEVULN_URL"
|
||||
const kevulnDBPATH = "KEVULN_SQLITE3_PATH"
|
||||
|
||||
// Init sets options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *KEVulnConf) Init() {
|
||||
cnf.Name = "kevuln"
|
||||
if os.Getenv(kevulnDBType) != "" {
|
||||
cnf.Type = os.Getenv(kevulnDBType)
|
||||
}
|
||||
if os.Getenv(kevulnDBURL) != "" {
|
||||
cnf.URL = os.Getenv(kevulnDBURL)
|
||||
}
|
||||
if os.Getenv(kevulnDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(kevulnDBPATH)
|
||||
}
|
||||
cnf.setDefault("go-kev.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
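Each of the *Conf Init methods above follows the same precedence: an environment variable, when set, overrides the value loaded from config.toml, and setDefault then fills in the per-dictionary SQLite3 file name. A minimal, self-contained sketch of that pattern (the DictConf type and EXAMPLEDB_* variable names are hypothetical, not part of vuls):

package main

import (
    "fmt"
    "os"
)

// DictConf mirrors the Type/URL/SQLite3Path trio shared by the vuls VulnDict configs.
type DictConf struct {
    Type        string
    URL         string
    SQLite3Path string
}

// Init applies the same priority as above: environment variable first, then the config.toml value.
func (c *DictConf) Init(envPrefix, defaultDBName string) {
    if v := os.Getenv(envPrefix + "_TYPE"); v != "" {
        c.Type = v
    }
    if v := os.Getenv(envPrefix + "_URL"); v != "" {
        c.URL = v
    }
    if v := os.Getenv(envPrefix + "_SQLITE3_PATH"); v != "" {
        c.SQLite3Path = v
    }
    if c.SQLite3Path == "" {
        c.SQLite3Path = defaultDBName // e.g. "go-exploitdb.sqlite3"
    }
}

func main() {
    cnf := DictConf{Type: "sqlite3"} // value that would have come from config.toml
    cnf.Init("EXAMPLEDB", "example.sqlite3")
    fmt.Printf("%+v\n", cnf)
}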
67
constant/constant.go
Normal file
@@ -0,0 +1,67 @@
package constant

// Global constants
// Pkg local constants should not be defined here.
// Define them in each package.

const (
    // RedHat is
    RedHat = "redhat"

    // Debian is
    Debian = "debian"

    // Ubuntu is
    Ubuntu = "ubuntu"

    // CentOS is
    CentOS = "centos"

    // Alma is
    Alma = "alma"

    // Rocky is
    Rocky = "rocky"

    // Fedora is
    // Fedora = "fedora"

    // Amazon is
    Amazon = "amazon"

    // Oracle is
    Oracle = "oracle"

    // FreeBSD is
    FreeBSD = "freebsd"

    // Raspbian is
    Raspbian = "raspbian"

    // Windows is
    Windows = "windows"

    // OpenSUSE is
    OpenSUSE = "opensuse"

    // OpenSUSELeap is
    OpenSUSELeap = "opensuse.leap"

    // SUSEEnterpriseServer is
    SUSEEnterpriseServer = "suse.linux.enterprise.server"

    // SUSEEnterpriseDesktop is
    SUSEEnterpriseDesktop = "suse.linux.enterprise.desktop"

    // SUSEOpenstackCloud is
    SUSEOpenstackCloud = "suse.openstack.cloud"

    // Alpine is
    Alpine = "alpine"

    // ServerTypePseudo is used for ServerInfo.Type, r.Family
    ServerTypePseudo = "pseudo"

    // DeepSecurity is
    DeepSecurity = "deepsecurity"
)
@@ -81,6 +81,14 @@ func main() {
|
||||
return
|
||||
},
|
||||
}
|
||||
var cmdVersion = &cobra.Command{
|
||||
Use: "version",
|
||||
Short: "Show version",
|
||||
Long: "Show version",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
fmt.Printf("future-vuls-%s-%s\n", config.Version, config.Revision)
|
||||
},
|
||||
}
|
||||
cmdFvulsUploader.PersistentFlags().StringVar(&serverUUID, "uuid", "", "server uuid. ENV: VULS_SERVER_UUID")
|
||||
cmdFvulsUploader.PersistentFlags().StringVar(&configFile, "config", "", "config file (default is $HOME/.cobra.yaml)")
|
||||
cmdFvulsUploader.PersistentFlags().BoolVarP(&stdIn, "stdin", "s", false, "input from stdin. ENV: VULS_STDIN")
|
||||
@@ -92,6 +100,7 @@ func main() {
|
||||
|
||||
var rootCmd = &cobra.Command{Use: "future-vuls"}
|
||||
rootCmd.AddCommand(cmdFvulsUploader)
|
||||
rootCmd.AddCommand(cmdVersion)
|
||||
if err = rootCmd.Execute(); err != nil {
|
||||
fmt.Println("Failed to execute command", err)
|
||||
}
|
||||
|
||||
@@ -9,8 +9,8 @@ import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/contrib/trivy/parser"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
@@ -34,45 +34,55 @@ func main() {
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
buf := new(bytes.Buffer)
|
||||
if _, err = buf.ReadFrom(reader); err != nil {
|
||||
fmt.Printf("Failed to read file. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
trivyJSON = buf.Bytes()
|
||||
} else {
|
||||
if trivyJSON, err = ioutil.ReadFile(jsonFilePath); err != nil {
|
||||
fmt.Println("Failed to read file", err)
|
||||
fmt.Printf("Failed to read file. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
scanResult := &models.ScanResult{
|
||||
JSONVersion: models.JSONVersion,
|
||||
ScannedCves: models.VulnInfos{},
|
||||
}
|
||||
if scanResult, err = parser.Parse(trivyJSON, scanResult); err != nil {
|
||||
fmt.Println("Failed to execute command", err)
|
||||
parser, err := parser.NewParser(trivyJSON)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to new parser. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
scanResult, err := parser.Parse(trivyJSON)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to parse. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
var resultJSON []byte
|
||||
if resultJSON, err = json.MarshalIndent(scanResult, "", " "); err != nil {
|
||||
fmt.Println("Failed to create json", err)
|
||||
fmt.Printf("Failed to create json. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
fmt.Println(string(resultJSON))
|
||||
return
|
||||
},
|
||||
}
|
||||
|
||||
var cmdVersion = &cobra.Command{
|
||||
Use: "version",
|
||||
Short: "Show version",
|
||||
Long: "Show version",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
fmt.Printf("trivy-to-vuls-%s-%s\n", config.Version, config.Revision)
|
||||
},
|
||||
}
|
||||
|
||||
cmdTrivyToVuls.Flags().BoolVarP(&stdIn, "stdin", "s", false, "input from stdin")
|
||||
cmdTrivyToVuls.Flags().StringVarP(&jsonDir, "trivy-json-dir", "d", "./", "trivy json dir")
|
||||
cmdTrivyToVuls.Flags().StringVarP(&jsonFileName, "trivy-json-file-name", "f", "results.json", "trivy json file name")
|
||||
|
||||
var rootCmd = &cobra.Command{Use: "trivy-to-vuls"}
|
||||
rootCmd.AddCommand(cmdTrivyToVuls)
|
||||
rootCmd.AddCommand(cmdVersion)
|
||||
if err = rootCmd.Execute(); err != nil {
|
||||
fmt.Println("Failed to execute command", err)
|
||||
fmt.Printf("Failed to execute command. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
@@ -2,175 +2,32 @@ package parser
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/aquasecurity/fanal/analyzer/os"
|
||||
"github.com/aquasecurity/trivy/pkg/report"
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
v2 "github.com/future-architect/vuls/contrib/trivy/parser/v2"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// Parse :
|
||||
func Parse(vulnJSON []byte, scanResult *models.ScanResult) (result *models.ScanResult, err error) {
|
||||
var trivyResults report.Results
|
||||
if err = json.Unmarshal(vulnJSON, &trivyResults); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pkgs := models.Packages{}
|
||||
vulnInfos := models.VulnInfos{}
|
||||
uniqueLibraryScannerPaths := map[string]models.LibraryScanner{}
|
||||
for _, trivyResult := range trivyResults {
|
||||
for _, vuln := range trivyResult.Vulnerabilities {
|
||||
if _, ok := vulnInfos[vuln.VulnerabilityID]; !ok {
|
||||
vulnInfos[vuln.VulnerabilityID] = models.VulnInfo{
|
||||
CveID: vuln.VulnerabilityID,
|
||||
Confidences: models.Confidences{
|
||||
{
|
||||
Score: 100,
|
||||
DetectionMethod: models.TrivyMatchStr,
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
CveContents: models.CveContents{},
|
||||
LibraryFixedIns: models.LibraryFixedIns{},
|
||||
// VulnType : "",
|
||||
}
|
||||
}
|
||||
vulnInfo := vulnInfos[vuln.VulnerabilityID]
|
||||
var notFixedYet bool
|
||||
fixState := ""
|
||||
if len(vuln.FixedVersion) == 0 {
|
||||
notFixedYet = true
|
||||
fixState = "Affected"
|
||||
}
|
||||
var references models.References
|
||||
for _, reference := range vuln.References {
|
||||
references = append(references, models.Reference{
|
||||
Source: "trivy",
|
||||
Link: reference,
|
||||
})
|
||||
}
|
||||
|
||||
sort.Slice(references, func(i, j int) bool {
|
||||
return references[i].Link < references[j].Link
|
||||
})
|
||||
|
||||
var published time.Time
|
||||
if vuln.PublishedDate != nil {
|
||||
published = *vuln.PublishedDate
|
||||
}
|
||||
|
||||
var lastModified time.Time
|
||||
if vuln.LastModifiedDate != nil {
|
||||
lastModified = *vuln.LastModifiedDate
|
||||
}
|
||||
|
||||
vulnInfo.CveContents = models.CveContents{
|
||||
models.Trivy: models.CveContent{
|
||||
Cvss3Severity: vuln.Severity,
|
||||
References: references,
|
||||
Title: vuln.Title,
|
||||
Summary: vuln.Description,
|
||||
Published: published,
|
||||
LastModified: lastModified,
|
||||
},
|
||||
}
|
||||
// do only if image type is Vuln
|
||||
if IsTrivySupportedOS(trivyResult.Type) {
|
||||
pkgs[vuln.PkgName] = models.Package{
|
||||
Name: vuln.PkgName,
|
||||
Version: vuln.InstalledVersion,
|
||||
}
|
||||
vulnInfo.AffectedPackages = append(vulnInfo.AffectedPackages, models.PackageFixStatus{
|
||||
Name: vuln.PkgName,
|
||||
NotFixedYet: notFixedYet,
|
||||
FixState: fixState,
|
||||
FixedIn: vuln.FixedVersion,
|
||||
})
|
||||
|
||||
// overwrite every time if os package
|
||||
scanResult.Family = trivyResult.Type
|
||||
scanResult.ServerName = trivyResult.Target
|
||||
scanResult.Optional = map[string]interface{}{
|
||||
"trivy-target": trivyResult.Target,
|
||||
}
|
||||
scanResult.ScannedAt = time.Now()
|
||||
scanResult.ScannedBy = "trivy"
|
||||
scanResult.ScannedVia = "trivy"
|
||||
} else {
|
||||
// Results of LibraryScan
|
||||
vulnInfo.LibraryFixedIns = append(vulnInfo.LibraryFixedIns, models.LibraryFixedIn{
|
||||
Key: trivyResult.Type,
|
||||
Name: vuln.PkgName,
|
||||
Path: trivyResult.Target,
|
||||
FixedIn: vuln.FixedVersion,
|
||||
})
|
||||
libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
|
||||
libScanner.Libs = append(libScanner.Libs, types.Library{
|
||||
Name: vuln.PkgName,
|
||||
Version: vuln.InstalledVersion,
|
||||
})
|
||||
uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
|
||||
}
|
||||
vulnInfos[vuln.VulnerabilityID] = vulnInfo
|
||||
}
|
||||
}
|
||||
// flatten and unique libraries
|
||||
libraryScanners := make([]models.LibraryScanner, 0, len(uniqueLibraryScannerPaths))
|
||||
for path, v := range uniqueLibraryScannerPaths {
|
||||
uniqueLibrary := map[string]types.Library{}
|
||||
for _, lib := range v.Libs {
|
||||
uniqueLibrary[lib.Name+lib.Version] = lib
|
||||
}
|
||||
|
||||
var libraries []types.Library
|
||||
for _, library := range uniqueLibrary {
|
||||
libraries = append(libraries, library)
|
||||
}
|
||||
|
||||
sort.Slice(libraries, func(i, j int) bool {
|
||||
return libraries[i].Name < libraries[j].Name
|
||||
})
|
||||
|
||||
libscanner := models.LibraryScanner{
|
||||
Path: path,
|
||||
Libs: libraries,
|
||||
}
|
||||
libraryScanners = append(libraryScanners, libscanner)
|
||||
}
|
||||
sort.Slice(libraryScanners, func(i, j int) bool {
|
||||
return libraryScanners[i].Path < libraryScanners[j].Path
|
||||
})
|
||||
scanResult.ScannedCves = vulnInfos
|
||||
scanResult.Packages = pkgs
|
||||
scanResult.LibraryScanners = libraryScanners
|
||||
return scanResult, nil
|
||||
// Parser is a parser interface
|
||||
type Parser interface {
|
||||
Parse(vulnJSON []byte) (result *models.ScanResult, err error)
|
||||
}
|
||||
|
||||
// IsTrivySupportedOS :
|
||||
func IsTrivySupportedOS(family string) bool {
|
||||
supportedFamilies := []string{
|
||||
os.RedHat,
|
||||
os.Debian,
|
||||
os.Ubuntu,
|
||||
os.CentOS,
|
||||
os.Fedora,
|
||||
os.Amazon,
|
||||
os.Oracle,
|
||||
os.Windows,
|
||||
os.OpenSUSE,
|
||||
os.OpenSUSELeap,
|
||||
os.OpenSUSETumbleweed,
|
||||
os.SLES,
|
||||
os.Photon,
|
||||
os.Alpine,
|
||||
}
|
||||
for _, supportedFamily := range supportedFamilies {
|
||||
if family == supportedFamily {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
// Report is used for judging the schema version of trivy
|
||||
type Report struct {
|
||||
SchemaVersion int `json:",omitempty"`
|
||||
}
|
||||
|
||||
// NewParser makes a parser for the schema version of trivy
|
||||
func NewParser(vulnJSON []byte) (Parser, error) {
|
||||
r := Report{}
|
||||
if err := json.Unmarshal(vulnJSON, &r); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to parse JSON. Please use the latest version of trivy, trivy-to-vuls and future-vuls")
|
||||
}
|
||||
switch r.SchemaVersion {
|
||||
case 2:
|
||||
return v2.ParserV2{}, nil
|
||||
default:
|
||||
return nil, xerrors.Errorf("Failed to parse trivy json. SchemeVersion %d is not supported yet. Please contact support", r.SchemaVersion)
|
||||
}
|
||||
}
|
||||
|
||||
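The NewParser entry point above dispatches on the report's SchemaVersion, so callers no longer pick a parser themselves. A minimal usage sketch (the results.json path is only an illustration; any trivy JSON report works):

package main

import (
    "fmt"
    "os"

    "github.com/future-architect/vuls/contrib/trivy/parser"
)

func main() {
    // Hypothetical path to a report produced with `trivy --format json`.
    vulnJSON, err := os.ReadFile("results.json")
    if err != nil {
        fmt.Printf("Failed to read file. err: %+v\n", err)
        os.Exit(1)
    }
    p, err := parser.NewParser(vulnJSON) // returns the v2 parser for SchemaVersion 2
    if err != nil {
        fmt.Printf("Failed to new parser. err: %+v\n", err)
        os.Exit(1)
    }
    scanResult, err := p.Parse(vulnJSON)
    if err != nil {
        fmt.Printf("Failed to parse. err: %+v\n", err)
        os.Exit(1)
    }
    fmt.Println(scanResult.ServerName)
}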
File diff suppressed because it is too large
60
contrib/trivy/parser/v2/parser.go
Normal file
@@ -0,0 +1,60 @@
package v2

import (
    "encoding/json"
    "time"

    "github.com/aquasecurity/trivy/pkg/report"

    "github.com/future-architect/vuls/constant"
    "github.com/future-architect/vuls/contrib/trivy/pkg"
    "github.com/future-architect/vuls/models"
)

// ParserV2 is a parser for scheme v2
type ParserV2 struct {
}

// Parse trivy's JSON and convert to the Vuls struct
func (p ParserV2) Parse(vulnJSON []byte) (result *models.ScanResult, err error) {
    var report report.Report
    if err = json.Unmarshal(vulnJSON, &report); err != nil {
        return nil, err
    }

    scanResult, err := pkg.Convert(report.Results)
    if err != nil {
        return nil, err
    }

    setScanResultMeta(scanResult, &report)
    return scanResult, nil
}

func setScanResultMeta(scanResult *models.ScanResult, report *report.Report) {
    for _, r := range report.Results {
        const trivyTarget = "trivy-target"
        if pkg.IsTrivySupportedOS(r.Type) {
            scanResult.Family = r.Type
            scanResult.ServerName = r.Target
            scanResult.Optional = map[string]interface{}{
                trivyTarget: r.Target,
            }
        } else if pkg.IsTrivySupportedLib(r.Type) {
            if scanResult.Family == "" {
                scanResult.Family = constant.ServerTypePseudo
            }
            if scanResult.ServerName == "" {
                scanResult.ServerName = "library scan by trivy"
            }
            if _, ok := scanResult.Optional[trivyTarget]; !ok {
                scanResult.Optional = map[string]interface{}{
                    trivyTarget: r.Target,
                }
            }
        }
        scanResult.ScannedAt = time.Now()
        scanResult.ScannedBy = "trivy"
        scanResult.ScannedVia = "trivy"
    }
}
725
contrib/trivy/parser/v2/parser_test.go
Normal file
@@ -0,0 +1,725 @@
|
||||
package v2
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/d4l3k/messagediff"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
func TestParse(t *testing.T) {
|
||||
cases := map[string]struct {
|
||||
vulnJSON []byte
|
||||
expected *models.ScanResult
|
||||
}{
|
||||
"image redis": {
|
||||
vulnJSON: redisTrivy,
|
||||
expected: redisSR,
|
||||
},
|
||||
"image struts": {
|
||||
vulnJSON: strutsTrivy,
|
||||
expected: strutsSR,
|
||||
},
|
||||
"image osAndLib": {
|
||||
vulnJSON: osAndLibTrivy,
|
||||
expected: osAndLibSR,
|
||||
},
|
||||
}
|
||||
|
||||
for testcase, v := range cases {
|
||||
actual, err := ParserV2{}.Parse(v.vulnJSON)
|
||||
if err != nil {
|
||||
t.Errorf("%s", err)
|
||||
}
|
||||
|
||||
diff, equal := messagediff.PrettyDiff(
|
||||
v.expected,
|
||||
actual,
|
||||
messagediff.IgnoreStructField("ScannedAt"),
|
||||
messagediff.IgnoreStructField("Title"),
|
||||
messagediff.IgnoreStructField("Summary"),
|
||||
messagediff.IgnoreStructField("LastModified"),
|
||||
messagediff.IgnoreStructField("Published"),
|
||||
)
|
||||
if !equal {
|
||||
t.Errorf("test: %s, diff %s", testcase, diff)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var redisTrivy = []byte(`
|
||||
{
|
||||
"SchemaVersion": 2,
|
||||
"ArtifactName": "redis",
|
||||
"ArtifactType": "container_image",
|
||||
"Metadata": {
|
||||
"OS": {
|
||||
"Family": "debian",
|
||||
"Name": "10.10"
|
||||
},
|
||||
"ImageID": "sha256:ddcca4b8a6f0367b5de2764dfe76b0a4bfa6d75237932185923705da47004347",
|
||||
"DiffIDs": [
|
||||
"sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781",
|
||||
"sha256:b6fc243eaea74d1a41b242da4c3ec5166db80f38c4d57a10ce8860c00d902ace",
|
||||
"sha256:ec92e47b7c52dacc26df07ee13e8e81c099b5a5661ccc97b06692a9c9d01e772",
|
||||
"sha256:4be6d4460d3615186717f21ffc0023b168dce48967d01934bbe31127901d3d5c",
|
||||
"sha256:992463b683270e164936e9c48fa395d05a7b8b5cc0aa208e4fa81aa9158fcae1",
|
||||
"sha256:0083597d42d190ddb86c35587a7b196fe18d79382520544b5f715c1e4792b19a"
|
||||
],
|
||||
"RepoTags": [
|
||||
"redis:latest"
|
||||
],
|
||||
"RepoDigests": [
|
||||
"redis@sha256:66ce9bc742609650afc3de7009658473ed601db4e926a5b16d239303383bacad"
|
||||
],
|
||||
"ImageConfig": {
|
||||
"architecture": "amd64",
|
||||
"container": "fa59f1c2817c9095f8f7272a4ab9b11db0332b33efb3a82c00a3d1fec8763684",
|
||||
"created": "2021-08-17T14:30:06.550779326Z",
|
||||
"docker_version": "20.10.7",
|
||||
"history": [
|
||||
{
|
||||
"created": "2021-08-17T01:24:06Z",
|
||||
"created_by": "/bin/sh -c #(nop) ADD file:87b4e60fe3af680c6815448374365a44e9ea461bc8ade2960b4639c25aed3ba9 in / "
|
||||
},
|
||||
{
|
||||
"created": "2021-08-17T14:30:06Z",
|
||||
"created_by": "/bin/sh -c #(nop) CMD [\"redis-server\"]",
|
||||
"empty_layer": true
|
||||
}
|
||||
],
|
||||
"os": "linux",
|
||||
"rootfs": {
|
||||
"type": "layers",
|
||||
"diff_ids": [
|
||||
"sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781",
|
||||
"sha256:b6fc243eaea74d1a41b242da4c3ec5166db80f38c4d57a10ce8860c00d902ace",
|
||||
"sha256:ec92e47b7c52dacc26df07ee13e8e81c099b5a5661ccc97b06692a9c9d01e772",
|
||||
"sha256:4be6d4460d3615186717f21ffc0023b168dce48967d01934bbe31127901d3d5c",
|
||||
"sha256:992463b683270e164936e9c48fa395d05a7b8b5cc0aa208e4fa81aa9158fcae1",
|
||||
"sha256:0083597d42d190ddb86c35587a7b196fe18d79382520544b5f715c1e4792b19a"
|
||||
]
|
||||
},
|
||||
"config": {
|
||||
"Cmd": [
|
||||
"redis-server"
|
||||
],
|
||||
"Entrypoint": [
|
||||
"docker-entrypoint.sh"
|
||||
],
|
||||
"Env": [
|
||||
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
|
||||
"GOSU_VERSION=1.12",
|
||||
"REDIS_VERSION=6.2.5",
|
||||
"REDIS_DOWNLOAD_URL=http://download.redis.io/releases/redis-6.2.5.tar.gz",
|
||||
"REDIS_DOWNLOAD_SHA=4b9a75709a1b74b3785e20a6c158cab94cf52298aa381eea947a678a60d551ae"
|
||||
],
|
||||
"Image": "sha256:befbd3fc62bffcd0115008969a014faaad07828b2c54b4bcfd2d9fc3aa2508cd",
|
||||
"Volumes": {
|
||||
"/data": {}
|
||||
},
|
||||
"WorkingDir": "/data"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Results": [
|
||||
{
|
||||
"Target": "redis (debian 10.10)",
|
||||
"Class": "os-pkgs",
|
||||
"Type": "debian",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "adduser",
|
||||
"Version": "3.118",
|
||||
"SrcName": "adduser",
|
||||
"SrcVersion": "3.118",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Name": "apt",
|
||||
"Version": "1.8.2.3",
|
||||
"SrcName": "apt",
|
||||
"SrcVersion": "1.8.2.3",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Name": "bsdutils",
|
||||
"Version": "1:2.33.1-0.1",
|
||||
"SrcName": "util-linux",
|
||||
"SrcVersion": "2.33.1-0.1",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Name": "pkgA",
|
||||
"Version": "1:2.33.1-0.1",
|
||||
"SrcName": "util-linux",
|
||||
"SrcVersion": "2.33.1-0.1",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2011-3374",
|
||||
"PkgName": "apt",
|
||||
"InstalledVersion": "1.8.2.3",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
},
|
||||
"SeveritySource": "debian",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2011-3374",
|
||||
"Description": "It was found that apt-key in apt, all versions, do not correctly validate gpg keys with the master keyring, leading to a potential man-in-the-middle attack.",
|
||||
"Severity": "LOW",
|
||||
"CweIDs": [
|
||||
"CWE-347"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:L/A:N",
|
||||
"V2Score": 4.3,
|
||||
"V3Score": 3.7
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://access.redhat.com/security/cve/cve-2011-3374"
|
||||
],
|
||||
"PublishedDate": "2019-11-26T00:15:00Z",
|
||||
"LastModifiedDate": "2021-02-09T16:08:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
`)
|
||||
var redisSR = &models.ScanResult{
|
||||
JSONVersion: 4,
|
||||
ServerName: "redis (debian 10.10)",
|
||||
Family: "debian",
|
||||
ScannedBy: "trivy",
|
||||
ScannedVia: "trivy",
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2011-3374": {
|
||||
CveID: "CVE-2011-3374",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
models.PackageFixStatus{
|
||||
Name: "apt",
|
||||
NotFixedYet: true,
|
||||
FixState: "Affected",
|
||||
FixedIn: "",
|
||||
}},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "",
|
||||
Summary: "It was found that apt-key in apt, all versions, do not correctly validate gpg keys with the master keyring, leading to a potential man-in-the-middle attack.",
|
||||
Cvss3Severity: "LOW",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://access.redhat.com/security/cve/cve-2011-3374"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{},
|
||||
},
|
||||
},
|
||||
LibraryScanners: models.LibraryScanners{},
|
||||
Packages: models.Packages{
|
||||
"apt": models.Package{
|
||||
Name: "apt",
|
||||
Version: "1.8.2.3",
|
||||
},
|
||||
"adduser": models.Package{
|
||||
Name: "adduser",
|
||||
Version: "3.118",
|
||||
},
|
||||
"bsdutils": models.Package{
|
||||
Name: "bsdutils",
|
||||
Version: "1:2.33.1-0.1",
|
||||
},
|
||||
"pkgA": models.Package{
|
||||
Name: "pkgA",
|
||||
Version: "1:2.33.1-0.1",
|
||||
},
|
||||
},
|
||||
SrcPackages: models.SrcPackages{
|
||||
"util-linux": models.SrcPackage{
|
||||
Name: "util-linux",
|
||||
Version: "2.33.1-0.1",
|
||||
BinaryNames: []string{"bsdutils", "pkgA"},
|
||||
},
|
||||
},
|
||||
Optional: map[string]interface{}{
|
||||
"trivy-target": "redis (debian 10.10)",
|
||||
},
|
||||
}
|
||||
|
||||
var strutsTrivy = []byte(`
|
||||
{
|
||||
"SchemaVersion": 2,
|
||||
"ArtifactName": "/data/struts-1.2.7/lib",
|
||||
"ArtifactType": "filesystem",
|
||||
"Metadata": {
|
||||
"ImageConfig": {
|
||||
"architecture": "",
|
||||
"created": "0001-01-01T00:00:00Z",
|
||||
"os": "",
|
||||
"rootfs": {
|
||||
"type": "",
|
||||
"diff_ids": null
|
||||
},
|
||||
"config": {}
|
||||
}
|
||||
},
|
||||
"Results": [
|
||||
{
|
||||
"Target": "Java",
|
||||
"Class": "lang-pkgs",
|
||||
"Type": "jar",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "oro:oro",
|
||||
"Version": "2.0.7",
|
||||
"Layer": {}
|
||||
},
|
||||
{
|
||||
"Name": "struts:struts",
|
||||
"Version": "1.2.7",
|
||||
"Layer": {}
|
||||
},
|
||||
{
|
||||
"Name": "commons-beanutils:commons-beanutils",
|
||||
"Version": "1.7.0",
|
||||
"Layer": {}
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2014-0114",
|
||||
"PkgName": "commons-beanutils:commons-beanutils",
|
||||
"InstalledVersion": "1.7.0",
|
||||
"FixedVersion": "1.9.2",
|
||||
"Layer": {},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2014-0114",
|
||||
"Title": "Apache Struts 1: Class Loader manipulation via request parameters",
|
||||
"Description": "Apache Commons BeanUtils, as distributed in lib/commons-beanutils-1.8.0.jar in Apache Struts 1.x through 1.3.10 and in other products requiring commons-beanutils through 1.9.2, does not suppress the class property, which allows remote attackers to \"manipulate\" the ClassLoader and execute arbitrary code via the class parameter, as demonstrated by the passing of this parameter to the getClass method of the ActionForm object in Struts 1.",
|
||||
"Severity": "HIGH",
|
||||
"CweIDs": [
|
||||
"CWE-20"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V2Score": 7.5
|
||||
},
|
||||
"redhat": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V2Score": 7.5
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"http://advisories.mageia.org/MGASA-2014-0219.html"
|
||||
],
|
||||
"PublishedDate": "2014-04-30T10:49:00Z",
|
||||
"LastModifiedDate": "2021-01-26T18:15:00Z"
|
||||
},
|
||||
{
|
||||
"VulnerabilityID": "CVE-2012-1007",
|
||||
"PkgName": "struts:struts",
|
||||
"InstalledVersion": "1.2.7",
|
||||
"Layer": {},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2012-1007",
|
||||
"Title": "struts: multiple XSS flaws",
|
||||
"Description": "Multiple cross-site scripting (XSS) vulnerabilities in Apache Struts 1.3.10 allow remote attackers to inject arbitrary web script or HTML via (1) the name parameter to struts-examples/upload/upload-submit.do, or the message parameter to (2) struts-cookbook/processSimple.do or (3) struts-cookbook/processDyna.do.",
|
||||
"Severity": "MEDIUM",
|
||||
"CweIDs": [
|
||||
"CWE-79"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
|
||||
"V2Score": 4.3
|
||||
},
|
||||
"redhat": {
|
||||
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
|
||||
"V2Score": 4.3
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-1007"
|
||||
],
|
||||
"PublishedDate": "2012-02-07T04:09:00Z",
|
||||
"LastModifiedDate": "2018-10-17T01:29:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}`)
|
||||
|
||||
var strutsSR = &models.ScanResult{
|
||||
JSONVersion: 4,
|
||||
ServerName: "library scan by trivy",
|
||||
Family: "pseudo",
|
||||
ScannedBy: "trivy",
|
||||
ScannedVia: "trivy",
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2014-0114": {
|
||||
CveID: "CVE-2014-0114",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "Apache Struts 1: Class Loader manipulation via request parameters",
|
||||
Summary: "Apache Commons BeanUtils, as distributed in lib/commons-beanutils-1.8.0.jar in Apache Struts 1.x through 1.3.10 and in other products requiring commons-beanutils through 1.9.2, does not suppress the class property, which allows remote attackers to \"manipulate\" the ClassLoader and execute arbitrary code via the class parameter, as demonstrated by the passing of this parameter to the getClass method of the ActionForm object in Struts 1.",
|
||||
Cvss3Severity: "HIGH",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "http://advisories.mageia.org/MGASA-2014-0219.html"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{
|
||||
models.LibraryFixedIn{
|
||||
Key: "jar",
|
||||
Name: "commons-beanutils:commons-beanutils",
|
||||
FixedIn: "1.9.2",
|
||||
//TODO use Artifactname?
|
||||
Path: "Java",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
},
|
||||
"CVE-2012-1007": {
|
||||
CveID: "CVE-2012-1007",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "struts: multiple XSS flaws",
|
||||
Summary: "Multiple cross-site scripting (XSS) vulnerabilities in Apache Struts 1.3.10 allow remote attackers to inject arbitrary web script or HTML via (1) the name parameter to struts-examples/upload/upload-submit.do, or the message parameter to (2) struts-cookbook/processSimple.do or (3) struts-cookbook/processDyna.do.",
|
||||
Cvss3Severity: "MEDIUM",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-1007"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{
|
||||
models.LibraryFixedIn{
|
||||
Key: "jar",
|
||||
Name: "struts:struts",
|
||||
FixedIn: "",
|
||||
//TODO use Artifactname?
|
||||
Path: "Java",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
},
|
||||
},
|
||||
LibraryScanners: models.LibraryScanners{
|
||||
models.LibraryScanner{
|
||||
Type: "jar",
|
||||
LockfilePath: "Java",
|
||||
Libs: []models.Library{
|
||||
{
|
||||
Name: "commons-beanutils:commons-beanutils",
|
||||
Version: "1.7.0",
|
||||
},
|
||||
{
|
||||
Name: "oro:oro",
|
||||
Version: "2.0.7",
|
||||
},
|
||||
{
|
||||
Name: "struts:struts",
|
||||
Version: "1.2.7",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Packages: models.Packages{},
|
||||
SrcPackages: models.SrcPackages{},
|
||||
Optional: map[string]interface{}{
|
||||
"trivy-target": "Java",
|
||||
},
|
||||
}
|
||||
|
||||
var osAndLibTrivy = []byte(`
|
||||
{
|
||||
"SchemaVersion": 2,
|
||||
"ArtifactName": "quay.io/fluentd_elasticsearch/fluentd:v2.9.0",
|
||||
"ArtifactType": "container_image",
|
||||
"Metadata": {
|
||||
"OS": {
|
||||
"Family": "debian",
|
||||
"Name": "10.2"
|
||||
},
|
||||
"ImageID": "sha256:5a992077baba51b97f27591a10d54d2f2723dc9c81a3fe419e261023f2554933",
|
||||
"DiffIDs": [
|
||||
"sha256:25165eb51d15842f870f97873e0a58409d5e860e6108e3dd829bd10e484c0065"
|
||||
],
|
||||
"RepoTags": [
|
||||
"quay.io/fluentd_elasticsearch/fluentd:v2.9.0"
|
||||
],
|
||||
"RepoDigests": [
|
||||
"quay.io/fluentd_elasticsearch/fluentd@sha256:54716d825ec9791ffb403ac17a1e82159c98ac6161e02b2a054595ad01aa6726"
|
||||
],
|
||||
"ImageConfig": {
|
||||
"architecture": "amd64",
|
||||
"container": "232f3fc7ddffd71dc3ff52c6c0c3a5feea2f51acffd9b53850a8fc6f1a15319a",
|
||||
"created": "2020-03-04T13:59:39.161374106Z",
|
||||
"docker_version": "19.03.4",
|
||||
"history": [
|
||||
{
|
||||
"created": "2020-03-04T13:59:39.161374106Z",
|
||||
"created_by": "/bin/sh -c #(nop) CMD [\"/run.sh\"]",
|
||||
"empty_layer": true
|
||||
}
|
||||
],
|
||||
"os": "linux",
|
||||
"rootfs": {
|
||||
"type": "layers",
|
||||
"diff_ids": [
|
||||
"sha256:25165eb51d15842f870f97873e0a58409d5e860e6108e3dd829bd10e484c0065"
|
||||
]
|
||||
},
|
||||
"config": {
|
||||
"Cmd": [
|
||||
"/run.sh"
|
||||
],
|
||||
"Env": [
|
||||
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
|
||||
"LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.2"
|
||||
],
|
||||
"Image": "sha256:2a538358cddc4824e9eff1531e0c63ae5e3cda85d2984c647df9b1c816b9b86b",
|
||||
"ExposedPorts": {
|
||||
"80/tcp": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Results": [
|
||||
{
|
||||
"Target": "quay.io/fluentd_elasticsearch/fluentd:v2.9.0 (debian 10.2)",
|
||||
"Class": "os-pkgs",
|
||||
"Type": "debian",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "libgnutls30",
|
||||
"Version": "3.6.7-4",
|
||||
"SrcName": "gnutls28",
|
||||
"SrcVersion": "3.6.7-4",
|
||||
"Layer": {
|
||||
"Digest": "sha256:000eee12ec04cc914bf96e8f5dee7767510c2aca3816af6078bd9fbe3150920c",
|
||||
"DiffID": "sha256:831c5620387fb9efec59fc82a42b948546c6be601e3ab34a87108ecf852aa15f"
|
||||
}
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2021-20231",
|
||||
"PkgName": "libgnutls30",
|
||||
"InstalledVersion": "3.6.7-4",
|
||||
"FixedVersion": "3.6.7-4+deb10u7",
|
||||
"Layer": {
|
||||
"Digest": "sha256:000eee12ec04cc914bf96e8f5dee7767510c2aca3816af6078bd9fbe3150920c",
|
||||
"DiffID": "sha256:831c5620387fb9efec59fc82a42b948546c6be601e3ab34a87108ecf852aa15f"
|
||||
},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2021-20231",
|
||||
"Title": "gnutls: Use after free in client key_share extension",
|
||||
"Description": "A flaw was found in gnutls. A use after free issue in client sending key_share extension may lead to memory corruption and other consequences.",
|
||||
"Severity": "CRITICAL",
|
||||
"CweIDs": [
|
||||
"CWE-416"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
|
||||
"V2Score": 7.5,
|
||||
"V3Score": 9.8
|
||||
},
|
||||
"redhat": {
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:N/A:L",
|
||||
"V3Score": 3.7
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://bugzilla.redhat.com/show_bug.cgi?id=1922276"
|
||||
],
|
||||
"PublishedDate": "2021-03-12T19:15:00Z",
|
||||
"LastModifiedDate": "2021-06-01T14:07:00Z"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"Target": "Ruby",
|
||||
"Class": "lang-pkgs",
|
||||
"Type": "gemspec",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "activesupport",
|
||||
"Version": "6.0.2.1",
|
||||
"License": "MIT",
|
||||
"Layer": {
|
||||
"Digest": "sha256:a8877cad19f14a7044524a145ce33170085441a7922458017db1631dcd5f7602",
|
||||
"DiffID": "sha256:75e43d55939745950bc3f8fad56c5834617c4339f0f54755e69a0dd5372624e9"
|
||||
},
|
||||
"FilePath": "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec"
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2020-8165",
|
||||
"PkgName": "activesupport",
|
||||
"PkgPath": "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec",
|
||||
"InstalledVersion": "6.0.2.1",
|
||||
"FixedVersion": "6.0.3.1, 5.2.4.3",
|
||||
"Layer": {
|
||||
"Digest": "sha256:a8877cad19f14a7044524a145ce33170085441a7922458017db1631dcd5f7602",
|
||||
"DiffID": "sha256:75e43d55939745950bc3f8fad56c5834617c4339f0f54755e69a0dd5372624e9"
|
||||
},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2020-8165",
|
||||
"Title": "rubygem-activesupport: potentially unintended unmarshalling of user-provided objects in MemCacheStore and RedisCacheStore",
|
||||
"Description": "A deserialization of untrusted data vulnernerability exists in rails \u003c 5.2.4.3, rails \u003c 6.0.3.1 that can allow an attacker to unmarshal user-provided objects in MemCacheStore and RedisCacheStore potentially resulting in an RCE.",
|
||||
"Severity": "CRITICAL",
|
||||
"CweIDs": [
|
||||
"CWE-502"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
|
||||
"V2Score": 7.5,
|
||||
"V3Score": 9.8
|
||||
},
|
||||
"redhat": {
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
|
||||
"V3Score": 9.8
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://www.debian.org/security/2020/dsa-4766"
|
||||
],
|
||||
"PublishedDate": "2020-06-19T18:15:00Z",
|
||||
"LastModifiedDate": "2020-10-17T12:15:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}`)
|
||||
|
||||
var osAndLibSR = &models.ScanResult{
|
||||
JSONVersion: 4,
|
||||
ServerName: "quay.io/fluentd_elasticsearch/fluentd:v2.9.0 (debian 10.2)",
|
||||
Family: "debian",
|
||||
ScannedBy: "trivy",
|
||||
ScannedVia: "trivy",
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2021-20231": {
|
||||
CveID: "CVE-2021-20231",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
models.PackageFixStatus{
|
||||
Name: "libgnutls30",
|
||||
NotFixedYet: false,
|
||||
FixState: "",
|
||||
FixedIn: "3.6.7-4+deb10u7",
|
||||
}},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "gnutls: Use after free in client key_share extension",
|
||||
Summary: "A flaw was found in gnutls. A use after free issue in client sending key_share extension may lead to memory corruption and other consequences.",
|
||||
Cvss3Severity: "CRITICAL",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://bugzilla.redhat.com/show_bug.cgi?id=1922276"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{},
|
||||
},
|
||||
"CVE-2020-8165": {
|
||||
CveID: "CVE-2020-8165",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "rubygem-activesupport: potentially unintended unmarshalling of user-provided objects in MemCacheStore and RedisCacheStore",
|
||||
Summary: "A deserialization of untrusted data vulnernerability exists in rails \u003c 5.2.4.3, rails \u003c 6.0.3.1 that can allow an attacker to unmarshal user-provided objects in MemCacheStore and RedisCacheStore potentially resulting in an RCE.",
|
||||
Cvss3Severity: "CRITICAL",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://www.debian.org/security/2020/dsa-4766"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{
|
||||
models.LibraryFixedIn{
|
||||
Key: "gemspec",
|
||||
Name: "activesupport",
|
||||
FixedIn: "6.0.3.1, 5.2.4.3",
|
||||
Path: "Ruby",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
LibraryScanners: models.LibraryScanners{
|
||||
models.LibraryScanner{
|
||||
Type: "gemspec",
|
||||
LockfilePath: "Ruby",
|
||||
Libs: []models.Library{
|
||||
{
|
||||
Name: "activesupport",
|
||||
Version: "6.0.2.1",
|
||||
FilePath: "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Packages: models.Packages{
|
||||
"libgnutls30": models.Package{
|
||||
Name: "libgnutls30",
|
||||
Version: "3.6.7-4",
|
||||
},
|
||||
},
|
||||
SrcPackages: models.SrcPackages{
|
||||
"gnutls28": models.SrcPackage{
|
||||
Name: "gnutls28",
|
||||
Version: "3.6.7-4",
|
||||
BinaryNames: []string{"libgnutls30"},
|
||||
},
|
||||
},
|
||||
Optional: map[string]interface{}{
|
||||
"trivy-target": "quay.io/fluentd_elasticsearch/fluentd:v2.9.0 (debian 10.2)",
|
||||
},
|
||||
}
|
||||
227
contrib/trivy/pkg/converter.go
Normal file
@@ -0,0 +1,227 @@
|
||||
package pkg
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
ftypes "github.com/aquasecurity/fanal/types"
|
||||
|
||||
"github.com/aquasecurity/fanal/analyzer/os"
|
||||
"github.com/aquasecurity/trivy/pkg/report"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
// Convert :
|
||||
func Convert(results report.Results) (result *models.ScanResult, err error) {
|
||||
scanResult := &models.ScanResult{
|
||||
JSONVersion: models.JSONVersion,
|
||||
ScannedCves: models.VulnInfos{},
|
||||
}
|
||||
|
||||
pkgs := models.Packages{}
|
||||
srcPkgs := models.SrcPackages{}
|
||||
vulnInfos := models.VulnInfos{}
|
||||
uniqueLibraryScannerPaths := map[string]models.LibraryScanner{}
|
||||
for _, trivyResult := range results {
|
||||
for _, vuln := range trivyResult.Vulnerabilities {
|
||||
if _, ok := vulnInfos[vuln.VulnerabilityID]; !ok {
|
||||
vulnInfos[vuln.VulnerabilityID] = models.VulnInfo{
|
||||
CveID: vuln.VulnerabilityID,
|
||||
Confidences: models.Confidences{
|
||||
{
|
||||
Score: 100,
|
||||
DetectionMethod: models.TrivyMatchStr,
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
CveContents: models.CveContents{},
|
||||
LibraryFixedIns: models.LibraryFixedIns{},
|
||||
// VulnType : "",
|
||||
}
|
||||
}
|
||||
vulnInfo := vulnInfos[vuln.VulnerabilityID]
|
||||
var notFixedYet bool
|
||||
fixState := ""
|
||||
if len(vuln.FixedVersion) == 0 {
|
||||
notFixedYet = true
|
||||
fixState = "Affected"
|
||||
}
|
||||
var references models.References
|
||||
for _, reference := range vuln.References {
|
||||
references = append(references, models.Reference{
|
||||
Source: "trivy",
|
||||
Link: reference,
|
||||
})
|
||||
}
|
||||
|
||||
sort.Slice(references, func(i, j int) bool {
|
||||
return references[i].Link < references[j].Link
|
||||
})
|
||||
|
||||
var published time.Time
|
||||
if vuln.PublishedDate != nil {
|
||||
published = *vuln.PublishedDate
|
||||
}
|
||||
|
||||
var lastModified time.Time
|
||||
if vuln.LastModifiedDate != nil {
|
||||
lastModified = *vuln.LastModifiedDate
|
||||
}
|
||||
|
||||
vulnInfo.CveContents = models.CveContents{
|
||||
models.Trivy: []models.CveContent{{
|
||||
Cvss3Severity: vuln.Severity,
|
||||
References: references,
|
||||
Title: vuln.Title,
|
||||
Summary: vuln.Description,
|
||||
Published: published,
|
||||
LastModified: lastModified,
|
||||
}},
|
||||
}
|
||||
// do only if image type is Vuln
|
||||
if IsTrivySupportedOS(trivyResult.Type) {
|
||||
pkgs[vuln.PkgName] = models.Package{
|
||||
Name: vuln.PkgName,
|
||||
Version: vuln.InstalledVersion,
|
||||
}
|
||||
vulnInfo.AffectedPackages = append(vulnInfo.AffectedPackages, models.PackageFixStatus{
|
||||
Name: vuln.PkgName,
|
||||
NotFixedYet: notFixedYet,
|
||||
FixState: fixState,
|
||||
FixedIn: vuln.FixedVersion,
|
||||
})
|
||||
} else {
|
||||
vulnInfo.LibraryFixedIns = append(vulnInfo.LibraryFixedIns, models.LibraryFixedIn{
|
||||
Key: trivyResult.Type,
|
||||
Name: vuln.PkgName,
|
||||
Path: trivyResult.Target,
|
||||
FixedIn: vuln.FixedVersion,
|
||||
})
|
||||
libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
|
||||
libScanner.Type = trivyResult.Type
|
||||
libScanner.Libs = append(libScanner.Libs, models.Library{
|
||||
Name: vuln.PkgName,
|
||||
Version: vuln.InstalledVersion,
|
||||
FilePath: vuln.PkgPath,
|
||||
})
|
||||
uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
|
||||
}
|
||||
vulnInfos[vuln.VulnerabilityID] = vulnInfo
|
||||
}
|
||||
|
||||
// --list-all-pkgs flag of trivy will output all installed packages, so collect them.
|
||||
if trivyResult.Class == report.ClassOSPkg {
|
||||
for _, p := range trivyResult.Packages {
|
||||
pkgs[p.Name] = models.Package{
|
||||
Name: p.Name,
|
||||
Version: p.Version,
|
||||
}
|
||||
if p.Name != p.SrcName {
|
||||
if v, ok := srcPkgs[p.SrcName]; !ok {
|
||||
srcPkgs[p.SrcName] = models.SrcPackage{
|
||||
Name: p.SrcName,
|
||||
Version: p.SrcVersion,
|
||||
BinaryNames: []string{p.Name},
|
||||
}
|
||||
} else {
|
||||
v.AddBinaryName(p.Name)
|
||||
srcPkgs[p.SrcName] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if trivyResult.Class == report.ClassLangPkg {
|
||||
libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
|
||||
libScanner.Type = trivyResult.Type
|
||||
for _, p := range trivyResult.Packages {
|
||||
libScanner.Libs = append(libScanner.Libs, models.Library{
|
||||
Name: p.Name,
|
||||
Version: p.Version,
|
||||
FilePath: p.FilePath,
|
||||
})
|
||||
}
|
||||
uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
|
||||
}
|
||||
}
|
||||
|
||||
// flatten and unique libraries
|
||||
libraryScanners := make([]models.LibraryScanner, 0, len(uniqueLibraryScannerPaths))
|
||||
for path, v := range uniqueLibraryScannerPaths {
|
||||
uniqueLibrary := map[string]models.Library{}
|
||||
for _, lib := range v.Libs {
|
||||
uniqueLibrary[lib.Name+lib.Version] = lib
|
||||
}
|
||||
|
||||
var libraries []models.Library
|
||||
for _, library := range uniqueLibrary {
|
||||
libraries = append(libraries, library)
|
||||
}
|
||||
|
||||
sort.Slice(libraries, func(i, j int) bool {
|
||||
return libraries[i].Name < libraries[j].Name
|
||||
})
|
||||
|
||||
libscanner := models.LibraryScanner{
|
||||
Type: v.Type,
|
||||
LockfilePath: path,
|
||||
Libs: libraries,
|
||||
}
|
||||
libraryScanners = append(libraryScanners, libscanner)
|
||||
}
|
||||
sort.Slice(libraryScanners, func(i, j int) bool {
|
||||
return libraryScanners[i].LockfilePath < libraryScanners[j].LockfilePath
|
||||
})
|
||||
scanResult.ScannedCves = vulnInfos
|
||||
scanResult.Packages = pkgs
|
||||
scanResult.SrcPackages = srcPkgs
|
||||
scanResult.LibraryScanners = libraryScanners
|
||||
return scanResult, nil
|
||||
}
|
||||
|
||||
// IsTrivySupportedOS :
|
||||
func IsTrivySupportedOS(family string) bool {
|
||||
supportedFamilies := map[string]interface{}{
|
||||
os.RedHat: struct{}{},
|
||||
os.Debian: struct{}{},
|
||||
os.Ubuntu: struct{}{},
|
||||
os.CentOS: struct{}{},
|
||||
os.Rocky: struct{}{},
|
||||
os.Alma: struct{}{},
|
||||
os.Fedora: struct{}{},
|
||||
os.Amazon: struct{}{},
|
||||
os.Oracle: struct{}{},
|
||||
os.Windows: struct{}{},
|
||||
os.OpenSUSE: struct{}{},
|
||||
os.OpenSUSELeap: struct{}{},
|
||||
os.OpenSUSETumbleweed: struct{}{},
|
||||
os.SLES: struct{}{},
|
||||
os.Photon: struct{}{},
|
||||
os.Alpine: struct{}{},
|
||||
}
|
||||
_, ok := supportedFamilies[family]
|
||||
return ok
|
||||
}
|
||||
|
||||
// IsTrivySupportedLib :
|
||||
func IsTrivySupportedLib(typestr string) bool {
|
||||
supportedLibs := map[string]interface{}{
|
||||
ftypes.Bundler: struct{}{},
|
||||
ftypes.GemSpec: struct{}{},
|
||||
ftypes.Cargo: struct{}{},
|
||||
ftypes.Composer: struct{}{},
|
||||
ftypes.Npm: struct{}{},
|
||||
ftypes.NuGet: struct{}{},
|
||||
ftypes.Pip: struct{}{},
|
||||
ftypes.Pipenv: struct{}{},
|
||||
ftypes.Poetry: struct{}{},
|
||||
ftypes.PythonPkg: struct{}{},
|
||||
ftypes.NodePkg: struct{}{},
|
||||
ftypes.Yarn: struct{}{},
|
||||
ftypes.Jar: struct{}{},
|
||||
ftypes.Pom: struct{}{},
|
||||
ftypes.GoBinary: struct{}{},
|
||||
ftypes.GoMod: struct{}{},
|
||||
}
|
||||
_, ok := supportedLibs[typestr]
|
||||
return ok
|
||||
}
|
||||
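Convert above de-duplicates the libraries collected per target by keying a map on name+version and then sorts both the library list and the resulting LibraryScanners, so the generated JSON is deterministic. The same idea in isolation (a generic lib type, not the vuls models):

package main

import (
    "fmt"
    "sort"
)

type lib struct{ Name, Version string }

// uniqueSorted drops duplicate name+version pairs and returns a stable ordering.
func uniqueSorted(libs []lib) []lib {
    seen := map[string]lib{}
    for _, l := range libs {
        seen[l.Name+l.Version] = l // later duplicates overwrite earlier ones
    }
    out := make([]lib, 0, len(seen))
    for _, l := range seen {
        out = append(out, l)
    }
    sort.Slice(out, func(i, j int) bool { return out[i].Name < out[j].Name })
    return out
}

func main() {
    fmt.Println(uniqueSorted([]lib{
        {"struts:struts", "1.2.7"},
        {"oro:oro", "2.0.7"},
        {"struts:struts", "1.2.7"}, // duplicate is dropped
    }))
}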
225
detector/cve_client.go
Normal file
@@ -0,0 +1,225 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/util"
|
||||
cvedb "github.com/vulsio/go-cve-dictionary/db"
|
||||
cvelog "github.com/vulsio/go-cve-dictionary/log"
|
||||
cvemodels "github.com/vulsio/go-cve-dictionary/models"
|
||||
)
|
||||
|
||||
type goCveDictClient struct {
|
||||
cnf config.VulnDictInterface
|
||||
driver cvedb.DB
|
||||
}
|
||||
|
||||
func newGoCveDictClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goCveDictClient, error) {
|
||||
if err := cvelog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
driver, locked, err := newCveDB(cnf)
|
||||
if locked {
|
||||
return nil, xerrors.Errorf("SQLite3 is locked: %s", cnf.GetSQLite3Path())
|
||||
} else if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &goCveDictClient{cnf: cnf, driver: driver}, nil
|
||||
}
|
||||
|
||||
func (api goCveDictClient) closeDB() error {
|
||||
if api.driver == nil {
|
||||
return nil
|
||||
}
|
||||
return api.driver.CloseDB()
|
||||
}
|
||||
|
||||
func (api goCveDictClient) fetchCveDetails(cveIDs []string) (cveDetails []cvemodels.CveDetail, err error) {
|
||||
m, err := api.driver.GetMulti(cveIDs)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to GetMulti. err: %w", err)
|
||||
}
|
||||
for _, v := range m {
|
||||
cveDetails = append(cveDetails, v)
|
||||
}
|
||||
return cveDetails, nil
|
||||
}
|
||||
|
||||
type response struct {
|
||||
Key string
|
||||
CveDetail cvemodels.CveDetail
|
||||
}
|
||||
|
||||
func (api goCveDictClient) fetchCveDetailsViaHTTP(cveIDs []string) (cveDetails []cvemodels.CveDetail, err error) {
|
||||
reqChan := make(chan string, len(cveIDs))
|
||||
resChan := make(chan response, len(cveIDs))
|
||||
errChan := make(chan error, len(cveIDs))
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- cveID
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for range cveIDs {
|
||||
tasks <- func() {
|
||||
select {
|
||||
case cveID := <-reqChan:
|
||||
url, err := util.URLPathJoin(api.cnf.GetURL(), "cves", cveID)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
api.httpGet(cveID, url, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for range cveIDs {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
cveDetails = append(cveDetails, res.CveDetail)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching CVE")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil,
|
||||
xerrors.Errorf("Failed to fetch CVE. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (api goCveDictClient) httpGet(key, url string, resChan chan<- response, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
f := func() (err error) {
|
||||
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
return xerrors.Errorf("HTTP GET Error, url: %s, resp: %v, err: %+v",
|
||||
url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error: %w", err)
|
||||
return
|
||||
}
|
||||
cveDetail := cvemodels.CveDetail{}
|
||||
if err := json.Unmarshal([]byte(body), &cveDetail); err != nil {
|
||||
errChan <- xerrors.Errorf("Failed to Unmarshal. body: %s, err: %w", body, err)
|
||||
return
|
||||
}
|
||||
resChan <- response{
|
||||
key,
|
||||
cveDetail,
|
||||
}
|
||||
}
|
||||
|
||||
func (api goCveDictClient) detectCveByCpeURI(cpeURI string, useJVN bool) (cves []cvemodels.CveDetail, err error) {
|
||||
if api.cnf.IsFetchViaHTTP() {
|
||||
url, err := util.URLPathJoin(api.cnf.GetURL(), "cpes")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
query := map[string]string{"name": cpeURI}
|
||||
logging.Log.Debugf("HTTP Request to %s, query: %#v", url, query)
|
||||
if cves, err = api.httpPost(url, query); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
if cves, err = api.driver.GetByCpeURI(cpeURI); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if useJVN {
|
||||
return cves, nil
|
||||
}
|
||||
|
||||
nvdCves := []cvemodels.CveDetail{}
|
||||
for _, cve := range cves {
|
||||
if !cve.HasNvd() {
|
||||
continue
|
||||
}
|
||||
cve.Jvns = []cvemodels.Jvn{}
|
||||
nvdCves = append(nvdCves, cve)
|
||||
}
|
||||
return nvdCves, nil
|
||||
}
|
||||
|
||||
func (api goCveDictClient) httpPost(url string, query map[string]string) ([]cvemodels.CveDetail, error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
f := func() (err error) {
|
||||
req := gorequest.New().Timeout(10 * time.Second).Post(url)
|
||||
for key := range query {
|
||||
req = req.Send(fmt.Sprintf("%s=%s", key, query[key])).Type("json")
|
||||
}
|
||||
resp, body, errs = req.End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
return xerrors.Errorf("HTTP POST error. url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
logging.Log.Warnf("Failed to HTTP POST. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("HTTP Error: %w", err)
|
||||
}
|
||||
|
||||
cveDetails := []cvemodels.CveDetail{}
|
||||
if err := json.Unmarshal([]byte(body), &cveDetails); err != nil {
|
||||
return nil,
|
||||
xerrors.Errorf("Failed to Unmarshal. body: %s, err: %w", body, err)
|
||||
}
|
||||
return cveDetails, nil
|
||||
}
|
||||
|
||||
func newCveDB(cnf config.VulnDictInterface) (driver cvedb.DB, locked bool, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
return nil, false, nil
|
||||
}
|
||||
path := cnf.GetURL()
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
driver, locked, err = cvedb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), cvedb.Option{})
|
||||
if err != nil {
|
||||
err = xerrors.Errorf("Failed to init CVE DB. err: %w, path: %s", err, path)
|
||||
return nil, locked, err
|
||||
}
|
||||
return driver, false, nil
|
||||
}
|
||||
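httpGet and httpPost above wrap each request in backoff.RetryNotify, so transient failures against go-cve-dictionary are retried with exponential backoff before giving up. A stripped-down sketch of that retry wrapper (the health-check URL is a placeholder):

package main

import (
    "fmt"
    "net/http"
    "time"

    "github.com/cenkalti/backoff"
)

func main() {
    url := "http://127.0.0.1:1323/health" // placeholder endpoint
    f := func() error {
        resp, err := http.Get(url)
        if err != nil {
            return err
        }
        defer resp.Body.Close()
        if resp.StatusCode != 200 {
            return fmt.Errorf("unexpected status: %d", resp.StatusCode)
        }
        return nil
    }
    notify := func(err error, t time.Duration) {
        fmt.Printf("Failed to HTTP GET. retrying in %s. err: %+v\n", t, err)
    }
    if err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify); err != nil {
        fmt.Printf("giving up: %+v\n", err)
    }
}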
592
detector/detector.go
Normal file
@@ -0,0 +1,592 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/contrib/owasp-dependency-check/parser"
|
||||
"github.com/future-architect/vuls/cwe"
|
||||
"github.com/future-architect/vuls/gost"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/oval"
|
||||
"github.com/future-architect/vuls/reporter"
|
||||
"github.com/future-architect/vuls/util"
|
||||
cvemodels "github.com/vulsio/go-cve-dictionary/models"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// Cpe :
|
||||
type Cpe struct {
|
||||
CpeURI string
|
||||
UseJVN bool
|
||||
}
|
||||
|
||||
// Detect vulns and fill CVE detailed information
|
||||
func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
|
||||
|
||||
// Use the same reportedAt for all rs
|
||||
reportedAt := time.Now()
|
||||
for i, r := range rs {
|
||||
if !config.Conf.RefreshCve && !needToRefreshCve(r) {
|
||||
logging.Log.Info("No need to refresh")
|
||||
continue
|
||||
}
|
||||
|
||||
if !reuseScannedCves(&r) {
|
||||
r.ScannedCves = models.VulnInfos{}
|
||||
}
|
||||
|
||||
if err := DetectLibsCves(&r, config.Conf.TrivyCacheDBDir, config.Conf.NoProgress); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with Library dependency: %w", err)
|
||||
}
|
||||
|
||||
if err := DetectPkgCves(&r, config.Conf.OvalDict, config.Conf.Gost); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect Pkg CVE: %w", err)
|
||||
}
|
||||
|
||||
cpeURIs, owaspDCXMLPath := []string{}, ""
|
||||
cpes := []Cpe{}
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
cpeURIs = config.Conf.Servers[r.ServerName].CpeNames
|
||||
owaspDCXMLPath = config.Conf.Servers[r.ServerName].OwaspDCXMLPath
|
||||
} else {
|
||||
if s, ok := config.Conf.Servers[r.ServerName]; ok {
|
||||
if con, ok := s.Containers[r.Container.Name]; ok {
|
||||
cpeURIs = con.Cpes
|
||||
owaspDCXMLPath = con.OwaspDCXMLPath
|
||||
}
|
||||
}
|
||||
}
|
||||
if owaspDCXMLPath != "" {
|
||||
cpes, err := parser.Parse(owaspDCXMLPath)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to read OWASP Dependency Check XML on %s, `%s`, err: %w",
|
||||
r.ServerInfo(), owaspDCXMLPath, err)
|
||||
}
|
||||
cpeURIs = append(cpeURIs, cpes...)
|
||||
}
|
||||
for _, uri := range cpeURIs {
|
||||
cpes = append(cpes, Cpe{
|
||||
CpeURI: uri,
|
||||
UseJVN: true,
|
||||
})
|
||||
}
|
||||
if err := DetectCpeURIsCves(&r, cpes, config.Conf.CveDict, config.Conf.LogOpts); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect CVE of `%s`: %w", cpeURIs, err)
|
||||
}
|
||||
|
||||
repos := config.Conf.Servers[r.ServerName].GitHubRepos
|
||||
if err := DetectGitHubCves(&r, repos); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect GitHub Cves: %w", err)
|
||||
}
|
||||
|
||||
if err := DetectWordPressCves(&r, config.Conf.WpScan); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect WordPress Cves: %w", err)
|
||||
}
|
||||
|
||||
if err := gost.FillCVEsWithRedHat(&r, config.Conf.Gost); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with gost: %w", err)
|
||||
}
|
||||
|
||||
if err := FillCvesWithNvdJvn(&r, config.Conf.CveDict, config.Conf.LogOpts); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with CVE: %w", err)
|
||||
}
|
||||
|
||||
nExploitCve, err := FillWithExploit(&r, config.Conf.Exploit)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with exploit: %w", err)
|
||||
}
|
||||
logging.Log.Infof("%s: %d PoC are detected", r.FormatServerName(), nExploitCve)
|
||||
|
||||
nMetasploitCve, err := FillWithMetasploit(&r, config.Conf.Metasploit)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with metasploit: %w", err)
|
||||
}
|
||||
logging.Log.Infof("%s: %d exploits are detected", r.FormatServerName(), nMetasploitCve)
|
||||
|
||||
if err := FillWithKEVuln(&r, config.Conf.KEVuln); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with Known Exploited Vulnerabilities: %w", err)
|
||||
}
|
||||
|
||||
FillCweDict(&r)
|
||||
|
||||
r.ReportedBy, _ = os.Hostname()
|
||||
r.Lang = config.Conf.Lang
|
||||
r.ReportedAt = reportedAt
|
||||
r.ReportedVersion = config.Version
|
||||
r.ReportedRevision = config.Revision
|
||||
r.Config.Report = config.Conf
|
||||
r.Config.Report.Servers = map[string]config.ServerInfo{
|
||||
r.ServerName: config.Conf.Servers[r.ServerName],
|
||||
}
|
||||
rs[i] = r
|
||||
}
|
||||
|
||||
// Overwrite the json file every time to clear the fields specified in config.IgnoredJSONKeys
|
||||
for _, r := range rs {
|
||||
if s, ok := config.Conf.Servers[r.ServerName]; ok {
|
||||
r = r.ClearFields(s.IgnoredJSONKeys)
|
||||
}
|
||||
//TODO don't call here
|
||||
if err := reporter.OverwriteJSONFile(dir, r); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to write JSON: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
if config.Conf.DiffPlus || config.Conf.DiffMinus {
|
||||
prevs, err := loadPrevious(rs, config.Conf.ResultsDir)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rs = diff(rs, prevs, config.Conf.DiffPlus, config.Conf.DiffMinus)
|
||||
}
|
||||
|
||||
for i, r := range rs {
|
||||
nFiltered := 0
|
||||
logging.Log.Infof("%s: total %d CVEs detected", r.FormatServerName(), len(r.ScannedCves))
|
||||
|
||||
if 0 < config.Conf.CvssScoreOver {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterByCvssOver(config.Conf.CvssScoreOver)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --cvss-over=%g", r.FormatServerName(), nFiltered, config.Conf.CvssScoreOver)
|
||||
}
|
||||
|
||||
if config.Conf.IgnoreUnfixed {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterUnfixed(config.Conf.IgnoreUnfixed)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --ignore-unfixed", r.FormatServerName(), nFiltered)
|
||||
}
|
||||
|
||||
if 0 < config.Conf.ConfidenceScoreOver {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterByConfidenceOver(config.Conf.ConfidenceScoreOver)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --confidence-over=%d", r.FormatServerName(), nFiltered, config.Conf.ConfidenceScoreOver)
|
||||
}
|
||||
|
||||
// IgnoreCves
|
||||
ignoreCves := []string{}
|
||||
if r.Container.Name == "" {
|
||||
ignoreCves = config.Conf.Servers[r.ServerName].IgnoreCves
|
||||
} else if con, ok := config.Conf.Servers[r.ServerName].Containers[r.Container.Name]; ok {
|
||||
ignoreCves = con.IgnoreCves
|
||||
}
|
||||
if 0 < len(ignoreCves) {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterIgnoreCves(ignoreCves)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by ignoreCves=%s", r.FormatServerName(), nFiltered, ignoreCves)
|
||||
}
|
||||
|
||||
// ignorePkgs
|
||||
ignorePkgsRegexps := []string{}
|
||||
if r.Container.Name == "" {
|
||||
ignorePkgsRegexps = config.Conf.Servers[r.ServerName].IgnorePkgsRegexp
|
||||
} else if s, ok := config.Conf.Servers[r.ServerName].Containers[r.Container.Name]; ok {
|
||||
ignorePkgsRegexps = s.IgnorePkgsRegexp
|
||||
}
|
||||
if 0 < len(ignorePkgsRegexps) {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterIgnorePkgs(ignorePkgsRegexps)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by ignorePkgsRegexp=%s", r.FormatServerName(), nFiltered, ignorePkgsRegexps)
|
||||
}
|
||||
|
||||
// IgnoreUnscored
|
||||
if config.Conf.IgnoreUnscoredCves {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FindScoredVulns()
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --ignore-unscored-cves", r.FormatServerName(), nFiltered)
|
||||
}
|
||||
|
||||
r.FilterInactiveWordPressLibs(config.Conf.WpScan.DetectInactive)
|
||||
rs[i] = r
|
||||
}
|
||||
return rs, nil
|
||||
}
|
||||
|
||||
// DetectPkgCves detects OS pkg cves
|
||||
// pass 2 configs
|
||||
func DetectPkgCves(r *models.ScanResult, ovalCnf config.GovalDictConf, gostCnf config.GostConf) error {
|
||||
// Pkg Scan
|
||||
if r.Release != "" {
|
||||
if len(r.Packages)+len(r.SrcPackages) > 0 {
|
||||
// OVAL, gost(Debian Security Tracker) does not support Package for Raspbian, so skip it.
|
||||
if r.Family == constant.Raspbian {
|
||||
r = r.RemoveRaspbianPackFromResult()
|
||||
}
|
||||
|
||||
// OVAL
|
||||
if err := detectPkgsCvesWithOval(ovalCnf, r); err != nil {
|
||||
return xerrors.Errorf("Failed to detect CVE with OVAL: %w", err)
|
||||
}
|
||||
|
||||
// gost
|
||||
if err := detectPkgsCvesWithGost(gostCnf, r); err != nil {
|
||||
return xerrors.Errorf("Failed to detect CVE with gost: %w", err)
|
||||
}
|
||||
} else {
|
||||
logging.Log.Infof("Number of packages is 0. Skip OVAL and gost detection")
|
||||
}
|
||||
} else if reuseScannedCves(r) {
|
||||
logging.Log.Infof("r.Release is empty. Use CVEs as it as.")
|
||||
} else if r.Family == constant.ServerTypePseudo {
|
||||
logging.Log.Infof("pseudo type. Skip OVAL and gost detection")
|
||||
} else {
|
||||
logging.Log.Infof("r.Release is empty. detect as pseudo type. Skip OVAL and gost detection")
|
||||
}
|
||||
|
||||
for i, v := range r.ScannedCves {
|
||||
for j, p := range v.AffectedPackages {
|
||||
if p.NotFixedYet && p.FixState == "" {
|
||||
p.FixState = "Not fixed yet"
|
||||
r.ScannedCves[i].AffectedPackages[j] = p
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// To keep backward compatibility
|
||||
// Newer versions use ListenPortStats,
|
||||
// but older versions of Vuls are set to ListenPorts.
|
||||
// Set ListenPorts to ListenPortStats to allow newer Vuls to report old results.
|
||||
for i, pkg := range r.Packages {
|
||||
for j, proc := range pkg.AffectedProcs {
|
||||
for _, ipPort := range proc.ListenPorts {
|
||||
ps, err := models.NewPortStat(ipPort)
|
||||
if err != nil {
|
||||
logging.Log.Warnf("Failed to parse ip:port: %s, err:%+v", ipPort, err)
|
||||
continue
|
||||
}
|
||||
r.Packages[i].AffectedProcs[j].ListenPortStats = append(
|
||||
r.Packages[i].AffectedProcs[j].ListenPortStats, *ps)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// DetectGitHubCves fetches CVEs from GitHub Security Alerts
|
||||
func DetectGitHubCves(r *models.ScanResult, githubConfs map[string]config.GitHubConf) error {
|
||||
if len(githubConfs) == 0 {
|
||||
return nil
|
||||
}
|
||||
for ownerRepo, setting := range githubConfs {
|
||||
ss := strings.Split(ownerRepo, "/")
|
||||
if len(ss) != 2 {
|
||||
return xerrors.Errorf("Failed to parse GitHub owner/repo: %s", ownerRepo)
|
||||
}
|
||||
owner, repo := ss[0], ss[1]
|
||||
n, err := DetectGitHubSecurityAlerts(r, owner, repo, setting.Token, setting.IgnoreGitHubDismissed)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to access GitHub Security Alerts: %w", err)
|
||||
}
|
||||
logging.Log.Infof("%s: %d CVEs detected with GHSA %s/%s",
|
||||
r.FormatServerName(), n, owner, repo)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// DetectWordPressCves detects CVEs of WordPress
|
||||
func DetectWordPressCves(r *models.ScanResult, wpCnf config.WpScanConf) error {
|
||||
if len(r.WordPressPackages) == 0 {
|
||||
return nil
|
||||
}
|
||||
logging.Log.Infof("%s: Detect WordPress CVE. Number of pkgs: %d ", r.ServerInfo(), len(r.WordPressPackages))
|
||||
n, err := detectWordPressCves(r, wpCnf)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to detect WordPress CVE: %w", err)
|
||||
}
|
||||
logging.Log.Infof("%s: found %d WordPress CVEs", r.FormatServerName(), n)
|
||||
return nil
|
||||
}
|
||||
|
||||
// FillCvesWithNvdJvn fills CVE detail with NVD, JVN
|
||||
func FillCvesWithNvdJvn(r *models.ScanResult, cnf config.GoCveDictConf, logOpts logging.LogOpts) (err error) {
|
||||
cveIDs := []string{}
|
||||
for _, v := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, v.CveID)
|
||||
}
|
||||
|
||||
client, err := newGoCveDictClient(&cnf, logOpts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := client.closeDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
var ds []cvemodels.CveDetail
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
ds, err = client.fetchCveDetailsViaHTTP(cveIDs)
|
||||
} else {
|
||||
ds, err = client.fetchCveDetails(cveIDs)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, d := range ds {
|
||||
nvds, exploits, mitigations := models.ConvertNvdToModel(d.CveID, d.Nvds)
|
||||
jvns := models.ConvertJvnToModel(d.CveID, d.Jvns)
|
||||
|
||||
alerts := fillCertAlerts(&d)
|
||||
for cveID, vinfo := range r.ScannedCves {
|
||||
if vinfo.CveID == d.CveID {
|
||||
if vinfo.CveContents == nil {
|
||||
vinfo.CveContents = models.CveContents{}
|
||||
}
|
||||
for _, con := range nvds {
|
||||
if !con.Empty() {
|
||||
vinfo.CveContents[con.Type] = []models.CveContent{con}
|
||||
}
|
||||
}
|
||||
for _, con := range jvns {
|
||||
if !con.Empty() {
|
||||
found := false
|
||||
for _, cveCont := range vinfo.CveContents[con.Type] {
|
||||
if con.SourceLink == cveCont.SourceLink {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
vinfo.CveContents[con.Type] = append(vinfo.CveContents[con.Type], con)
|
||||
}
|
||||
}
|
||||
}
|
||||
vinfo.AlertDict = alerts
|
||||
vinfo.Exploits = append(vinfo.Exploits, exploits...)
|
||||
vinfo.Mitigations = append(vinfo.Mitigations, mitigations...)
|
||||
r.ScannedCves[cveID] = vinfo
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func fillCertAlerts(cvedetail *cvemodels.CveDetail) (dict models.AlertDict) {
|
||||
for _, nvd := range cvedetail.Nvds {
|
||||
for _, cert := range nvd.Certs {
|
||||
dict.USCERT = append(dict.USCERT, models.Alert{
|
||||
URL: cert.Link,
|
||||
Title: cert.Title,
|
||||
Team: "uscert",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, jvn := range cvedetail.Jvns {
|
||||
for _, cert := range jvn.Certs {
|
||||
dict.JPCERT = append(dict.JPCERT, models.Alert{
|
||||
URL: cert.Link,
|
||||
Title: cert.Title,
|
||||
Team: "jpcert",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return dict
|
||||
}
|
||||
|
||||
// detectPkgsCvesWithOval fetches OVAL database
|
||||
func detectPkgsCvesWithOval(cnf config.GovalDictConf, r *models.ScanResult) error {
|
||||
ovalClient, err := oval.NewOVALClient(r.Family, cnf)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if ovalClient == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
logging.Log.Debugf("Check if oval fetched: %s %s", r.Family, r.Release)
|
||||
ok, err := ovalClient.CheckIfOvalFetched(r.Family, r.Release)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !ok {
|
||||
if r.Family == constant.Debian {
|
||||
logging.Log.Infof("Skip OVAL and Scan with gost alone.")
|
||||
logging.Log.Infof("%s: %d CVEs are detected with OVAL", r.FormatServerName(), 0)
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("OVAL entries of %s %s are not found. Fetch OVAL before reporting. For details, see `https://github.com/vulsio/goval-dictionary#usage`", r.Family, r.Release)
|
||||
}
|
||||
|
||||
logging.Log.Debugf("Check if oval fresh: %s %s", r.Family, r.Release)
|
||||
_, err = ovalClient.CheckIfOvalFresh(r.Family, r.Release)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
logging.Log.Debugf("Fill with oval: %s %s", r.Family, r.Release)
|
||||
nCVEs, err := ovalClient.FillWithOval(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
logging.Log.Infof("%s: %d CVEs are detected with OVAL", r.FormatServerName(), nCVEs)
|
||||
return nil
|
||||
}
|
||||
|
||||
func detectPkgsCvesWithGost(cnf config.GostConf, r *models.ScanResult) error {
|
||||
client, err := gost.NewClient(cnf, r.Family)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to new a gost client: %w", err)
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := client.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close the gost DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
nCVEs, err := client.DetectCVEs(r, true)
|
||||
if err != nil {
|
||||
if r.Family == constant.Debian {
|
||||
return xerrors.Errorf("Failed to detect CVEs with gost: %w", err)
|
||||
}
|
||||
return xerrors.Errorf("Failed to detect unfixed CVEs with gost: %w", err)
|
||||
}
|
||||
|
||||
if r.Family == constant.Debian {
|
||||
logging.Log.Infof("%s: %d CVEs are detected with gost",
|
||||
r.FormatServerName(), nCVEs)
|
||||
} else {
|
||||
logging.Log.Infof("%s: %d unfixed CVEs are detected with gost",
|
||||
r.FormatServerName(), nCVEs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// DetectCpeURIsCves detects CVEs of given CPE-URIs
|
||||
func DetectCpeURIsCves(r *models.ScanResult, cpes []Cpe, cnf config.GoCveDictConf, logOpts logging.LogOpts) error {
|
||||
client, err := newGoCveDictClient(&cnf, logOpts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := client.closeDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
nCVEs := 0
|
||||
for _, cpe := range cpes {
|
||||
details, err := client.detectCveByCpeURI(cpe.CpeURI, cpe.UseJVN)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, detail := range details {
|
||||
advisories := []models.DistroAdvisory{}
|
||||
if !detail.HasNvd() && detail.HasJvn() {
|
||||
for _, jvn := range detail.Jvns {
|
||||
advisories = append(advisories, models.DistroAdvisory{
|
||||
AdvisoryID: jvn.JvnID,
|
||||
})
|
||||
}
|
||||
}
|
||||
maxConfidence := getMaxConfidence(detail)
|
||||
|
||||
if val, ok := r.ScannedCves[detail.CveID]; ok {
|
||||
val.CpeURIs = util.AppendIfMissing(val.CpeURIs, cpe.CpeURI)
|
||||
val.Confidences.AppendIfMissing(maxConfidence)
|
||||
val.DistroAdvisories = advisories
|
||||
r.ScannedCves[detail.CveID] = val
|
||||
} else {
|
||||
v := models.VulnInfo{
|
||||
CveID: detail.CveID,
|
||||
CpeURIs: []string{cpe.CpeURI},
|
||||
Confidences: models.Confidences{maxConfidence},
|
||||
DistroAdvisories: advisories,
|
||||
}
|
||||
r.ScannedCves[detail.CveID] = v
|
||||
nCVEs++
|
||||
}
|
||||
}
|
||||
}
|
||||
logging.Log.Infof("%s: %d CVEs are detected with CPE", r.FormatServerName(), nCVEs)
|
||||
return nil
|
||||
}
|
||||
|
||||
func getMaxConfidence(detail cvemodels.CveDetail) (max models.Confidence) {
|
||||
if !detail.HasNvd() && detail.HasJvn() {
|
||||
return models.JvnVendorProductMatch
|
||||
} else if detail.HasNvd() {
|
||||
for _, nvd := range detail.Nvds {
|
||||
confidence := models.Confidence{}
|
||||
switch nvd.DetectionMethod {
|
||||
case cvemodels.NvdExactVersionMatch:
|
||||
confidence = models.NvdExactVersionMatch
|
||||
case cvemodels.NvdRoughVersionMatch:
|
||||
confidence = models.NvdRoughVersionMatch
|
||||
case cvemodels.NvdVendorProductMatch:
|
||||
confidence = models.NvdVendorProductMatch
|
||||
}
|
||||
if max.Score < confidence.Score {
|
||||
max = confidence
|
||||
}
|
||||
}
|
||||
}
|
||||
return max
|
||||
}
|
||||
|
||||
// FillCweDict fills CWE
|
||||
func FillCweDict(r *models.ScanResult) {
|
||||
uniqCweIDMap := map[string]bool{}
|
||||
for _, vinfo := range r.ScannedCves {
|
||||
for _, conts := range vinfo.CveContents {
|
||||
for _, cont := range conts {
|
||||
for _, id := range cont.CweIDs {
|
||||
if strings.HasPrefix(id, "CWE-") {
|
||||
id = strings.TrimPrefix(id, "CWE-")
|
||||
uniqCweIDMap[id] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dict := map[string]models.CweDictEntry{}
|
||||
for id := range uniqCweIDMap {
|
||||
entry := models.CweDictEntry{}
|
||||
if e, ok := cwe.CweDictEn[id]; ok {
|
||||
if rank, ok := cwe.OwaspTopTen2017[id]; ok {
|
||||
entry.OwaspTopTen2017 = rank
|
||||
}
|
||||
if rank, ok := cwe.CweTopTwentyfive2019[id]; ok {
|
||||
entry.CweTopTwentyfive2019 = rank
|
||||
}
|
||||
if rank, ok := cwe.SansTopTwentyfive[id]; ok {
|
||||
entry.SansTopTwentyfive = rank
|
||||
}
|
||||
entry.En = &e
|
||||
} else {
|
||||
logging.Log.Debugf("CWE-ID %s is not found in English CWE Dict", id)
|
||||
entry.En = &cwe.Cwe{CweID: id}
|
||||
}
|
||||
|
||||
if r.Lang == "ja" {
|
||||
if e, ok := cwe.CweDictJa[id]; ok {
|
||||
if rank, ok := cwe.OwaspTopTen2017[id]; ok {
|
||||
entry.OwaspTopTen2017 = rank
|
||||
}
|
||||
if rank, ok := cwe.CweTopTwentyfive2019[id]; ok {
|
||||
entry.CweTopTwentyfive2019 = rank
|
||||
}
|
||||
if rank, ok := cwe.SansTopTwentyfive[id]; ok {
|
||||
entry.SansTopTwentyfive = rank
|
||||
}
|
||||
entry.Ja = &e
|
||||
} else {
|
||||
logging.Log.Debugf("CWE-ID %s is not found in Japanese CWE Dict", id)
|
||||
entry.Ja = &cwe.Cwe{CweID: id}
|
||||
}
|
||||
}
|
||||
dict[id] = entry
|
||||
}
|
||||
r.CweDict = dict
|
||||
return
|
||||
}
|
||||
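A quick illustration (not from the repository): one way the new detector.Detect entry point above might be driven once scan results have been loaded from a results directory and config.Conf has been populated, as the vuls CLI does. The loadResults helper and the directory path are hypothetical stand-ins; only detector.Detect(rs, dir) and its package path are taken from the code above.

package main

import (
	"log"

	"github.com/future-architect/vuls/detector"
	"github.com/future-architect/vuls/models"
)

// loadResults is a hypothetical stand-in for reading the per-server JSON
// files written by a previous `vuls scan` into ScanResult values.
func loadResults(dir string) ([]models.ScanResult, error) {
	return []models.ScanResult{}, nil
}

func main() {
	dir := "results/2021-01-01T00:00:00Z"
	rs, err := loadResults(dir)
	if err != nil {
		log.Fatal(err)
	}
	// Detect fills each result with CVEs from OVAL, gost, CPE/NVD/JVN, GitHub,
	// WordPress, exploit, Metasploit and KEV data, applies the configured
	// filters, and rewrites the JSON files under dir.
	filled, err := detector.Detect(rs, dir)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("filled %d results", len(filled))
}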
90
detector/detector_test.go
Normal file
@@ -0,0 +1,90 @@
//go:build !scanner
// +build !scanner

package detector

import (
	"reflect"
	"testing"

	"github.com/future-architect/vuls/models"
	cvemodels "github.com/vulsio/go-cve-dictionary/models"
)

func Test_getMaxConfidence(t *testing.T) {
	type args struct {
		detail cvemodels.CveDetail
	}
	tests := []struct {
		name    string
		args    args
		wantMax models.Confidence
	}{
		{
			name: "JvnVendorProductMatch",
			args: args{
				detail: cvemodels.CveDetail{
					Nvds: []cvemodels.Nvd{},
					Jvns: []cvemodels.Jvn{{}},
				},
			},
			wantMax: models.JvnVendorProductMatch,
		},
		{
			name: "NvdExactVersionMatch",
			args: args{
				detail: cvemodels.CveDetail{
					Nvds: []cvemodels.Nvd{
						{DetectionMethod: cvemodels.NvdRoughVersionMatch},
						{DetectionMethod: cvemodels.NvdVendorProductMatch},
						{DetectionMethod: cvemodels.NvdExactVersionMatch},
					},
					Jvns: []cvemodels.Jvn{{DetectionMethod: cvemodels.JvnVendorProductMatch}},
				},
			},
			wantMax: models.NvdExactVersionMatch,
		},
		{
			name: "NvdRoughVersionMatch",
			args: args{
				detail: cvemodels.CveDetail{
					Nvds: []cvemodels.Nvd{
						{DetectionMethod: cvemodels.NvdRoughVersionMatch},
						{DetectionMethod: cvemodels.NvdVendorProductMatch},
					},
					Jvns: []cvemodels.Jvn{},
				},
			},
			wantMax: models.NvdRoughVersionMatch,
		},
		{
			name: "NvdVendorProductMatch",
			args: args{
				detail: cvemodels.CveDetail{
					Nvds: []cvemodels.Nvd{
						{DetectionMethod: cvemodels.NvdVendorProductMatch},
					},
					Jvns: []cvemodels.Jvn{{DetectionMethod: cvemodels.JvnVendorProductMatch}},
				},
			},
			wantMax: models.NvdVendorProductMatch,
		},
		{
			name: "empty",
			args: args{
				detail: cvemodels.CveDetail{
					Nvds: []cvemodels.Nvd{},
					Jvns: []cvemodels.Jvn{},
				},
			},
			wantMax: models.Confidence{},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if gotMax := getMaxConfidence(tt.args.detail); !reflect.DeepEqual(gotMax, tt.wantMax) {
				t.Errorf("getMaxConfidence() = %v, want %v", gotMax, tt.wantMax)
			}
		})
	}
}
221
detector/exploitdb.go
Normal file
@@ -0,0 +1,221 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
exploitdb "github.com/vulsio/go-exploitdb/db"
|
||||
exploitmodels "github.com/vulsio/go-exploitdb/models"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// FillWithExploit fills exploit information that has in Exploit
|
||||
func FillWithExploit(r *models.ScanResult, cnf config.ExploitConf) (nExploitCve int, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
var cveIDs []string
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, _ := util.URLPathJoin(cnf.GetURL(), "cves")
|
||||
responses, err := getExploitsViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
for _, res := range responses {
|
||||
exps := []exploitmodels.Exploit{}
|
||||
if err := json.Unmarshal([]byte(res.json), &exps); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
exploits := ConvertToModelsExploit(exps)
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.Exploits = exploits
|
||||
}
|
||||
r.ScannedCves[res.request.cveID] = v
|
||||
nExploitCve++
|
||||
}
|
||||
} else {
|
||||
driver, locked, err := newExploitDB(&cnf)
|
||||
if locked {
|
||||
return 0, xerrors.Errorf("SQLite3 is locked: %s", cnf.GetSQLite3Path())
|
||||
} else if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer func() {
|
||||
if err := driver.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
es, err := driver.GetExploitByCveID(cveID)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if len(es) == 0 {
|
||||
continue
|
||||
}
|
||||
exploits := ConvertToModelsExploit(es)
|
||||
vuln.Exploits = exploits
|
||||
r.ScannedCves[cveID] = vuln
|
||||
nExploitCve++
|
||||
}
|
||||
}
|
||||
return nExploitCve, nil
|
||||
}
|
||||
|
||||
// ConvertToModelsExploit converts exploit model to vuls model
|
||||
func ConvertToModelsExploit(es []exploitmodels.Exploit) (exploits []models.Exploit) {
|
||||
for _, e := range es {
|
||||
var documentURL, shellURL *string
|
||||
if e.OffensiveSecurity != nil {
|
||||
os := e.OffensiveSecurity
|
||||
if os.Document != nil {
|
||||
documentURL = &os.Document.DocumentURL
|
||||
}
|
||||
if os.ShellCode != nil {
|
||||
shellURL = &os.ShellCode.ShellCodeURL
|
||||
}
|
||||
}
|
||||
exploit := models.Exploit{
|
||||
ExploitType: e.ExploitType,
|
||||
ID: e.ExploitUniqueID,
|
||||
URL: e.URL,
|
||||
Description: e.Description,
|
||||
DocumentURL: documentURL,
|
||||
ShellCodeURL: shellURL,
|
||||
}
|
||||
exploits = append(exploits, exploit)
|
||||
}
|
||||
return exploits
|
||||
}
|
||||
|
||||
type exploitResponse struct {
|
||||
request exploitRequest
|
||||
json string
|
||||
}
|
||||
|
||||
func getExploitsViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []exploitResponse, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan exploitRequest, nReq)
|
||||
resChan := make(chan exploitResponse, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- exploitRequest{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
req := <-reqChan
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGetExploit(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching Exploit")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch Exploit. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type exploitRequest struct {
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGetExploit(url string, req exploitRequest, resChan chan<- exploitResponse, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- exploitResponse{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
|
||||
|
||||
func newExploitDB(cnf config.VulnDictInterface) (driver exploitdb.DB, locked bool, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
return nil, false, nil
|
||||
}
|
||||
path := cnf.GetURL()
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
if driver, locked, err = exploitdb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), exploitdb.Option{}); err != nil {
|
||||
if locked {
|
||||
return nil, true, xerrors.Errorf("exploitDB is locked. err: %w", err)
|
||||
}
|
||||
return nil, false, err
|
||||
}
|
||||
return driver, false, nil
|
||||
}
|
||||
@@ -1,4 +1,7 @@
-package github
+//go:build !scanner
+// +build !scanner
+
+package detector
 
 import (
 	"bytes"
@@ -9,7 +12,6 @@ import (
 	"net/http"
 	"time"
 
-	"github.com/future-architect/vuls/config"
 	"github.com/future-architect/vuls/errof"
 	"github.com/future-architect/vuls/models"
 	"golang.org/x/oauth2"
@@ -17,8 +19,7 @@ import (
 
 // DetectGitHubSecurityAlerts access to owner/repo on GitHub and fetch security alerts of the repository via GitHub API v4 GraphQL and then set to the given ScanResult.
 // https://help.github.com/articles/about-security-alerts-for-vulnerable-dependencies/
-//TODO move to report
-func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string) (nCVEs int, err error) {
+func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string, ignoreDismissed bool) (nCVEs int, err error) {
 	src := oauth2.StaticTokenSource(
 		&oauth2.Token{AccessToken: token},
 	)
@@ -74,7 +75,7 @@ func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string)
 	}
 
 	for _, v := range alerts.Data.Repository.VulnerabilityAlerts.Edges {
-		if config.Conf.IgnoreGitHubDismissed && v.Node.DismissReason != "" {
+		if ignoreDismissed && v.Node.DismissReason != "" {
 			continue
 		}
 
@@ -125,7 +126,7 @@ func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string)
 
 		if val, ok := r.ScannedCves[cveID]; ok {
 			val.GitHubSecurityAlerts = val.GitHubSecurityAlerts.Add(m)
-			val.CveContents[models.GitHub] = cveContent
+			val.CveContents[models.GitHub] = []models.CveContent{cveContent}
 			r.ScannedCves[cveID] = val
 		} else {
 			v := models.VulnInfo{
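A short in-package sketch (not from the repository) of the new call shape: the dismissed-alert filter is now passed per call instead of being read from global config inside DetectGitHubSecurityAlerts. It mirrors DetectGitHubCves in detector/detector.go above; the wrapper name detectGitHubForServer is hypothetical, while the Token and IgnoreGitHubDismissed fields of config.GitHubConf are taken from that code.

//go:build !scanner

package detector

import (
	"strings"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/models"
	"golang.org/x/xerrors"
)

// detectGitHubForServer runs GitHub Security Alerts detection for every
// owner/repo configured for one server, passing the per-repository
// ignore-dismissed setting through the new argument.
func detectGitHubForServer(r *models.ScanResult, confs map[string]config.GitHubConf) error {
	for ownerRepo, setting := range confs {
		ss := strings.Split(ownerRepo, "/")
		if len(ss) != 2 {
			return xerrors.Errorf("Failed to parse GitHub owner/repo: %s", ownerRepo)
		}
		if _, err := DetectGitHubSecurityAlerts(r, ss[0], ss[1], setting.Token, setting.IgnoreGitHubDismissed); err != nil {
			return xerrors.Errorf("Failed to access GitHub Security Alerts: %w", err)
		}
	}
	return nil
}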
214
detector/kevuln.go
Normal file
@@ -0,0 +1,214 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
kevulndb "github.com/vulsio/go-kev/db"
|
||||
kevulnmodels "github.com/vulsio/go-kev/models"
|
||||
)
|
||||
|
||||
// FillWithKEVuln :
|
||||
func FillWithKEVuln(r *models.ScanResult, cnf config.KEVulnConf) error {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
var cveIDs []string
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, err := util.URLPathJoin(cnf.GetURL(), "cves")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
responses, err := getKEVulnsViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, res := range responses {
|
||||
kevulns := []kevulnmodels.KEVuln{}
|
||||
if err := json.Unmarshal([]byte(res.json), &kevulns); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
alerts := []models.Alert{}
|
||||
if len(kevulns) > 0 {
|
||||
alerts = append(alerts, models.Alert{
|
||||
Title: "Known Exploited Vulnerabilities Catalog",
|
||||
URL: "https://www.cisa.gov/known-exploited-vulnerabilities-catalog",
|
||||
Team: "cisa",
|
||||
})
|
||||
}
|
||||
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.AlertDict.CISA = alerts
|
||||
}
|
||||
r.ScannedCves[res.request.cveID] = v
|
||||
}
|
||||
} else {
|
||||
driver, locked, err := newKEVulnDB(&cnf)
|
||||
if locked {
|
||||
return xerrors.Errorf("SQLite3 is locked: %s", cnf.GetSQLite3Path())
|
||||
} else if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := driver.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
kevulns, err := driver.GetKEVulnByCveID(cveID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(kevulns) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
alerts := []models.Alert{}
|
||||
if len(kevulns) > 0 {
|
||||
alerts = append(alerts, models.Alert{
|
||||
Title: "Known Exploited Vulnerabilities Catalog",
|
||||
URL: "https://www.cisa.gov/known-exploited-vulnerabilities-catalog",
|
||||
Team: "cisa",
|
||||
})
|
||||
}
|
||||
|
||||
vuln.AlertDict.CISA = alerts
|
||||
r.ScannedCves[cveID] = vuln
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type kevulnResponse struct {
|
||||
request kevulnRequest
|
||||
json string
|
||||
}
|
||||
|
||||
func getKEVulnsViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []kevulnResponse, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan kevulnRequest, nReq)
|
||||
resChan := make(chan kevulnResponse, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- kevulnRequest{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
req := <-reqChan
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGetKEVuln(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching KEVuln")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch KEVuln. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type kevulnRequest struct {
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGetKEVuln(url string, req kevulnRequest, resChan chan<- kevulnResponse, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- kevulnResponse{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
|
||||
|
||||
func newKEVulnDB(cnf config.VulnDictInterface) (driver kevulndb.DB, locked bool, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
return nil, false, nil
|
||||
}
|
||||
path := cnf.GetURL()
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
if driver, locked, err = kevulndb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), kevulndb.Option{}); err != nil {
|
||||
if locked {
|
||||
return nil, true, xerrors.Errorf("kevulnDB is locked. err: %w", err)
|
||||
}
|
||||
return nil, false, err
|
||||
}
|
||||
return driver, false, nil
|
||||
}
|
||||
@@ -1,4 +1,7 @@
-package libmanager
+//go:build !scanner
+// +build !scanner
+
+package detector
 
 import (
 	"context"
@@ -12,13 +15,12 @@ import (
 	"golang.org/x/xerrors"
 	"k8s.io/utils/clock"
 
-	"github.com/future-architect/vuls/config"
+	"github.com/future-architect/vuls/logging"
 	"github.com/future-architect/vuls/models"
-	"github.com/future-architect/vuls/util"
 )
 
 // DetectLibsCves fills LibraryScanner information
-func DetectLibsCves(r *models.ScanResult) (err error) {
+func DetectLibsCves(r *models.ScanResult, cacheDir string, noProgress bool) (err error) {
 	totalCnt := 0
 	if len(r.LibraryScanners) == 0 {
 		return
@@ -30,12 +32,12 @@ func DetectLibsCves(r *models.ScanResult) (err error) {
 		return err
 	}
 
-	util.Log.Info("Updating library db...")
-	if err := downloadDB(config.Version, config.Conf.TrivyCacheDBDir, config.Conf.NoProgress, false, false); err != nil {
+	logging.Log.Info("Updating library db...")
+	if err := downloadDB("", cacheDir, noProgress, false, false); err != nil {
		return err
	}
 
-	if err := db2.Init(config.Conf.TrivyCacheDBDir); err != nil {
+	if err := db2.Init(cacheDir); err != nil {
 		return err
 	}
 	defer db2.Close()
@@ -57,7 +59,7 @@ func DetectLibsCves(r *models.ScanResult) (err error) {
 		totalCnt += len(vinfos)
 	}
 
-	util.Log.Infof("%s: %d CVEs are detected with Library",
+	logging.Log.Infof("%s: %d CVEs are detected with Library",
 		r.FormatServerName(), totalCnt)
 
 	return nil
@@ -72,8 +74,8 @@ func downloadDB(appVersion, cacheDir string, quiet, light, skipUpdate bool) erro
 	}
 
 	if needsUpdate {
-		util.Log.Info("Need to update DB")
-		util.Log.Info("Downloading DB...")
+		logging.Log.Info("Need to update DB")
+		logging.Log.Info("Downloading DB...")
 		if err := client.Download(ctx, cacheDir, light); err != nil {
 			return xerrors.Errorf("failed to download vulnerability DB: %w", err)
 		}
@@ -106,7 +108,7 @@ func showDBInfo(cacheDir string) error {
 	if err != nil {
 		return xerrors.Errorf("something wrong with DB: %w", err)
 	}
-	util.Log.Debugf("DB Schema: %d, Type: %d, UpdatedAt: %s, NextUpdate: %s",
+	logging.Log.Debugf("DB Schema: %d, Type: %d, UpdatedAt: %s, NextUpdate: %s",
 		metadata.Version, metadata.Type, metadata.UpdatedAt, metadata.NextUpdate)
 	return nil
 }
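For reference, the new DetectLibsCves signature takes the Trivy cache directory and progress flag from the caller instead of reading config.Conf inside this file. The call site in Detect (detector/detector.go above) shows the wiring; the snippet below is copied from that function.

if err := DetectLibsCves(&r, config.Conf.TrivyCacheDBDir, config.Conf.NoProgress); err != nil {
	return nil, xerrors.Errorf("Failed to fill with Library dependency: %w", err)
}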
217
detector/msf.go
Normal file
@@ -0,0 +1,217 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
metasploitdb "github.com/vulsio/go-msfdb/db"
|
||||
metasploitmodels "github.com/vulsio/go-msfdb/models"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// FillWithMetasploit fills metasploit module information that has in module
|
||||
func FillWithMetasploit(r *models.ScanResult, cnf config.MetasploitConf) (nMetasploitCve int, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
var cveIDs []string
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, err := util.URLPathJoin(cnf.GetURL(), "cves")
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
responses, err := getMetasploitsViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
for _, res := range responses {
|
||||
msfs := []metasploitmodels.Metasploit{}
|
||||
if err := json.Unmarshal([]byte(res.json), &msfs); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
metasploits := ConvertToModelsMsf(msfs)
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.Metasploits = metasploits
|
||||
}
|
||||
r.ScannedCves[res.request.cveID] = v
|
||||
nMetasploitCve++
|
||||
}
|
||||
} else {
|
||||
driver, locked, err := newMetasploitDB(&cnf)
|
||||
if locked {
|
||||
return 0, xerrors.Errorf("SQLite3 is locked: %s", cnf.GetSQLite3Path())
|
||||
} else if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer func() {
|
||||
if err := driver.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
ms, err := driver.GetModuleByCveID(cveID)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if len(ms) == 0 {
|
||||
continue
|
||||
}
|
||||
modules := ConvertToModelsMsf(ms)
|
||||
vuln.Metasploits = modules
|
||||
r.ScannedCves[cveID] = vuln
|
||||
nMetasploitCve++
|
||||
}
|
||||
}
|
||||
return nMetasploitCve, nil
|
||||
}
|
||||
|
||||
type metasploitResponse struct {
|
||||
request metasploitRequest
|
||||
json string
|
||||
}
|
||||
|
||||
func getMetasploitsViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []metasploitResponse, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan metasploitRequest, nReq)
|
||||
resChan := make(chan metasploitResponse, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- metasploitRequest{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
req := <-reqChan
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGetMetasploit(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching Metasploit")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch Metasploit. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type metasploitRequest struct {
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGetMetasploit(url string, req metasploitRequest, resChan chan<- metasploitResponse, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- metasploitResponse{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
|
||||
|
||||
// ConvertToModelsMsf converts metasploit model to vuls model
|
||||
func ConvertToModelsMsf(ms []metasploitmodels.Metasploit) (modules []models.Metasploit) {
|
||||
for _, m := range ms {
|
||||
var links []string
|
||||
if 0 < len(m.References) {
|
||||
for _, u := range m.References {
|
||||
links = append(links, u.Link)
|
||||
}
|
||||
}
|
||||
module := models.Metasploit{
|
||||
Name: m.Name,
|
||||
Title: m.Title,
|
||||
Description: m.Description,
|
||||
URLs: links,
|
||||
}
|
||||
modules = append(modules, module)
|
||||
}
|
||||
return modules
|
||||
}
|
||||
|
||||
func newMetasploitDB(cnf config.VulnDictInterface) (driver metasploitdb.DB, locked bool, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
return nil, false, nil
|
||||
}
|
||||
path := cnf.GetURL()
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
if driver, locked, err = metasploitdb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), metasploitdb.Option{}); err != nil {
|
||||
if locked {
|
||||
return nil, true, xerrors.Errorf("metasploitDB is locked. err: %w", err)
|
||||
}
|
||||
return nil, false, err
|
||||
}
|
||||
return driver, false, nil
|
||||
}
|
||||
274
detector/util.go
Normal file
@@ -0,0 +1,274 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
func reuseScannedCves(r *models.ScanResult) bool {
|
||||
switch r.Family {
|
||||
case constant.FreeBSD, constant.Raspbian:
|
||||
return true
|
||||
}
|
||||
return isTrivyResult(r)
|
||||
}
|
||||
|
||||
func isTrivyResult(r *models.ScanResult) bool {
|
||||
_, ok := r.Optional["trivy-target"]
|
||||
return ok
|
||||
}
|
||||
|
||||
func needToRefreshCve(r models.ScanResult) bool {
|
||||
for _, cve := range r.ScannedCves {
|
||||
if 0 < len(cve.CveContents) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func loadPrevious(currs models.ScanResults, resultsDir string) (prevs models.ScanResults, err error) {
|
||||
dirs, err := ListValidJSONDirs(resultsDir)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
for _, result := range currs {
|
||||
filename := result.ServerName + ".json"
|
||||
if result.Container.Name != "" {
|
||||
filename = fmt.Sprintf("%s@%s.json", result.Container.Name, result.ServerName)
|
||||
}
|
||||
for _, dir := range dirs[1:] {
|
||||
path := filepath.Join(dir, filename)
|
||||
r, err := loadOneServerScanResult(path)
|
||||
if err != nil {
|
||||
logging.Log.Debugf("%+v", err)
|
||||
continue
|
||||
}
|
||||
if r.Family == result.Family && r.Release == result.Release {
|
||||
prevs = append(prevs, *r)
|
||||
logging.Log.Infof("Previous json found: %s", path)
|
||||
break
|
||||
}
|
||||
logging.Log.Infof("Previous json is different family.Release: %s, pre: %s.%s cur: %s.%s",
|
||||
path, r.Family, r.Release, result.Family, result.Release)
|
||||
}
|
||||
}
|
||||
return prevs, nil
|
||||
}
|
||||
|
||||
func diff(curResults, preResults models.ScanResults, isPlus, isMinus bool) (diffed models.ScanResults) {
|
||||
for _, current := range curResults {
|
||||
found := false
|
||||
var previous models.ScanResult
|
||||
for _, r := range preResults {
|
||||
if current.ServerName == r.ServerName && current.Container.Name == r.Container.Name {
|
||||
found = true
|
||||
previous = r
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !found {
|
||||
diffed = append(diffed, current)
|
||||
continue
|
||||
}
|
||||
|
||||
cves := models.VulnInfos{}
|
||||
if isPlus {
|
||||
cves = getPlusDiffCves(previous, current)
|
||||
}
|
||||
if isMinus {
|
||||
minus := getMinusDiffCves(previous, current)
|
||||
if len(cves) == 0 {
|
||||
cves = minus
|
||||
} else {
|
||||
for k, v := range minus {
|
||||
cves[k] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
packages := models.Packages{}
|
||||
for _, s := range cves {
|
||||
for _, affected := range s.AffectedPackages {
|
||||
var p models.Package
|
||||
if s.DiffStatus == models.DiffPlus {
|
||||
p = current.Packages[affected.Name]
|
||||
} else {
|
||||
p = previous.Packages[affected.Name]
|
||||
}
|
||||
packages[affected.Name] = p
|
||||
}
|
||||
}
|
||||
current.ScannedCves = cves
|
||||
current.Packages = packages
|
||||
diffed = append(diffed, current)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func getPlusDiffCves(previous, current models.ScanResult) models.VulnInfos {
|
||||
previousCveIDsSet := map[string]bool{}
|
||||
for _, previousVulnInfo := range previous.ScannedCves {
|
||||
previousCveIDsSet[previousVulnInfo.CveID] = true
|
||||
}
|
||||
|
||||
new := models.VulnInfos{}
|
||||
updated := models.VulnInfos{}
|
||||
for _, v := range current.ScannedCves {
|
||||
if previousCveIDsSet[v.CveID] {
|
||||
if isCveInfoUpdated(v.CveID, previous, current) {
|
||||
v.DiffStatus = models.DiffPlus
|
||||
updated[v.CveID] = v
|
||||
logging.Log.Debugf("updated: %s", v.CveID)
|
||||
|
||||
// TODO commented out because a bug of diff logic when multiple oval defs found for a certain CVE-ID and same updated_at
|
||||
// if these OVAL defs have different affected packages, this logic detects as updated.
|
||||
// This logic will be uncommented after integration with gost https://github.com/vulsio/gost
|
||||
// } else if isCveFixed(v, previous) {
|
||||
// updated[v.CveID] = v
|
||||
// logging.Log.Debugf("fixed: %s", v.CveID)
|
||||
|
||||
} else {
|
||||
logging.Log.Debugf("same: %s", v.CveID)
|
||||
}
|
||||
} else {
|
||||
logging.Log.Debugf("new: %s", v.CveID)
|
||||
v.DiffStatus = models.DiffPlus
|
||||
new[v.CveID] = v
|
||||
}
|
||||
}
|
||||
|
||||
if len(updated) == 0 && len(new) == 0 {
|
||||
logging.Log.Infof("%s: There are %d vulnerabilities, but no difference between current result and previous one.", current.FormatServerName(), len(current.ScannedCves))
|
||||
}
|
||||
|
||||
for cveID, vuln := range new {
|
||||
updated[cveID] = vuln
|
||||
}
|
||||
return updated
|
||||
}
|
||||
|
||||
func getMinusDiffCves(previous, current models.ScanResult) models.VulnInfos {
|
||||
currentCveIDsSet := map[string]bool{}
|
||||
for _, currentVulnInfo := range current.ScannedCves {
|
||||
currentCveIDsSet[currentVulnInfo.CveID] = true
|
||||
}
|
||||
|
||||
clear := models.VulnInfos{}
|
||||
for _, v := range previous.ScannedCves {
|
||||
if !currentCveIDsSet[v.CveID] {
|
||||
v.DiffStatus = models.DiffMinus
|
||||
clear[v.CveID] = v
|
||||
logging.Log.Debugf("clear: %s", v.CveID)
|
||||
}
|
||||
}
|
||||
if len(clear) == 0 {
|
||||
logging.Log.Infof("%s: There are %d vulnerabilities, but no difference between current result and previous one.", current.FormatServerName(), len(current.ScannedCves))
|
||||
}
|
||||
|
||||
return clear
|
||||
}
|
||||
|
||||
func isCveInfoUpdated(cveID string, previous, current models.ScanResult) bool {
|
||||
cTypes := []models.CveContentType{
|
||||
models.Nvd,
|
||||
models.Jvn,
|
||||
models.NewCveContentType(current.Family),
|
||||
}
|
||||
|
||||
prevLastModified := map[models.CveContentType][]time.Time{}
|
||||
preVinfo, ok := previous.ScannedCves[cveID]
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
for _, cType := range cTypes {
|
||||
if conts, ok := preVinfo.CveContents[cType]; ok {
|
||||
for _, cont := range conts {
|
||||
prevLastModified[cType] = append(prevLastModified[cType], cont.LastModified)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
curLastModified := map[models.CveContentType][]time.Time{}
|
||||
curVinfo, ok := current.ScannedCves[cveID]
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
for _, cType := range cTypes {
|
||||
if conts, ok := curVinfo.CveContents[cType]; ok {
|
||||
for _, cont := range conts {
|
||||
curLastModified[cType] = append(curLastModified[cType], cont.LastModified)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, t := range cTypes {
|
||||
if !reflect.DeepEqual(curLastModified[t], prevLastModified[t]) {
|
||||
logging.Log.Debugf("%s LastModified not equal: \n%s\n%s",
|
||||
cveID, curLastModified[t], prevLastModified[t])
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// jsonDirPattern is file name pattern of JSON directory
|
||||
// 2016-11-16T10:43:28+09:00
|
||||
// 2016-11-16T10:43:28Z
|
||||
var jsonDirPattern = regexp.MustCompile(
|
||||
`^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:Z|[+-]\d{2}:\d{2})$`)
|
||||
|
||||
// ListValidJSONDirs returns valid json directory as array
|
||||
// Returned array is sorted so that recent directories are at the head
|
||||
func ListValidJSONDirs(resultsDir string) (dirs []string, err error) {
|
||||
var dirInfo []os.FileInfo
|
||||
if dirInfo, err = ioutil.ReadDir(resultsDir); err != nil {
|
||||
err = xerrors.Errorf("Failed to read %s: %w",
|
||||
config.Conf.ResultsDir, err)
|
||||
return
|
||||
}
|
||||
for _, d := range dirInfo {
|
||||
if d.IsDir() && jsonDirPattern.MatchString(d.Name()) {
|
||||
jsonDir := filepath.Join(resultsDir, d.Name())
|
||||
dirs = append(dirs, jsonDir)
|
||||
}
|
||||
}
|
||||
sort.Slice(dirs, func(i, j int) bool {
|
||||
return dirs[j] < dirs[i]
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// loadOneServerScanResult read JSON data of one server
|
||||
func loadOneServerScanResult(jsonFile string) (*models.ScanResult, error) {
|
||||
var (
|
||||
data []byte
|
||||
err error
|
||||
)
|
||||
if data, err = ioutil.ReadFile(jsonFile); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to read %s: %w", jsonFile, err)
|
||||
}
|
||||
result := &models.ScanResult{}
|
||||
if err := json.Unmarshal(data, result); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to parse %s: %w", jsonFile, err)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
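A small illustration (not from the repository) of the directory naming that loadPrevious and ListValidJSONDirs above rely on: result directories must match jsonDirPattern, i.e. RFC 3339 scan timestamps with either a zone offset or "Z". The regexp is copied from detector/util.go; the sample names are invented.

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as jsonDirPattern in detector/util.go.
var jsonDirPattern = regexp.MustCompile(
	`^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:Z|[+-]\d{2}:\d{2})$`)

func main() {
	for _, name := range []string{
		"2016-11-16T10:43:28+09:00", // valid: offset form
		"2016-11-16T10:43:28Z",      // valid: UTC form
		"2016-11-16",                // invalid: time part missing
		"latest",                    // invalid: not a timestamp
	} {
		fmt.Printf("%-26s matches=%v\n", name, jsonDirPattern.MatchString(name))
	}
}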
@@ -1,4 +1,7 @@
|
||||
package wordpress
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -10,8 +13,8 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
c "github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/errof"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
version "github.com/hashicorp/go-version"
|
||||
@@ -50,8 +53,7 @@ type References struct {
|
||||
|
||||
// DetectWordPressCves access to wpscan and fetch security alerts and then set to the given ScanResult.
|
||||
// https://wpscan.com/
|
||||
// TODO move to report
|
||||
func DetectWordPressCves(r *models.ScanResult, cnf *c.WpScanConf) (int, error) {
|
||||
func detectWordPressCves(r *models.ScanResult, cnf config.WpScanConf) (int, error) {
|
||||
if len(r.WordPressPackages) == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
@@ -62,7 +64,7 @@ func DetectWordPressCves(r *models.ScanResult, cnf *c.WpScanConf) (int, error) {
|
||||
fmt.Sprintf("Failed to get WordPress core version."))
|
||||
}
|
||||
url := fmt.Sprintf("https://wpscan.com/api/v3/wordpresses/%s", ver)
|
||||
wpVinfos, err := wpscan(url, ver, cnf.Token)
|
||||
wpVinfos, err := wpscan(url, ver, cnf.Token, true)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -74,7 +76,7 @@ func DetectWordPressCves(r *models.ScanResult, cnf *c.WpScanConf) (int, error) {
|
||||
}
|
||||
for _, p := range themes {
|
||||
url := fmt.Sprintf("https://wpscan.com/api/v3/themes/%s", p.Name)
|
||||
candidates, err := wpscan(url, p.Name, cnf.Token)
|
||||
candidates, err := wpscan(url, p.Name, cnf.Token, false)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -89,7 +91,7 @@ func DetectWordPressCves(r *models.ScanResult, cnf *c.WpScanConf) (int, error) {
|
||||
}
|
||||
for _, p := range plugins {
|
||||
url := fmt.Sprintf("https://wpscan.com/api/v3/plugins/%s", p.Name)
|
||||
candidates, err := wpscan(url, p.Name, cnf.Token)
|
||||
candidates, err := wpscan(url, p.Name, cnf.Token, false)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -111,13 +113,16 @@ func DetectWordPressCves(r *models.ScanResult, cnf *c.WpScanConf) (int, error) {
|
||||
return len(wpVinfos), nil
|
||||
}
|
||||
|
||||
func wpscan(url, name, token string) (vinfos []models.VulnInfo, err error) {
|
||||
func wpscan(url, name, token string, isCore bool) (vinfos []models.VulnInfo, err error) {
|
||||
body, err := httpRequest(url, token)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if body == "" {
|
||||
util.Log.Debugf("wpscan.com response body is empty. URL: %s", url)
|
||||
logging.Log.Debugf("wpscan.com response body is empty. URL: %s", url)
|
||||
}
|
||||
if isCore {
|
||||
name = "core"
|
||||
}
|
||||
return convertToVinfos(name, body)
|
||||
}
|
||||
@@ -127,17 +132,17 @@ func detect(installed models.WpPackage, candidates []models.VulnInfo) (vulns []m
|
||||
for _, fixstat := range v.WpPackageFixStats {
|
||||
ok, err := match(installed.Version, fixstat.FixedIn)
|
||||
if err != nil {
|
||||
util.Log.Errorf("Failed to compare versions %s installed: %s, fixedIn: %s, v: %+v",
|
||||
logging.Log.Warnf("Failed to compare versions %s installed: %s, fixedIn: %s, v: %+v",
|
||||
installed.Name, installed.Version, fixstat.FixedIn, v)
|
||||
// continue scanning
|
||||
continue
|
||||
}
|
||||
if ok {
|
||||
vulns = append(vulns, v)
|
||||
util.Log.Debugf("Affected: %s installed: %s, fixedIn: %s",
|
||||
logging.Log.Debugf("Affected: %s installed: %s, fixedIn: %s",
|
||||
installed.Name, installed.Version, fixstat.FixedIn)
|
||||
} else {
|
||||
util.Log.Debugf("Not affected: %s : %s, fixedIn: %s",
|
||||
logging.Log.Debugf("Not affected: %s : %s, fixedIn: %s",
|
||||
installed.Name, installed.Version, fixstat.FixedIn)
|
||||
}
|
||||
}
|
||||
@@ -252,7 +257,7 @@ func httpRequest(url, token string) (string, error) {
|
||||
return "", errof.New(errof.ErrWpScanAPILimitExceeded,
|
||||
fmt.Sprintf("wpscan.com API limit exceeded: %+v", resp.Status))
|
||||
} else {
|
||||
util.Log.Warnf("wpscan.com unknown status code: %+v", resp.Status)
|
||||
logging.Log.Warnf("wpscan.com unknown status code: %+v", resp.Status)
|
||||
return "", nil
|
||||
}
|
||||
}
|
||||
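The match() helper called in the detect() hunk above is not shown in this diff; it compares an installed WordPress package version against the advisory's FixedIn value. For readers unfamiliar with hashicorp/go-version (imported as `version` at the top of this file), a minimal self-contained sketch of that kind of fixed-in comparison could look like the following. matchSketch and the version strings are illustrative assumptions, not the project's actual implementation.

```go
package main

import (
	"fmt"

	version "github.com/hashicorp/go-version"
)

// matchSketch mirrors the kind of check the unexported match() helper presumably performs:
// a package is considered affected while its installed version is below the FixedIn version.
func matchSketch(installedVer, fixedIn string) (bool, error) {
	iv, err := version.NewVersion(installedVer)
	if err != nil {
		return false, err
	}
	fv, err := version.NewVersion(fixedIn)
	if err != nil {
		return false, err
	}
	return iv.LessThan(fv), nil
}

func main() {
	// Hypothetical plugin versions for illustration.
	affected, err := matchSketch("5.7.1", "5.7.2")
	if err != nil {
		panic(err)
	}
	fmt.Println(affected) // true: 5.7.1 < 5.7.2, so the fix is not yet installed
}
```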
@@ -1,4 +1,7 @@
|
||||
package wordpress
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
@@ -1,85 +0,0 @@
|
||||
// +build !scanner
|
||||
|
||||
package exploit
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/vulsio/go-exploitdb/db"
|
||||
exploitmodels "github.com/vulsio/go-exploitdb/models"
|
||||
)
|
||||
|
||||
// FillWithExploit fills in the exploit information stored in go-exploitdb
|
||||
func FillWithExploit(driver db.DB, r *models.ScanResult, cnf *config.ExploitConf) (nExploitCve int, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
var cveIDs []string
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, _ := util.URLPathJoin(cnf.URL, "cves")
|
||||
responses, err := getCvesViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
for _, res := range responses {
|
||||
exps := []*exploitmodels.Exploit{}
|
||||
if err := json.Unmarshal([]byte(res.json), &exps); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
exploits := ConvertToModels(exps)
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.Exploits = exploits
|
||||
}
|
||||
r.ScannedCves[res.request.cveID] = v
|
||||
nExploitCve++
|
||||
}
|
||||
} else {
|
||||
if driver == nil {
|
||||
return 0, nil
|
||||
}
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
es := driver.GetExploitByCveID(cveID)
|
||||
if len(es) == 0 {
|
||||
continue
|
||||
}
|
||||
exploits := ConvertToModels(es)
|
||||
vuln.Exploits = exploits
|
||||
r.ScannedCves[cveID] = vuln
|
||||
nExploitCve++
|
||||
}
|
||||
}
|
||||
return nExploitCve, nil
|
||||
}
|
||||
|
||||
// ConvertToModels converts gost model to vuls model
|
||||
func ConvertToModels(es []*exploitmodels.Exploit) (exploits []models.Exploit) {
|
||||
for _, e := range es {
|
||||
var documentURL, shellURL *string
|
||||
if e.OffensiveSecurity != nil {
|
||||
os := e.OffensiveSecurity
|
||||
if os.Document != nil {
|
||||
documentURL = &os.Document.DocumentURL
|
||||
}
|
||||
if os.ShellCode != nil {
|
||||
shellURL = &os.ShellCode.ShellCodeURL
|
||||
}
|
||||
}
|
||||
exploit := models.Exploit{
|
||||
ExploitType: e.ExploitType,
|
||||
ID: e.ExploitUniqueID,
|
||||
URL: e.URL,
|
||||
Description: e.Description,
|
||||
DocumentURL: documentURL,
|
||||
ShellCodeURL: shellURL,
|
||||
}
|
||||
exploits = append(exploits, exploit)
|
||||
}
|
||||
return exploits
|
||||
}
|
||||
115
exploit/util.go
@@ -1,115 +0,0 @@
|
||||
package exploit
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
type response struct {
|
||||
request request
|
||||
json string
|
||||
}
|
||||
|
||||
func getCvesViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []response, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan request, nReq)
|
||||
resChan := make(chan response, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- request{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
select {
|
||||
case req := <-reqChan:
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
util.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGet(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching OVAL")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch OVAL. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type request struct {
|
||||
osMajorVersion string
|
||||
packName string
|
||||
isSrcPack bool
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGet(url string, req request, resChan chan<- response, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %s", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
util.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %s", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- response{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
|
||||
158
go.mod
@@ -1,24 +1,34 @@
|
||||
module github.com/future-architect/vuls
|
||||
|
||||
go 1.15
|
||||
go 1.17
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go v50.2.0+incompatible
|
||||
github.com/BurntSushi/toml v0.3.1
|
||||
github.com/aquasecurity/fanal v0.0.0-20210119051230-28c249da7cfd
|
||||
github.com/aquasecurity/trivy v0.15.0
|
||||
github.com/aquasecurity/trivy-db v0.0.0-20210121143430-2a5c54036a86
|
||||
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef
|
||||
github.com/aws/aws-sdk-go v1.36.31
|
||||
github.com/BurntSushi/toml v0.4.1
|
||||
github.com/Ullaakut/nmap/v2 v2.1.2-0.20210406060955-59a52fe80a4f
|
||||
github.com/VividCortex/ewma v1.2.0 // indirect
|
||||
github.com/aquasecurity/fanal v0.0.0-20211224205755-c94f68b6d71a
|
||||
github.com/aquasecurity/go-dep-parser v0.0.0-20220110153540-4a30ebc4b509
|
||||
github.com/aquasecurity/trivy v0.22.0
|
||||
github.com/aquasecurity/trivy-db v0.0.0-20210916043317-726b7b72a47b
|
||||
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d
|
||||
github.com/aws/aws-sdk-go v1.42.0
|
||||
github.com/boltdb/bolt v1.3.1
|
||||
github.com/briandowns/spinner v1.16.0 // indirect
|
||||
github.com/cenkalti/backoff v2.2.1+incompatible
|
||||
github.com/cespare/xxhash/v2 v2.1.2 // indirect
|
||||
github.com/cheggaaa/pb/v3 v3.0.8 // indirect
|
||||
github.com/d4l3k/messagediff v1.2.2-0.20190829033028-7e0a312ae40b
|
||||
github.com/emersion/go-sasl v0.0.0-20200509203442-7bfe0ed36a21
|
||||
github.com/emersion/go-smtp v0.14.0
|
||||
github.com/fatih/color v1.13.0 // indirect
|
||||
github.com/fsnotify/fsnotify v1.5.1 // indirect
|
||||
github.com/go-redis/redis/v8 v8.11.4 // indirect
|
||||
github.com/go-stack/stack v1.8.1 // indirect
|
||||
github.com/google/subcommands v1.2.0
|
||||
github.com/gosuri/uitable v0.0.4
|
||||
github.com/hashicorp/go-uuid v1.0.2
|
||||
github.com/hashicorp/go-version v1.2.1
|
||||
github.com/hashicorp/go-version v1.3.0
|
||||
github.com/howeyc/gopass v0.0.0-20190910152052-7cb4b85ec19c
|
||||
github.com/jesseduffield/gocui v0.3.0
|
||||
github.com/k0kubun/pp v3.0.1+incompatible
|
||||
@@ -26,25 +36,135 @@ require (
|
||||
github.com/knqyf263/go-cpe v0.0.0-20201213041631-54f6ab28673f
|
||||
github.com/knqyf263/go-deb-version v0.0.0-20190517075300-09fca494f03d
|
||||
github.com/knqyf263/go-rpm-version v0.0.0-20170716094938-74609b86c936
|
||||
github.com/knqyf263/gost v0.1.7
|
||||
github.com/kotakanbe/go-cve-dictionary v0.5.7
|
||||
github.com/kotakanbe/go-pingscanner v0.1.0
|
||||
github.com/kotakanbe/goval-dictionary v0.3.1
|
||||
github.com/kotakanbe/logrus-prefixed-formatter v0.0.0-20180123152602-928f7356cb96
|
||||
github.com/mattn/go-isatty v0.0.14 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.13 // indirect
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
|
||||
github.com/mitchellh/go-homedir v1.1.0
|
||||
github.com/nlopes/slack v0.6.0
|
||||
github.com/nsf/termbox-go v0.0.0-20200418040025-38ba6e5628f1 // indirect
|
||||
github.com/olekukonko/tablewriter v0.0.4
|
||||
github.com/olekukonko/tablewriter v0.0.5
|
||||
github.com/parnurzeal/gorequest v0.2.16
|
||||
github.com/pelletier/go-toml v1.9.4 // indirect
|
||||
github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5
|
||||
github.com/sirupsen/logrus v1.7.0
|
||||
github.com/spf13/afero v1.5.1
|
||||
github.com/spf13/cobra v1.1.1
|
||||
github.com/takuzoo3868/go-msfdb v0.1.3
|
||||
github.com/vulsio/go-exploitdb v0.1.4
|
||||
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad
|
||||
golang.org/x/oauth2 v0.0.0-20210125201302-af13f521f196
|
||||
github.com/sirupsen/logrus v1.8.1
|
||||
github.com/spf13/afero v1.7.0
|
||||
github.com/spf13/cast v1.4.1 // indirect
|
||||
github.com/spf13/cobra v1.3.0
|
||||
github.com/vulsio/go-cve-dictionary v0.8.2-0.20211028094424-0a854f8e8f85
|
||||
github.com/vulsio/go-exploitdb v0.4.2-0.20211028071949-1ebf9c4f6c4d
|
||||
github.com/vulsio/go-kev v0.1.0
|
||||
github.com/vulsio/go-msfdb v0.2.1-0.20211028071756-4a9759bd9f14
|
||||
github.com/vulsio/gost v0.4.1-0.20211028071837-7ad032a6ffa8
|
||||
github.com/vulsio/goval-dictionary v0.6.1-0.20211224012144-554a54938173
|
||||
golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa // indirect
|
||||
golang.org/x/net v0.0.0-20211206223403-eba003a116a9 // indirect
|
||||
golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
|
||||
golang.org/x/text v0.3.7 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1
|
||||
gopkg.in/ini.v1 v1.66.2 // indirect
|
||||
gorm.io/driver/mysql v1.2.1 // indirect
|
||||
gorm.io/driver/postgres v1.2.3 // indirect
|
||||
gorm.io/driver/sqlite v1.2.6 // indirect
|
||||
k8s.io/utils v0.0.0-20210111153108-fddb29f9d009
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.99.0 // indirect
|
||||
cloud.google.com/go/storage v1.18.2 // indirect
|
||||
github.com/Azure/go-autorest v14.2.0+incompatible // indirect
|
||||
github.com/Azure/go-autorest/autorest v0.11.1 // indirect
|
||||
github.com/Azure/go-autorest/autorest/adal v0.9.5 // indirect
|
||||
github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
|
||||
github.com/Azure/go-autorest/logger v0.2.0 // indirect
|
||||
github.com/Azure/go-autorest/tracing v0.6.0 // indirect
|
||||
github.com/Masterminds/goutils v1.1.1 // indirect
|
||||
github.com/Masterminds/semver v1.5.0 // indirect
|
||||
github.com/Masterminds/sprig v2.22.0+incompatible // indirect
|
||||
github.com/PuerkitoBio/goquery v1.7.1 // indirect
|
||||
github.com/andybalholm/cascadia v1.3.1 // indirect
|
||||
github.com/aquasecurity/go-gem-version v0.0.0-20201115065557-8eed6fe000ce // indirect
|
||||
github.com/aquasecurity/go-npm-version v0.0.0-20201110091526-0b796d180798 // indirect
|
||||
github.com/aquasecurity/go-pep440-version v0.0.0-20210121094942-22b2f8951d46 // indirect
|
||||
github.com/aquasecurity/go-version v0.0.0-20210121072130-637058cfe492 // indirect
|
||||
github.com/caarlos0/env/v6 v6.0.0 // indirect
|
||||
github.com/census-instrumentation/opencensus-proto v0.3.0 // indirect
|
||||
github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4 // indirect
|
||||
github.com/cncf/xds/go v0.0.0-20211216145620-d92e9ce0af51 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/envoyproxy/go-control-plane v0.10.1 // indirect
|
||||
github.com/envoyproxy/protoc-gen-validate v0.6.2 // indirect
|
||||
github.com/form3tech-oss/jwt-go v3.2.2+incompatible // indirect
|
||||
github.com/go-sql-driver/mysql v1.6.0 // indirect
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
|
||||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/google/go-cmp v0.5.6 // indirect
|
||||
github.com/google/go-containerregistry v0.6.0 // indirect
|
||||
github.com/google/go-github/v33 v33.0.0 // indirect
|
||||
github.com/google/go-querystring v1.0.0 // indirect
|
||||
github.com/google/uuid v1.3.0 // indirect
|
||||
github.com/google/wire v0.4.0 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.1.1 // indirect
|
||||
github.com/googleapis/google-cloud-go-testing v0.0.0-20210719221736-1c9a4c676720 // indirect
|
||||
github.com/gorilla/websocket v1.4.2 // indirect
|
||||
github.com/grokify/html-strip-tags-go v0.0.1 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/go-retryablehttp v0.7.0 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/htcat/htcat v1.0.2 // indirect
|
||||
github.com/huandu/xstrings v1.3.2 // indirect
|
||||
github.com/imdario/mergo v0.3.12 // indirect
|
||||
github.com/inconshreveable/log15 v0.0.0-20201112154412-8562bdadbbac // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.0 // indirect
|
||||
github.com/jackc/chunkreader/v2 v2.0.1 // indirect
|
||||
github.com/jackc/pgconn v1.10.1 // indirect
|
||||
github.com/jackc/pgio v1.0.0 // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgproto3/v2 v2.2.0 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
|
||||
github.com/jackc/pgtype v1.9.1 // indirect
|
||||
github.com/jackc/pgx/v4 v4.14.1 // indirect
|
||||
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||
github.com/jinzhu/now v1.1.4 // indirect
|
||||
github.com/jmespath/go-jmespath v0.4.0 // indirect
|
||||
github.com/magiconair/properties v1.8.5 // indirect
|
||||
github.com/masahiro331/go-mvn-version v0.0.0-20210429150710-d3157d602a08 // indirect
|
||||
github.com/mattn/go-colorable v0.1.12 // indirect
|
||||
github.com/mattn/go-sqlite3 v1.14.9 // indirect
|
||||
github.com/mitchellh/copystructure v1.1.1 // indirect
|
||||
github.com/mitchellh/mapstructure v1.4.3 // indirect
|
||||
github.com/mitchellh/reflectwalk v1.0.1 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/rivo/uniseg v0.2.0 // indirect
|
||||
github.com/satori/go.uuid v1.2.0 // indirect
|
||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/spf13/viper v1.10.0 // indirect
|
||||
github.com/stretchr/objx v0.3.0 // indirect
|
||||
github.com/stretchr/testify v1.7.0 // indirect
|
||||
github.com/subosito/gotenv v1.2.0 // indirect
|
||||
github.com/ymomoi/goval-parser v0.0.0-20170813122243-0a0be1dd9d08 // indirect
|
||||
go.etcd.io/bbolt v1.3.6 // indirect
|
||||
go.opencensus.io v0.23.0 // indirect
|
||||
go.uber.org/atomic v1.7.0 // indirect
|
||||
go.uber.org/multierr v1.6.0 // indirect
|
||||
go.uber.org/zap v1.20.0 // indirect
|
||||
golang.org/x/sys v0.0.0-20211210111614-af8b64212486 // indirect
|
||||
golang.org/x/term v0.0.0-20201210144234-2321bbc49cbf // indirect
|
||||
google.golang.org/api v0.63.0 // indirect
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
google.golang.org/genproto v0.0.0-20211223182754-3ac035c7e7cb // indirect
|
||||
google.golang.org/grpc v1.43.0 // indirect
|
||||
google.golang.org/protobuf v1.27.1 // indirect
|
||||
gopkg.in/cheggaaa/pb.v1 v1.0.28 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
|
||||
gorm.io/gorm v1.22.4 // indirect
|
||||
moul.io/http2curl v1.0.0 // indirect
|
||||
)
|
||||
|
||||
17
gost/base.go
@@ -1,17 +0,0 @@
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/knqyf263/gost/db"
|
||||
)
|
||||
|
||||
// Base is a base struct
|
||||
type Base struct {
|
||||
}
|
||||
|
||||
// FillCVEsWithRedHat fills cve information that has in Gost
|
||||
func (b Base) FillCVEsWithRedHat(driver db.DB, r *models.ScanResult) error {
|
||||
return RedHat{}.fillCvesWithRedHatAPI(driver, r)
|
||||
}
|
||||
188
gost/debian.go
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
@@ -5,11 +6,12 @@ package gost
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/knqyf263/gost/db"
|
||||
gostmodels "github.com/knqyf263/gost/models"
|
||||
debver "github.com/knqyf263/go-deb-version"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// Debian is Gost client for Debian GNU/Linux
|
||||
@@ -21,6 +23,7 @@ type packCves struct {
|
||||
packName string
|
||||
isSrcPack bool
|
||||
cves []models.CveContent
|
||||
fixes models.PackageFixStatuses
|
||||
}
|
||||
|
||||
func (deb Debian) supported(major string) bool {
|
||||
@@ -28,23 +31,23 @@ func (deb Debian) supported(major string) bool {
|
||||
"8": "jessie",
|
||||
"9": "stretch",
|
||||
"10": "buster",
|
||||
"11": "bullseye",
|
||||
}[major]
|
||||
return ok
|
||||
}
|
||||
|
||||
// DetectUnfixed fills cve information that has in Gost
|
||||
func (deb Debian) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (nCVEs int, err error) {
|
||||
// DetectCVEs fills the CVE information stored in Gost
|
||||
func (deb Debian) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error) {
|
||||
if !deb.supported(major(r.Release)) {
|
||||
// only logging
|
||||
util.Log.Warnf("Debian %s is not supported yet", r.Release)
|
||||
logging.Log.Warnf("Debian %s is not supported yet", r.Release)
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
linuxImage := "linux-image-" + r.RunningKernel.Release
|
||||
// Add linux and set the version of running kernel to search OVAL.
|
||||
// Add linux and set the version of running kernel to search Gost.
|
||||
if r.Container.ContainerID == "" {
|
||||
newVer := ""
|
||||
if p, ok := r.Packages[linuxImage]; ok {
|
||||
if p, ok := r.Packages["linux-image-"+r.RunningKernel.Release]; ok {
|
||||
newVer = p.NewVersion
|
||||
}
|
||||
r.Packages["linux"] = models.Package{
|
||||
@@ -54,18 +57,35 @@ func (deb Debian) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (nCV
|
||||
}
|
||||
}
|
||||
|
||||
// Debian Security Tracker does not support Package for Raspbian, so skip it.
|
||||
var scanResult models.ScanResult
|
||||
if r.Family != config.Raspbian {
|
||||
scanResult = *r
|
||||
} else {
|
||||
scanResult = r.RemoveRaspbianPackFromResult()
|
||||
stashLinuxPackage := r.Packages["linux"]
|
||||
nFixedCVEs, err := deb.detectCVEsWithFixState(r, "resolved")
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
r.Packages["linux"] = stashLinuxPackage
|
||||
nUnfixedCVEs, err := deb.detectCVEsWithFixState(r, "open")
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return (nFixedCVEs + nUnfixedCVEs), nil
|
||||
}
|
||||
|
||||
func (deb Debian) detectCVEsWithFixState(r *models.ScanResult, fixStatus string) (nCVEs int, err error) {
|
||||
if fixStatus != "resolved" && fixStatus != "open" {
|
||||
return 0, xerrors.Errorf(`Failed to detectCVEsWithFixState. fixStatus is not allowed except "open" and "resolved"(actual: fixStatus -> %s).`, fixStatus)
|
||||
}
|
||||
|
||||
packCvesList := []packCves{}
|
||||
if config.Conf.Gost.IsFetchViaHTTP() {
|
||||
url, _ := util.URLPathJoin(config.Conf.Gost.URL, "debian", major(scanResult.Release), "pkgs")
|
||||
responses, err := getAllUnfixedCvesViaHTTP(r, url)
|
||||
if deb.DBDriver.Cnf.IsFetchViaHTTP() {
|
||||
url, _ := util.URLPathJoin(deb.DBDriver.Cnf.GetURL(), "debian", major(r.Release), "pkgs")
|
||||
s := "unfixed-cves"
|
||||
if fixStatus == "resolved" {
|
||||
s = "fixed-cves"
|
||||
}
|
||||
|
||||
responses, err := getCvesWithFixStateViaHTTP(r, url, s)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -76,43 +96,46 @@ func (deb Debian) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (nCV
|
||||
return 0, err
|
||||
}
|
||||
cves := []models.CveContent{}
|
||||
fixes := []models.PackageFixStatus{}
|
||||
for _, debcve := range debCves {
|
||||
cves = append(cves, *deb.ConvertToModel(&debcve))
|
||||
fixes = append(fixes, checkPackageFixStatus(&debcve)...)
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: res.request.packName,
|
||||
isSrcPack: res.request.isSrcPack,
|
||||
cves: cves,
|
||||
fixes: fixes,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
if driver == nil {
|
||||
if deb.DBDriver.DB == nil {
|
||||
return 0, nil
|
||||
}
|
||||
for _, pack := range scanResult.Packages {
|
||||
cveDebs := driver.GetUnfixedCvesDebian(major(scanResult.Release), pack.Name)
|
||||
cves := []models.CveContent{}
|
||||
for _, cveDeb := range cveDebs {
|
||||
cves = append(cves, *deb.ConvertToModel(&cveDeb))
|
||||
for _, pack := range r.Packages {
|
||||
cves, fixes, err := deb.getCvesDebianWithfixStatus(fixStatus, major(r.Release), pack.Name)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: false,
|
||||
cves: cves,
|
||||
fixes: fixes,
|
||||
})
|
||||
}
|
||||
|
||||
// SrcPack
|
||||
for _, pack := range scanResult.SrcPackages {
|
||||
cveDebs := driver.GetUnfixedCvesDebian(major(scanResult.Release), pack.Name)
|
||||
cves := []models.CveContent{}
|
||||
for _, cveDeb := range cveDebs {
|
||||
cves = append(cves, *deb.ConvertToModel(&cveDeb))
|
||||
for _, pack := range r.SrcPackages {
|
||||
cves, fixes, err := deb.getCvesDebianWithfixStatus(fixStatus, major(r.Release), pack.Name)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: true,
|
||||
cves: cves,
|
||||
fixes: fixes,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -120,13 +143,14 @@ func (deb Debian) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (nCV
|
||||
delete(r.Packages, "linux")
|
||||
|
||||
for _, p := range packCvesList {
|
||||
for _, cve := range p.cves {
|
||||
for i, cve := range p.cves {
|
||||
v, ok := r.ScannedCves[cve.CveID]
|
||||
if ok {
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(cve)
|
||||
} else {
|
||||
v.CveContents[models.DebianSecurityTracker] = cve
|
||||
v.CveContents[models.DebianSecurityTracker] = []models.CveContent{cve}
|
||||
v.Confidences = models.Confidences{models.DebianSecurityTrackerMatch}
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
@@ -134,6 +158,31 @@ func (deb Debian) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (nCV
|
||||
CveContents: models.NewCveContents(cve),
|
||||
Confidences: models.Confidences{models.DebianSecurityTrackerMatch},
|
||||
}
|
||||
|
||||
if fixStatus == "resolved" {
|
||||
versionRelease := ""
|
||||
if p.isSrcPack {
|
||||
versionRelease = r.SrcPackages[p.packName].Version
|
||||
} else {
|
||||
versionRelease = r.Packages[p.packName].FormatVer()
|
||||
}
|
||||
|
||||
if versionRelease == "" {
|
||||
break
|
||||
}
|
||||
|
||||
affected, err := isGostDefAffected(versionRelease, p.fixes[i].FixedIn)
|
||||
if err != nil {
|
||||
logging.Log.Debugf("Failed to parse versions: %s, Ver: %s, Gost: %s",
|
||||
err, versionRelease, p.fixes[i].FixedIn)
|
||||
continue
|
||||
}
|
||||
|
||||
if !affected {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
nCVEs++
|
||||
}
|
||||
|
||||
@@ -148,25 +197,69 @@ func (deb Debian) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (nCV
|
||||
}
|
||||
} else {
|
||||
if p.packName == "linux" {
|
||||
names = append(names, linuxImage)
|
||||
names = append(names, "linux-image-"+r.RunningKernel.Release)
|
||||
} else {
|
||||
names = append(names, p.packName)
|
||||
}
|
||||
}
|
||||
|
||||
for _, name := range names {
|
||||
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
|
||||
Name: name,
|
||||
FixState: "open",
|
||||
NotFixedYet: true,
|
||||
})
|
||||
if fixStatus == "resolved" {
|
||||
for _, name := range names {
|
||||
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
|
||||
Name: name,
|
||||
FixedIn: p.fixes[i].FixedIn,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
for _, name := range names {
|
||||
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
|
||||
Name: name,
|
||||
FixState: "open",
|
||||
NotFixedYet: true,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
r.ScannedCves[cve.CveID] = v
|
||||
}
|
||||
}
|
||||
|
||||
return nCVEs, nil
|
||||
}
|
||||
|
||||
func isGostDefAffected(versionRelease, gostVersion string) (affected bool, err error) {
|
||||
vera, err := debver.NewVersion(versionRelease)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
verb, err := debver.NewVersion(gostVersion)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return vera.LessThan(verb), nil
|
||||
}
|
||||
|
||||
func (deb Debian) getCvesDebianWithfixStatus(fixStatus, release, pkgName string) ([]models.CveContent, []models.PackageFixStatus, error) {
|
||||
var f func(string, string) (map[string]gostmodels.DebianCVE, error)
|
||||
if fixStatus == "resolved" {
|
||||
f = deb.DBDriver.DB.GetFixedCvesDebian
|
||||
} else {
|
||||
f = deb.DBDriver.DB.GetUnfixedCvesDebian
|
||||
}
|
||||
debCves, err := f(release, pkgName)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
cves := []models.CveContent{}
|
||||
fixes := []models.PackageFixStatus{}
|
||||
for _, devbCve := range debCves {
|
||||
cves = append(cves, *deb.ConvertToModel(&devbCve))
|
||||
fixes = append(fixes, checkPackageFixStatus(&devbCve)...)
|
||||
}
|
||||
return cves, fixes, nil
|
||||
}
|
||||
|
||||
// ConvertToModel converts gost model to vuls model
|
||||
func (deb Debian) ConvertToModel(cve *gostmodels.DebianCVE) *models.CveContent {
|
||||
severity := ""
|
||||
@@ -188,3 +281,22 @@ func (deb Debian) ConvertToModel(cve *gostmodels.DebianCVE) *models.CveContent {
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func checkPackageFixStatus(cve *gostmodels.DebianCVE) []models.PackageFixStatus {
|
||||
fixes := []models.PackageFixStatus{}
|
||||
for _, p := range cve.Package {
|
||||
for _, r := range p.Release {
|
||||
f := models.PackageFixStatus{Name: p.PackageName}
|
||||
|
||||
if r.Status == "open" {
|
||||
f.NotFixedYet = true
|
||||
} else {
|
||||
f.FixedIn = r.FixedVersion
|
||||
}
|
||||
|
||||
fixes = append(fixes, f)
|
||||
}
|
||||
}
|
||||
|
||||
return fixes
|
||||
}
|
||||
|
||||
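The new isGostDefAffected helper above decides "affected" by parsing both the installed version-release and the Security Tracker's FixedIn value with knqyf263/go-deb-version and comparing them. A standalone sketch of that comparison, with made-up package versions; only the library calls already used in this hunk are assumed:

```go
package main

import (
	"fmt"

	debver "github.com/knqyf263/go-deb-version"
)

func main() {
	// Hypothetical installed version and the fixed version reported by Gost.
	installed, err := debver.NewVersion("2.2.4-1+deb10u1")
	if err != nil {
		panic(err)
	}
	fixedIn, err := debver.NewVersion("2.2.4-1+deb10u4")
	if err != nil {
		panic(err)
	}

	// Same rule as isGostDefAffected: affected while installed < fixedIn.
	fmt.Println(installed.LessThan(fixedIn)) // true -> still affected
}
```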
@@ -1,3 +1,6 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import "testing"
|
||||
@@ -36,10 +39,17 @@ func TestDebian_Supported(t *testing.T) {
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "11 is not supported yet",
|
||||
name: "11 is supported",
|
||||
args: args{
|
||||
major: "11",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "12 is not supported yet",
|
||||
args: args{
|
||||
major: "12",
|
||||
},
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
|
||||
93
gost/gost.go
@@ -1,29 +1,98 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
cnf "github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/knqyf263/gost/db"
|
||||
"github.com/vulsio/gost/db"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/future-architect/vuls/constant"
|
||||
)
|
||||
|
||||
// DBDriver is a DB Driver
|
||||
type DBDriver struct {
|
||||
DB db.DB
|
||||
Cnf config.VulnDictInterface
|
||||
}
|
||||
|
||||
// Client is the interface of Gost client.
|
||||
type Client interface {
|
||||
DetectUnfixed(db.DB, *models.ScanResult, bool) (int, error)
|
||||
FillCVEsWithRedHat(db.DB, *models.ScanResult) error
|
||||
DetectCVEs(*models.ScanResult, bool) (int, error)
|
||||
CloseDB() error
|
||||
}
|
||||
|
||||
// Base is a base struct
|
||||
type Base struct {
|
||||
DBDriver DBDriver
|
||||
}
|
||||
|
||||
// CloseDB close a DB connection
|
||||
func (b Base) CloseDB() error {
|
||||
if b.DBDriver.DB == nil {
|
||||
return nil
|
||||
}
|
||||
return b.DBDriver.DB.CloseDB()
|
||||
}
|
||||
|
||||
// FillCVEsWithRedHat fills in CVE details from Red Hat Security data
|
||||
func FillCVEsWithRedHat(r *models.ScanResult, cnf config.GostConf) error {
|
||||
db, locked, err := newGostDB(cnf)
|
||||
if locked {
|
||||
return xerrors.Errorf("SQLite3 is locked: %s", cnf.GetSQLite3Path())
|
||||
} else if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := db.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
return RedHat{Base{DBDriver{DB: db, Cnf: &cnf}}}.fillCvesWithRedHatAPI(r)
|
||||
}
|
||||
|
||||
// NewClient make Client by family
|
||||
func NewClient(family string) Client {
|
||||
func NewClient(cnf config.GostConf, family string) (Client, error) {
|
||||
db, locked, err := newGostDB(cnf)
|
||||
if locked {
|
||||
return nil, xerrors.Errorf("SQLite3 is locked: %s", cnf.GetSQLite3Path())
|
||||
} else if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
driver := DBDriver{DB: db, Cnf: &cnf}
|
||||
|
||||
switch family {
|
||||
case cnf.RedHat, cnf.CentOS:
|
||||
return RedHat{}
|
||||
case cnf.Debian, cnf.Raspbian:
|
||||
return Debian{}
|
||||
case cnf.Windows:
|
||||
return Microsoft{}
|
||||
case constant.RedHat, constant.CentOS, constant.Rocky, constant.Alma:
|
||||
return RedHat{Base{DBDriver: driver}}, nil
|
||||
case constant.Debian, constant.Raspbian:
|
||||
return Debian{Base{DBDriver: driver}}, nil
|
||||
case constant.Ubuntu:
|
||||
return Ubuntu{Base{DBDriver: driver}}, nil
|
||||
case constant.Windows:
|
||||
return Microsoft{Base{DBDriver: driver}}, nil
|
||||
default:
|
||||
return Pseudo{}
|
||||
return Pseudo{Base{DBDriver: driver}}, nil
|
||||
}
|
||||
}
|
||||
|
||||
// newGostDB returns a DB client for Gost
|
||||
func newGostDB(cnf config.GostConf) (driver db.DB, locked bool, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
return nil, false, nil
|
||||
}
|
||||
path := cnf.GetURL()
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
if driver, locked, err = db.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), db.Option{}); err != nil {
|
||||
if locked {
|
||||
return nil, true, xerrors.Errorf("gostDB is locked. err: %w", err)
|
||||
}
|
||||
return nil, false, err
|
||||
}
|
||||
return driver, false, nil
|
||||
}
|
||||
|
||||
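After this refactor the Gost DB handle and its config live inside the client, so callers build one client per OS family and no longer pass a driver into every detect call. A hedged usage sketch based only on the signatures shown above; detectGostCVEs itself and the way the config is obtained are assumptions for illustration:

```go
package detector

import (
	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/gost"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
)

// detectGostCVEs is a hypothetical caller showing the new client lifecycle.
func detectGostCVEs(r *models.ScanResult) error {
	client, err := gost.NewClient(config.Conf.Gost, r.Family)
	if err != nil {
		return err
	}
	defer func() {
		if err := client.CloseDB(); err != nil {
			logging.Log.Errorf("Failed to close the Gost DB. err: %+v", err)
		}
	}()

	// The second argument corresponds to ignoreWillNotFix in the RedHat client.
	nCVEs, err := client.DetectCVEs(r, true)
	if err != nil {
		return err
	}
	logging.Log.Infof("Gost: %d CVEs are detected", nCVEs)
	return nil
}
```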
@@ -1,3 +1,6 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
@@ -5,7 +8,7 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
gostmodels "github.com/knqyf263/gost/models"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
)
|
||||
|
||||
func TestSetPackageStates(t *testing.T) {
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/knqyf263/gost/db"
|
||||
gostmodels "github.com/knqyf263/gost/models"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
)
|
||||
|
||||
// Microsoft is Gost client for windows
|
||||
@@ -15,16 +16,20 @@ type Microsoft struct {
|
||||
Base
|
||||
}
|
||||
|
||||
// DetectUnfixed fills cve information that has in Gost
|
||||
func (ms Microsoft) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (nCVEs int, err error) {
|
||||
if driver == nil {
|
||||
// DetectCVEs fills the CVE information stored in Gost
|
||||
func (ms Microsoft) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error) {
|
||||
if ms.DBDriver.DB == nil {
|
||||
return 0, nil
|
||||
}
|
||||
cveIDs := []string{}
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
for cveID, msCve := range driver.GetMicrosoftMulti(cveIDs) {
|
||||
msCves, err := ms.DBDriver.DB.GetMicrosoftMulti(cveIDs)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
for cveID, msCve := range msCves {
|
||||
if _, ok := r.ScannedCves[cveID]; !ok {
|
||||
continue
|
||||
}
|
||||
@@ -33,7 +38,7 @@ func (ms Microsoft) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (n
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.CveContents{}
|
||||
}
|
||||
v.CveContents[models.Microsoft] = *cveCont
|
||||
v.CveContents[models.Microsoft] = []models.CveContent{*cveCont}
|
||||
v.Mitigations = append(v.Mitigations, mitigations...)
|
||||
r.ScannedCves[cveID] = v
|
||||
}
|
||||
@@ -42,6 +47,9 @@ func (ms Microsoft) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (n
|
||||
|
||||
// ConvertToModel converts gost model to vuls model
|
||||
func (ms Microsoft) ConvertToModel(cve *gostmodels.MicrosoftCVE) (*models.CveContent, []models.Mitigation) {
|
||||
sort.Slice(cve.ScoreSets, func(i, j int) bool {
|
||||
return cve.ScoreSets[i].Vector < cve.ScoreSets[j].Vector
|
||||
})
|
||||
v3score := 0.0
|
||||
var v3Vector string
|
||||
for _, scoreSet := range cve.ScoreSets {
|
||||
@@ -70,11 +78,10 @@ func (ms Microsoft) ConvertToModel(cve *gostmodels.MicrosoftCVE) (*models.CveCon
|
||||
|
||||
option := map[string]string{}
|
||||
if 0 < len(cve.ExploitStatus) {
|
||||
// TODO: CVE-2020-0739
|
||||
// "exploit_status": "Publicly Disclosed:No;Exploited:No;Latest Software Release:Exploitation Less Likely;Older Software Release:Exploitation Less Likely;DOS:N/A",
|
||||
option["exploit"] = cve.ExploitStatus
|
||||
}
|
||||
if 0 < len(cve.Workaround) {
|
||||
option["workaround"] = cve.Workaround
|
||||
}
|
||||
kbids := []string{}
|
||||
for _, kbid := range cve.KBIDs {
|
||||
kbids = append(kbids, kbid.KBID)
|
||||
@@ -86,13 +93,18 @@ func (ms Microsoft) ConvertToModel(cve *gostmodels.MicrosoftCVE) (*models.CveCon
|
||||
vendorURL := "https://msrc.microsoft.com/update-guide/vulnerability/" + cve.CveID
|
||||
mitigations := []models.Mitigation{}
|
||||
if cve.Mitigation != "" {
|
||||
mitigations = []models.Mitigation{
|
||||
{
|
||||
CveContentType: models.Microsoft,
|
||||
Mitigation: cve.Mitigation,
|
||||
URL: vendorURL,
|
||||
},
|
||||
}
|
||||
mitigations = append(mitigations, models.Mitigation{
|
||||
CveContentType: models.Microsoft,
|
||||
Mitigation: cve.Mitigation,
|
||||
URL: vendorURL,
|
||||
})
|
||||
}
|
||||
if cve.Workaround != "" {
|
||||
mitigations = append(mitigations, models.Mitigation{
|
||||
CveContentType: models.Microsoft,
|
||||
Mitigation: cve.Workaround,
|
||||
URL: vendorURL,
|
||||
})
|
||||
}
|
||||
|
||||
return &models.CveContent{
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/knqyf263/gost/db"
|
||||
)
|
||||
|
||||
// Pseudo is Gost client except for RedHat family and Debian
|
||||
@@ -12,7 +12,7 @@ type Pseudo struct {
|
||||
Base
|
||||
}
|
||||
|
||||
// DetectUnfixed fills cve information that has in Gost
|
||||
func (pse Pseudo) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (int, error) {
|
||||
// DetectCVEs fills the CVE information stored in Gost
|
||||
func (pse Pseudo) DetectCVEs(_ *models.ScanResult, _ bool) (int, error) {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
@@ -10,8 +11,7 @@ import (
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/knqyf263/gost/db"
|
||||
gostmodels "github.com/knqyf263/gost/models"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
)
|
||||
|
||||
// RedHat is Gost client for RedHat family linux
|
||||
@@ -19,15 +19,10 @@ type RedHat struct {
|
||||
Base
|
||||
}
|
||||
|
||||
// DetectUnfixed fills cve information that has in Gost
|
||||
func (red RedHat) DetectUnfixed(driver db.DB, r *models.ScanResult, ignoreWillNotFix bool) (nCVEs int, err error) {
|
||||
return red.detectUnfixed(driver, r, ignoreWillNotFix)
|
||||
}
|
||||
|
||||
func (red RedHat) detectUnfixed(driver db.DB, r *models.ScanResult, ignoreWillNotFix bool) (nCVEs int, err error) {
|
||||
if config.Conf.Gost.IsFetchViaHTTP() {
|
||||
prefix, _ := util.URLPathJoin(config.Conf.Gost.URL,
|
||||
"redhat", major(r.Release), "pkgs")
|
||||
// DetectCVEs fills the CVE information stored in Gost
|
||||
func (red RedHat) DetectCVEs(r *models.ScanResult, ignoreWillNotFix bool) (nCVEs int, err error) {
|
||||
if red.DBDriver.Cnf.IsFetchViaHTTP() {
|
||||
prefix, _ := util.URLPathJoin(red.DBDriver.Cnf.GetURL(), "redhat", major(r.Release), "pkgs")
|
||||
responses, err := getAllUnfixedCvesViaHTTP(r, prefix)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
@@ -45,12 +40,15 @@ func (red RedHat) detectUnfixed(driver db.DB, r *models.ScanResult, ignoreWillNo
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if driver == nil {
|
||||
if red.DBDriver.DB == nil {
|
||||
return 0, nil
|
||||
}
|
||||
for _, pack := range r.Packages {
|
||||
// CVE-ID: RedhatCVE
|
||||
cves := driver.GetUnfixedCvesRedhat(major(r.Release), pack.Name, ignoreWillNotFix)
|
||||
cves, err := red.DBDriver.DB.GetUnfixedCvesRedhat(major(r.Release), pack.Name, ignoreWillNotFix)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
for _, cve := range cves {
|
||||
if newly := red.setUnfixedCveToScanResult(&cve, r); newly {
|
||||
nCVEs++
|
||||
@@ -61,7 +59,7 @@ func (red RedHat) detectUnfixed(driver db.DB, r *models.ScanResult, ignoreWillNo
|
||||
return nCVEs, nil
|
||||
}
|
||||
|
||||
func (red RedHat) fillCvesWithRedHatAPI(driver db.DB, r *models.ScanResult) error {
|
||||
func (red RedHat) fillCvesWithRedHatAPI(r *models.ScanResult) error {
|
||||
cveIDs := []string{}
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if _, ok := vuln.CveContents[models.RedHatAPI]; ok {
|
||||
@@ -70,9 +68,8 @@ func (red RedHat) fillCvesWithRedHatAPI(driver db.DB, r *models.ScanResult) erro
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
|
||||
if config.Conf.Gost.IsFetchViaHTTP() {
|
||||
prefix, _ := util.URLPathJoin(config.Conf.Gost.URL,
|
||||
"redhat", "cves")
|
||||
if red.DBDriver.Cnf.IsFetchViaHTTP() {
|
||||
prefix, _ := util.URLPathJoin(config.Conf.Gost.URL, "redhat", "cves")
|
||||
responses, err := getCvesViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -88,10 +85,14 @@ func (red RedHat) fillCvesWithRedHatAPI(driver db.DB, r *models.ScanResult) erro
|
||||
red.setFixedCveToScanResult(&redCve, r)
|
||||
}
|
||||
} else {
|
||||
if driver == nil {
|
||||
if red.DBDriver.DB == nil {
|
||||
return nil
|
||||
}
|
||||
for _, redCve := range driver.GetRedhatMulti(cveIDs) {
|
||||
redCves, err := red.DBDriver.DB.GetRedhatMulti(cveIDs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, redCve := range redCves {
|
||||
if len(redCve.Name) == 0 {
|
||||
continue
|
||||
}
|
||||
@@ -109,7 +110,7 @@ func (red RedHat) setFixedCveToScanResult(cve *gostmodels.RedhatCVE, r *models.S
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(*cveCont)
|
||||
} else {
|
||||
v.CveContents[models.RedHatAPI] = *cveCont
|
||||
v.CveContents[models.RedHatAPI] = []models.CveContent{*cveCont}
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
@@ -129,7 +130,7 @@ func (red RedHat) setUnfixedCveToScanResult(cve *gostmodels.RedhatCVE, r *models
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(*cveCont)
|
||||
} else {
|
||||
v.CveContents[models.RedHatAPI] = *cveCont
|
||||
v.CveContents[models.RedHatAPI] = []models.CveContent{*cveCont}
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
|
||||
197
gost/ubuntu.go
Normal file
@@ -0,0 +1,197 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
)
|
||||
|
||||
// Ubuntu is Gost client for Ubuntu
|
||||
type Ubuntu struct {
|
||||
Base
|
||||
}
|
||||
|
||||
func (ubu Ubuntu) supported(version string) bool {
|
||||
_, ok := map[string]string{
|
||||
"1404": "trusty",
|
||||
"1604": "xenial",
|
||||
"1804": "bionic",
|
||||
"2004": "focal",
|
||||
"2010": "groovy",
|
||||
"2104": "hirsute",
|
||||
}[version]
|
||||
return ok
|
||||
}
|
||||
|
||||
// DetectCVEs fills the CVE information stored in Gost
|
||||
func (ubu Ubuntu) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error) {
|
||||
ubuReleaseVer := strings.Replace(r.Release, ".", "", 1)
|
||||
if !ubu.supported(ubuReleaseVer) {
|
||||
logging.Log.Warnf("Ubuntu %s is not supported yet", r.Release)
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
linuxImage := "linux-image-" + r.RunningKernel.Release
|
||||
// Add linux and set the version of running kernel to search Gost.
|
||||
if r.Container.ContainerID == "" {
|
||||
newVer := ""
|
||||
if p, ok := r.Packages[linuxImage]; ok {
|
||||
newVer = p.NewVersion
|
||||
}
|
||||
r.Packages["linux"] = models.Package{
|
||||
Name: "linux",
|
||||
Version: r.RunningKernel.Version,
|
||||
NewVersion: newVer,
|
||||
}
|
||||
}
|
||||
|
||||
packCvesList := []packCves{}
|
||||
if ubu.DBDriver.Cnf.IsFetchViaHTTP() {
|
||||
url, _ := util.URLPathJoin(ubu.DBDriver.Cnf.GetURL(), "ubuntu", ubuReleaseVer, "pkgs")
|
||||
responses, err := getAllUnfixedCvesViaHTTP(r, url)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
for _, res := range responses {
|
||||
ubuCves := map[string]gostmodels.UbuntuCVE{}
|
||||
if err := json.Unmarshal([]byte(res.json), &ubuCves); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cves := []models.CveContent{}
|
||||
for _, ubucve := range ubuCves {
|
||||
cves = append(cves, *ubu.ConvertToModel(&ubucve))
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: res.request.packName,
|
||||
isSrcPack: res.request.isSrcPack,
|
||||
cves: cves,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
if ubu.DBDriver.DB == nil {
|
||||
return 0, nil
|
||||
}
|
||||
for _, pack := range r.Packages {
|
||||
ubuCves, err := ubu.DBDriver.DB.GetUnfixedCvesUbuntu(ubuReleaseVer, pack.Name)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cves := []models.CveContent{}
|
||||
for _, ubucve := range ubuCves {
|
||||
cves = append(cves, *ubu.ConvertToModel(&ubucve))
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: false,
|
||||
cves: cves,
|
||||
})
|
||||
}
|
||||
|
||||
// SrcPack
|
||||
for _, pack := range r.SrcPackages {
|
||||
ubuCves, err := ubu.DBDriver.DB.GetUnfixedCvesUbuntu(ubuReleaseVer, pack.Name)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cves := []models.CveContent{}
|
||||
for _, ubucve := range ubuCves {
|
||||
cves = append(cves, *ubu.ConvertToModel(&ubucve))
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: true,
|
||||
cves: cves,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
delete(r.Packages, "linux")
|
||||
|
||||
for _, p := range packCvesList {
|
||||
for _, cve := range p.cves {
|
||||
v, ok := r.ScannedCves[cve.CveID]
|
||||
if ok {
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(cve)
|
||||
} else {
|
||||
v.CveContents[models.UbuntuAPI] = []models.CveContent{cve}
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
CveID: cve.CveID,
|
||||
CveContents: models.NewCveContents(cve),
|
||||
Confidences: models.Confidences{models.UbuntuAPIMatch},
|
||||
}
|
||||
nCVEs++
|
||||
}
|
||||
|
||||
names := []string{}
|
||||
if p.isSrcPack {
|
||||
if srcPack, ok := r.SrcPackages[p.packName]; ok {
|
||||
for _, binName := range srcPack.BinaryNames {
|
||||
if _, ok := r.Packages[binName]; ok {
|
||||
names = append(names, binName)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if p.packName == "linux" {
|
||||
names = append(names, linuxImage)
|
||||
} else {
|
||||
names = append(names, p.packName)
|
||||
}
|
||||
}
|
||||
|
||||
for _, name := range names {
|
||||
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
|
||||
Name: name,
|
||||
FixState: "open",
|
||||
NotFixedYet: true,
|
||||
})
|
||||
}
|
||||
r.ScannedCves[cve.CveID] = v
|
||||
}
|
||||
}
|
||||
return nCVEs, nil
|
||||
}
|
||||
|
||||
// ConvertToModel converts gost model to vuls model
|
||||
func (ubu Ubuntu) ConvertToModel(cve *gostmodels.UbuntuCVE) *models.CveContent {
|
||||
references := []models.Reference{}
|
||||
for _, r := range cve.References {
|
||||
if strings.Contains(r.Reference, "https://cve.mitre.org/cgi-bin/cvename.cgi?name=") {
|
||||
references = append(references, models.Reference{Source: "CVE", Link: r.Reference})
|
||||
} else {
|
||||
references = append(references, models.Reference{Link: r.Reference})
|
||||
}
|
||||
}
|
||||
|
||||
for _, b := range cve.Bugs {
|
||||
references = append(references, models.Reference{Source: "Bug", Link: b.Bug})
|
||||
}
|
||||
|
||||
for _, u := range cve.Upstreams {
|
||||
for _, upstreamLink := range u.UpstreamLinks {
|
||||
references = append(references, models.Reference{Source: "UPSTREAM", Link: upstreamLink.Link})
|
||||
}
|
||||
}
|
||||
|
||||
return &models.CveContent{
|
||||
Type: models.UbuntuAPI,
|
||||
CveID: cve.Candidate,
|
||||
Summary: cve.Description,
|
||||
Cvss2Severity: cve.Priority,
|
||||
Cvss3Severity: cve.Priority,
|
||||
SourceLink: "https://ubuntu.com/security/" + cve.Candidate,
|
||||
References: references,
|
||||
Published: cve.PublicDate,
|
||||
}
|
||||
}
|
||||
137
gost/ubuntu_test.go
Normal file
@@ -0,0 +1,137 @@
|
||||
package gost
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
)
|
||||
|
||||
func TestUbuntu_Supported(t *testing.T) {
|
||||
type args struct {
|
||||
ubuReleaseVer string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want bool
|
||||
}{
|
||||
{
|
||||
name: "14.04 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "1404",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "16.04 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "1604",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "18.04 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "1804",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "20.04 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "2004",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "20.10 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "2010",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "21.04 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "2104",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "empty string is not supported yet",
|
||||
args: args{
|
||||
ubuReleaseVer: "",
|
||||
},
|
||||
want: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
ubu := Ubuntu{}
|
||||
if got := ubu.supported(tt.args.ubuReleaseVer); got != tt.want {
|
||||
t.Errorf("Ubuntu.Supported() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestUbuntuConvertToModel(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input gostmodels.UbuntuCVE
|
||||
expected models.CveContent
|
||||
}{
|
||||
{
|
||||
name: "gost Ubuntu.ConvertToModel",
|
||||
input: gostmodels.UbuntuCVE{
|
||||
Candidate: "CVE-2021-3517",
|
||||
PublicDate: time.Date(2021, 5, 19, 14, 15, 0, 0, time.UTC),
|
||||
References: []gostmodels.UbuntuReference{
|
||||
{Reference: "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-3517"},
|
||||
{Reference: "https://gitlab.gnome.org/GNOME/libxml2/-/issues/235"},
|
||||
{Reference: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/bf22713507fe1fc3a2c4b525cf0a88c2dc87a3a2"}},
|
||||
Description: "description.",
|
||||
Notes: []gostmodels.UbuntuNote{},
|
||||
Bugs: []gostmodels.UbuntuBug{{Bug: "http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=987738"}},
|
||||
Priority: "medium",
|
||||
Patches: []gostmodels.UbuntuPatch{
|
||||
{PackageName: "libxml2", ReleasePatches: []gostmodels.UbuntuReleasePatch{
|
||||
{ReleaseName: "focal", Status: "needed", Note: ""},
|
||||
}},
|
||||
},
|
||||
Upstreams: []gostmodels.UbuntuUpstream{{
|
||||
PackageName: "libxml2", UpstreamLinks: []gostmodels.UbuntuUpstreamLink{
|
||||
{Link: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/50f06b3efb638efb0abd95dc62dca05ae67882c2"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
expected: models.CveContent{
|
||||
Type: models.UbuntuAPI,
|
||||
CveID: "CVE-2021-3517",
|
||||
Summary: "description.",
|
||||
Cvss2Severity: "medium",
|
||||
Cvss3Severity: "medium",
|
||||
SourceLink: "https://ubuntu.com/security/CVE-2021-3517",
|
||||
References: []models.Reference{
|
||||
{Source: "CVE", Link: "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-3517"},
|
||||
{Link: "https://gitlab.gnome.org/GNOME/libxml2/-/issues/235"},
|
||||
{Link: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/bf22713507fe1fc3a2c4b525cf0a88c2dc87a3a2"},
|
||||
{Source: "Bug", Link: "http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=987738"},
|
||||
{Source: "UPSTREAM", Link: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/50f06b3efb638efb0abd95dc62dca05ae67882c2"}},
|
||||
Published: time.Date(2021, 5, 19, 14, 15, 0, 0, time.UTC),
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
ubu := Ubuntu{}
|
||||
got := ubu.ConvertToModel(&tt.input)
|
||||
if !reflect.DeepEqual(got, &tt.expected) {
|
||||
t.Errorf("Ubuntu.ConvertToModel() = %#v, want %#v", got, &tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
17
gost/util.go
@@ -1,3 +1,6 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
@@ -6,6 +9,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
@@ -48,7 +52,7 @@ func getCvesViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
util.Log.Debugf("HTTP Request to %s", url)
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGet(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
@@ -82,7 +86,10 @@ type request struct {
|
||||
|
||||
func getAllUnfixedCvesViaHTTP(r *models.ScanResult, urlPrefix string) (
|
||||
responses []response, err error) {
|
||||
return getCvesWithFixStateViaHTTP(r, urlPrefix, "unfixed-cves")
|
||||
}
|
||||
|
||||
func getCvesWithFixStateViaHTTP(r *models.ScanResult, urlPrefix, fixState string) (responses []response, err error) {
|
||||
nReq := len(r.Packages) + len(r.SrcPackages)
|
||||
reqChan := make(chan request, nReq)
|
||||
resChan := make(chan response, nReq)
|
||||
@@ -117,12 +124,12 @@ func getAllUnfixedCvesViaHTTP(r *models.ScanResult, urlPrefix string) (
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.packName,
|
||||
"unfixed-cves",
|
||||
fixState,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
util.Log.Debugf("HTTP Request to %s", url)
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGet(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
@@ -160,12 +167,12 @@ func httpGet(url string, req request, resChan chan<- response, errChan chan<- er
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %s", url, resp, errs)
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
util.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %s", t, err)
|
||||
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
|
||||
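The retry loop above keeps the cenkalti/backoff pattern and only swaps the logger from util.Log to logging.Log. Shown in isolation, the pattern is roughly the following sketch; the URL and the give-up handling are placeholders, and the real httpGet additionally tracks its own retry counter and uses gorequest:

```go
package main

import (
	"fmt"
	"net/http"
	"time"

	"github.com/cenkalti/backoff"
)

func main() {
	// Hypothetical endpoint for illustration only.
	url := "http://gost-server:1325/health"

	operation := func() error {
		resp, err := http.Get(url)
		if err != nil {
			return err // returning an error schedules another attempt
		}
		defer resp.Body.Close()
		if resp.StatusCode != http.StatusOK {
			return fmt.Errorf("unexpected status: %s", resp.Status)
		}
		return nil // nil stops the retries
	}
	notify := func(err error, wait time.Duration) {
		fmt.Printf("retrying in %s, err: %+v\n", wait, err)
	}
	if err := backoff.RetryNotify(operation, backoff.NewExponentialBackOff(), notify); err != nil {
		fmt.Printf("gave up: %+v\n", err)
	}
}
```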
BIN
img/sponsor/tines.png
Normal file
Binary file not shown.
After: 34 KiB
1
integration
Submodule
Submodule integration added at 3d05674df7
120
logging/logutil.go
Normal file
@@ -0,0 +1,120 @@
|
||||
package logging
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
|
||||
"github.com/k0kubun/pp"
|
||||
"github.com/rifflock/lfshook"
|
||||
"github.com/sirupsen/logrus"
|
||||
|
||||
formatter "github.com/kotakanbe/logrus-prefixed-formatter"
|
||||
)
|
||||
|
||||
// LogOpts has options for logging
|
||||
type LogOpts struct {
|
||||
Debug bool `json:"debug,omitempty"`
|
||||
DebugSQL bool `json:"debugSQL,omitempty"`
|
||||
LogToFile bool `json:"logToFile,omitempty"`
|
||||
LogDir string `json:"logDir,omitempty"`
|
||||
LogJSON bool `json:"logJSON"`
|
||||
Quiet bool `json:"quiet,omitempty"`
|
||||
}
|
||||
|
||||
// Log for localhost
|
||||
var Log Logger
|
||||
|
||||
// Logger has logrus entry
|
||||
type Logger struct {
|
||||
logrus.Entry
|
||||
}
|
||||
|
||||
func init() {
|
||||
log := logrus.New()
|
||||
log.Out = ioutil.Discard
|
||||
fields := logrus.Fields{"prefix": ""}
|
||||
Log = Logger{Entry: *log.WithFields(fields)}
|
||||
}
|
||||
|
||||
// NewNormalLogger creates normal logger
|
||||
func NewNormalLogger() Logger {
|
||||
return Logger{Entry: logrus.Entry{Logger: logrus.New()}}
|
||||
}
|
||||
|
||||
// NewCustomLogger creates a custom logrus logger
|
||||
func NewCustomLogger(debug, quiet, logToFile bool, logDir, logMsgAnsiColor, serverName string) Logger {
|
||||
log := logrus.New()
|
||||
log.Formatter = &formatter.TextFormatter{MsgAnsiColor: logMsgAnsiColor}
|
||||
log.Level = logrus.InfoLevel
|
||||
if debug {
|
||||
log.Level = logrus.DebugLevel
|
||||
pp.ColoringEnabled = false
|
||||
}
|
||||
|
||||
if flag.Lookup("test.v") != nil {
|
||||
return Logger{Entry: *logrus.NewEntry(log)}
|
||||
}
|
||||
|
||||
whereami := "localhost"
|
||||
if serverName != "" {
|
||||
whereami = serverName
|
||||
}
|
||||
|
||||
if logToFile {
|
||||
dir := GetDefaultLogDir()
|
||||
if logDir != "" {
|
||||
dir = logDir
|
||||
}
|
||||
|
||||
if _, err := os.Stat(dir); os.IsNotExist(err) {
|
||||
if err := os.Mkdir(dir, 0700); err != nil {
|
||||
log.Errorf("Failed to create log directory. path: %s, err: %+v", dir, err)
|
||||
}
|
||||
}
|
||||
|
||||
logFile := dir + "/vuls.log"
|
||||
if file, err := os.OpenFile(logFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644); err == nil {
|
||||
log.Out = io.MultiWriter(os.Stderr, file)
|
||||
} else {
|
||||
log.Out = os.Stderr
|
||||
log.Errorf("Failed to create log file. path: %s, err: %+v", logFile, err)
|
||||
}
|
||||
|
||||
if _, err := os.Stat(dir); err == nil {
|
||||
path := filepath.Join(dir, fmt.Sprintf("%s.log", whereami))
|
||||
if _, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644); err == nil {
|
||||
log.Hooks.Add(lfshook.NewHook(lfshook.PathMap{
|
||||
logrus.DebugLevel: path,
|
||||
logrus.InfoLevel: path,
|
||||
logrus.WarnLevel: path,
|
||||
logrus.ErrorLevel: path,
|
||||
logrus.FatalLevel: path,
|
||||
logrus.PanicLevel: path,
|
||||
}, nil))
|
||||
} else {
|
||||
log.Errorf("Failed to create log file. path: %s, err: %+v", path, err)
|
||||
}
|
||||
}
|
||||
} else if quiet {
|
||||
log.Out = ioutil.Discard
|
||||
} else {
|
||||
log.Out = os.Stderr
|
||||
}
|
||||
|
||||
entry := log.WithFields(logrus.Fields{"prefix": whereami})
|
||||
return Logger{Entry: *entry}
|
||||
}
|
||||
|
||||
// GetDefaultLogDir returns default log directory
|
||||
func GetDefaultLogDir() string {
|
||||
defaultLogDir := "/var/log/vuls"
|
||||
if runtime.GOOS == "windows" {
|
||||
defaultLogDir = filepath.Join(os.Getenv("APPDATA"), "vuls")
|
||||
}
|
||||
return defaultLogDir
|
||||
}
|
||||
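A brief usage sketch of the new logging package defined above; the import path matches the module layout used elsewhere in this diff, and the argument values are illustrative only:

package main

import "github.com/future-architect/vuls/logging"

func main() {
	// Per-server logger: debug on, not quiet, no file logging, default log
	// directory, no ANSI color override, prefixed with the server name.
	logger := logging.NewCustomLogger(true, false, false, "", "", "server01")
	logger.Debugf("scan started on %s", "server01")

	// The package-level logger discards output until replaced (see init above).
	logging.Log = logger
	logging.Log.Infof("default log dir: %s", logging.GetDefaultLogDir())
}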
@@ -1,6 +1,7 @@
package models

import (
+	"sort"
	"strings"
	"time"

@@ -8,13 +9,26 @@ import (
)

// CveContents has CveContent
-type CveContents map[CveContentType]CveContent
+type CveContents map[CveContentType][]CveContent

// NewCveContents create CveContents
func NewCveContents(conts ...CveContent) CveContents {
	m := CveContents{}
	for _, cont := range conts {
-		m[cont.Type] = cont
+		if cont.Type == Jvn {
+			found := false
+			for _, cveCont := range m[cont.Type] {
+				if cont.SourceLink == cveCont.SourceLink {
+					found = true
+					break
+				}
+			}
+			if !found {
+				m[cont.Type] = append(m[cont.Type], cont)
+			}
+		} else {
+			m[cont.Type] = []CveContent{cont}
+		}
	}
	return m
}
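With CveContents now mapping each source type to a slice instead of a single entry, callers iterate the inner slice. A minimal sketch (a hypothetical helper in the models package, not part of the diff), assuming only the types defined in this file:

// collectSourceLinks gathers every non-empty SourceLink across all sources.
func collectSourceLinks(v CveContents) []string {
	links := []string{}
	for ctype, conts := range v {
		for _, cont := range conts {
			if cont.SourceLink != "" {
				links = append(links, string(ctype)+": "+cont.SourceLink)
			}
		}
	}
	return links
}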
@@ -44,16 +58,18 @@ func (v CveContents) Except(exceptCtypes ...CveContentType) (values CveContents)
|
||||
}
|
||||
|
||||
// PrimarySrcURLs returns link of source
|
||||
func (v CveContents) PrimarySrcURLs(lang, myFamily, cveID string) (values []CveContentStr) {
|
||||
func (v CveContents) PrimarySrcURLs(lang, myFamily, cveID string, confidences Confidences) (values []CveContentStr) {
|
||||
if cveID == "" {
|
||||
return
|
||||
}
|
||||
|
||||
if cont, found := v[Nvd]; found {
|
||||
for _, r := range cont.References {
|
||||
for _, t := range r.Tags {
|
||||
if t == "Vendor Advisory" {
|
||||
values = append(values, CveContentStr{Nvd, r.Link})
|
||||
if conts, found := v[Nvd]; found {
|
||||
for _, cont := range conts {
|
||||
for _, r := range cont.References {
|
||||
for _, t := range r.Tags {
|
||||
if t == "Vendor Advisory" {
|
||||
values = append(values, CveContentStr{Nvd, r.Link})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -61,17 +77,31 @@ func (v CveContents) PrimarySrcURLs(lang, myFamily, cveID string) (values []CveC
|
||||
|
||||
order := CveContentTypes{Nvd, NewCveContentType(myFamily), GitHub}
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found {
|
||||
if cont.SourceLink == "" {
|
||||
continue
|
||||
if conts, found := v[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if cont.SourceLink == "" {
|
||||
continue
|
||||
}
|
||||
values = append(values, CveContentStr{ctype, cont.SourceLink})
|
||||
}
|
||||
values = append(values, CveContentStr{ctype, cont.SourceLink})
|
||||
}
|
||||
}
|
||||
|
||||
if lang == "ja" {
|
||||
if cont, found := v[Jvn]; found && 0 < len(cont.SourceLink) {
|
||||
values = append(values, CveContentStr{Jvn, cont.SourceLink})
|
||||
jvnMatch := false
|
||||
for _, confidence := range confidences {
|
||||
if confidence.DetectionMethod == JvnVendorProductMatchStr {
|
||||
jvnMatch = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if lang == "ja" || jvnMatch {
|
||||
if conts, found := v[Jvn]; found {
|
||||
for _, cont := range conts {
|
||||
if 0 < len(cont.SourceLink) {
|
||||
values = append(values, CveContentStr{Jvn, cont.SourceLink})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,14 +116,17 @@ func (v CveContents) PrimarySrcURLs(lang, myFamily, cveID string) (values []CveC
|
||||
|
||||
// PatchURLs returns link of patch
|
||||
func (v CveContents) PatchURLs() (urls []string) {
|
||||
cont, found := v[Nvd]
|
||||
conts, found := v[Nvd]
|
||||
if !found {
|
||||
return
|
||||
}
|
||||
for _, r := range cont.References {
|
||||
for _, t := range r.Tags {
|
||||
if t == "Patch" {
|
||||
urls = append(urls, r.Link)
|
||||
|
||||
for _, cont := range conts {
|
||||
for _, r := range cont.References {
|
||||
for _, t := range r.Tags {
|
||||
if t == "Patch" {
|
||||
urls = append(urls, r.Link)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -130,11 +163,15 @@ func (v CveContents) Cpes(myFamily string) (values []CveContentCpes) {
|
||||
order = append(order, AllCveContetTypes.Except(order...)...)
|
||||
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found && 0 < len(cont.Cpes) {
|
||||
values = append(values, CveContentCpes{
|
||||
Type: ctype,
|
||||
Value: cont.Cpes,
|
||||
})
|
||||
if conts, found := v[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if 0 < len(cont.Cpes) {
|
||||
values = append(values, CveContentCpes{
|
||||
Type: ctype,
|
||||
Value: cont.Cpes,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
@@ -152,11 +189,15 @@ func (v CveContents) References(myFamily string) (values []CveContentRefs) {
|
||||
order = append(order, AllCveContetTypes.Except(order...)...)
|
||||
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found && 0 < len(cont.References) {
|
||||
values = append(values, CveContentRefs{
|
||||
Type: ctype,
|
||||
Value: cont.References,
|
||||
})
|
||||
if conts, found := v[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if 0 < len(cont.References) {
|
||||
values = append(values, CveContentRefs{
|
||||
Type: ctype,
|
||||
Value: cont.References,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -168,17 +209,21 @@ func (v CveContents) CweIDs(myFamily string) (values []CveContentStr) {
|
||||
order := CveContentTypes{NewCveContentType(myFamily)}
|
||||
order = append(order, AllCveContetTypes.Except(order...)...)
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found && 0 < len(cont.CweIDs) {
|
||||
for _, cweID := range cont.CweIDs {
|
||||
for _, val := range values {
|
||||
if val.Value == cweID {
|
||||
continue
|
||||
if conts, found := v[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if 0 < len(cont.CweIDs) {
|
||||
for _, cweID := range cont.CweIDs {
|
||||
for _, val := range values {
|
||||
if val.Value == cweID {
|
||||
continue
|
||||
}
|
||||
}
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: cweID,
|
||||
})
|
||||
}
|
||||
}
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: cweID,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -197,6 +242,47 @@ func (v CveContents) UniqCweIDs(myFamily string) (values []CveContentStr) {
|
||||
return values
|
||||
}
|
||||
|
||||
// Sort elements for integration-testing
|
||||
func (v CveContents) Sort() {
|
||||
for contType, contents := range v {
|
||||
// CVSS3 desc, CVSS2 desc, SourceLink asc
|
||||
sort.Slice(contents, func(i, j int) bool {
|
||||
if contents[i].Cvss3Score > contents[j].Cvss3Score {
|
||||
return true
|
||||
} else if contents[i].Cvss3Score == contents[i].Cvss3Score {
|
||||
if contents[i].Cvss2Score > contents[j].Cvss2Score {
|
||||
return true
|
||||
} else if contents[i].Cvss2Score == contents[i].Cvss2Score {
|
||||
if contents[i].SourceLink < contents[j].SourceLink {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
v[contType] = contents
|
||||
}
|
||||
for contType, contents := range v {
|
||||
for cveID, cont := range contents {
|
||||
sort.Slice(cont.References, func(i, j int) bool {
|
||||
return cont.References[i].Link < cont.References[j].Link
|
||||
})
|
||||
sort.Slice(cont.CweIDs, func(i, j int) bool {
|
||||
return cont.CweIDs[i] < cont.CweIDs[j]
|
||||
})
|
||||
for i, ref := range cont.References {
|
||||
// sort v.CveContents[].References[].Tags
|
||||
sort.Slice(ref.Tags, func(j, k int) bool {
|
||||
return ref.Tags[j] < ref.Tags[k]
|
||||
})
|
||||
cont.References[i] = ref
|
||||
}
|
||||
contents[cveID] = cont
|
||||
}
|
||||
v[contType] = contents
|
||||
}
|
||||
}
|
||||
|
||||
// CveContent has abstraction of various vulnerability information
|
||||
type CveContent struct {
|
||||
Type CveContentType `json:"type"`
|
||||
@@ -233,7 +319,7 @@ func NewCveContentType(name string) CveContentType {
		return Nvd
	case "jvn":
		return Jvn
-	case "redhat", "centos":
+	case "redhat", "centos", "alma", "rocky":
		return RedHat
	case "oracle":
		return Oracle
@@ -245,6 +331,8 @@ func NewCveContentType(name string) CveContentType {
		return RedHatAPI
	case "debian_security_tracker":
		return DebianSecurityTracker
+	case "ubuntu_api":
+		return UbuntuAPI
	case "microsoft":
		return Microsoft
	case "wordpress":
@@ -282,6 +370,9 @@ const (
	// Ubuntu is Ubuntu
	Ubuntu CveContentType = "ubuntu"

+	// UbuntuAPI is Ubuntu
+	UbuntuAPI CveContentType = "ubuntu_api"
+
	// Oracle is Oracle Linux
	Oracle CveContentType = "oracle"

@@ -317,10 +408,11 @@ var AllCveContetTypes = CveContentTypes{
	RedHat,
	RedHatAPI,
	Debian,
+	DebianSecurityTracker,
	Ubuntu,
+	UbuntuAPI,
	Amazon,
	SUSE,
-	DebianSecurityTracker,
	WpScan,
	Trivy,
	GitHub,
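A quick check of the widened family mapping above; a sketch assuming only the constants and NewCveContentType shown in this file:

// checkRedHatFamilies panics if any of these distro names stops mapping to RedHat.
func checkRedHatFamilies() {
	for _, family := range []string{"redhat", "centos", "alma", "rocky"} {
		if NewCveContentType(family) != RedHat {
			panic("unexpected content type for " + family)
		}
	}
}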
@@ -11,12 +11,12 @@ func TestExcept(t *testing.T) {
|
||||
out CveContents
|
||||
}{{
|
||||
in: CveContents{
|
||||
RedHat: {Type: RedHat},
|
||||
Ubuntu: {Type: Ubuntu},
|
||||
Debian: {Type: Debian},
|
||||
RedHat: []CveContent{{Type: RedHat}},
|
||||
Ubuntu: []CveContent{{Type: Ubuntu}},
|
||||
Debian: []CveContent{{Type: Debian}},
|
||||
},
|
||||
out: CveContents{
|
||||
RedHat: {Type: RedHat},
|
||||
RedHat: []CveContent{{Type: RedHat}},
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -30,9 +30,10 @@ func TestExcept(t *testing.T) {
|
||||
|
||||
func TestSourceLinks(t *testing.T) {
|
||||
type in struct {
|
||||
lang string
|
||||
cveID string
|
||||
cont CveContents
|
||||
lang string
|
||||
cveID string
|
||||
cont CveContents
|
||||
confidences Confidences
|
||||
}
|
||||
var tests = []struct {
|
||||
in in
|
||||
@@ -44,15 +45,15 @@ func TestSourceLinks(t *testing.T) {
|
||||
lang: "ja",
|
||||
cveID: "CVE-2017-6074",
|
||||
cont: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
SourceLink: "https://jvn.jp/vu/JVNVU93610402/",
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
SourceLink: "https://access.redhat.com/security/cve/CVE-2017-6074",
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
References: []Reference{
|
||||
{
|
||||
@@ -69,7 +70,7 @@ func TestSourceLinks(t *testing.T) {
|
||||
},
|
||||
},
|
||||
SourceLink: "https://nvd.nist.gov/vuln/detail/CVE-2017-6074",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: []CveContentStr{
|
||||
@@ -97,14 +98,14 @@ func TestSourceLinks(t *testing.T) {
|
||||
lang: "en",
|
||||
cveID: "CVE-2017-6074",
|
||||
cont: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
SourceLink: "https://jvn.jp/vu/JVNVU93610402/",
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
SourceLink: "https://access.redhat.com/security/cve/CVE-2017-6074",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: []CveContentStr{
|
||||
@@ -128,11 +129,123 @@ func TestSourceLinks(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
// Confidence: JvnVendorProductMatch
|
||||
{
|
||||
in: in{
|
||||
lang: "en",
|
||||
cveID: "CVE-2017-6074",
|
||||
cont: CveContents{
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
SourceLink: "https://jvn.jp/vu/JVNVU93610402/",
|
||||
}},
|
||||
},
|
||||
confidences: Confidences{
|
||||
Confidence{DetectionMethod: JvnVendorProductMatchStr},
|
||||
},
|
||||
},
|
||||
out: []CveContentStr{
|
||||
{
|
||||
Type: Jvn,
|
||||
Value: "https://jvn.jp/vu/JVNVU93610402/",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for i, tt := range tests {
|
||||
actual := tt.in.cont.PrimarySrcURLs(tt.in.lang, "redhat", tt.in.cveID)
|
||||
actual := tt.in.cont.PrimarySrcURLs(tt.in.lang, "redhat", tt.in.cveID, tt.in.confidences)
|
||||
if !reflect.DeepEqual(tt.out, actual) {
|
||||
t.Errorf("\n[%d] expected: %v\n actual: %v\n", i, tt.out, actual)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCveContents_Sort(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
v CveContents
|
||||
want CveContents
|
||||
}{
|
||||
{
|
||||
name: "sorted",
|
||||
v: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{Cvss3Score: 3},
|
||||
{Cvss3Score: 10},
|
||||
},
|
||||
},
|
||||
want: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{Cvss3Score: 10},
|
||||
{Cvss3Score: 3},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "sort JVN by cvss3, cvss2, sourceLink",
|
||||
v: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2023/JVNDB-2023-001210.html",
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2021/JVNDB-2021-001210.html",
|
||||
},
|
||||
},
|
||||
},
|
||||
want: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2021/JVNDB-2021-001210.html",
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2023/JVNDB-2023-001210.html",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "sort JVN by cvss3, cvss2",
|
||||
v: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 1,
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
want: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 10,
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
tt.v.Sort()
|
||||
if !reflect.DeepEqual(tt.v, tt.want) {
|
||||
t.Errorf("\n[%s] expected: %v\n actual: %v\n", tt.name, tt.want, tt.v)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,23 +6,22 @@ import (
	"github.com/aquasecurity/trivy-db/pkg/db"
	trivyDBTypes "github.com/aquasecurity/trivy-db/pkg/types"
	"github.com/aquasecurity/trivy/pkg/detector/library"
+	"github.com/future-architect/vuls/logging"

	"github.com/aquasecurity/trivy/pkg/types"
-	"github.com/future-architect/vuls/util"
	"golang.org/x/xerrors"
	// "github.com/aquasecurity/go-dep-parser/pkg/types"
)

// LibraryScanners is an array of LibraryScanner
type LibraryScanners []LibraryScanner

// Find : find by name
-func (lss LibraryScanners) Find(path, name string) map[string]types.Library {
-	filtered := map[string]types.Library{}
+func (lss LibraryScanners) Find(path, name string) map[string]Library {
+	filtered := map[string]Library{}
	for _, ls := range lss {
		for _, lib := range ls.Libs {
-			if ls.Path == path && lib.Name == name {
-				filtered[ls.Path] = lib
+			if ls.LockfilePath == path && lib.Name == name {
+				filtered[ls.LockfilePath] = lib
				break
			}
		}
@@ -40,13 +39,26 @@ func (lss LibraryScanners) Total() (total int) {

// LibraryScanner has libraries information
type LibraryScanner struct {
-	Path string
-	Libs []types.Library
+	Type string
+	Libs []Library
+
+	// The path to the Lockfile is stored.
+	LockfilePath string `json:"path,omitempty"`
}

+// Library holds the attribute of a package library
+type Library struct {
+	Name    string
+	Version string
+
+	// The Path to the library in the container image. Empty string when Lockfile scan.
+	// This field is used to convert the result JSON of a `trivy image` using trivy-to-vuls.
+	FilePath string
+}
+
// Scan : scan target library
func (s LibraryScanner) Scan() ([]VulnInfo, error) {
-	scanner, err := library.DriverFactory{}.NewDriver(filepath.Base(string(s.Path)))
+	scanner, err := library.NewDriver(s.Type)
	if err != nil {
		return nil, xerrors.Errorf("Failed to new a library driver: %w", err)
	}
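A sketch of how the reshaped LibraryScanner might be populated and queried; the helper function and all values are illustrative only:

// exampleLibraryScan builds one scanner entry and looks a library up by
// lockfile path and name using the Find method above.
func exampleLibraryScan() map[string]Library {
	ls := LibraryScanner{
		Type:         "ruby",
		LockfilePath: "/app/Gemfile.lock",
		Libs: []Library{
			{Name: "rails", Version: "6.0.3"},
			{Name: "rack", Version: "2.2.2", FilePath: ""}, // FilePath is empty for lockfile scans
		},
	}
	return LibraryScanners{ls}.Find("/app/Gemfile.lock", "rails")
}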
@@ -71,7 +83,7 @@ func (s LibraryScanner) convertFanalToVuln(tvulns []types.DetectedVulnerability)
|
||||
for _, tvuln := range tvulns {
|
||||
vinfo, err := s.getVulnDetail(tvuln)
|
||||
if err != nil {
|
||||
util.Log.Debugf("failed to getVulnDetail. err: %s, tvuln: %#v", err, tvuln)
|
||||
logging.Log.Debugf("failed to getVulnDetail. err: %+v, tvuln: %#v", err, tvuln)
|
||||
continue
|
||||
}
|
||||
vulns = append(vulns, vinfo)
|
||||
@@ -87,52 +99,57 @@ func (s LibraryScanner) getVulnDetail(tvuln types.DetectedVulnerability) (vinfo
|
||||
|
||||
vinfo.CveID = tvuln.VulnerabilityID
|
||||
vinfo.CveContents = getCveContents(tvuln.VulnerabilityID, vul)
|
||||
if tvuln.FixedVersion != "" {
|
||||
vinfo.LibraryFixedIns = []LibraryFixedIn{
|
||||
{
|
||||
Key: s.GetLibraryKey(),
|
||||
Name: tvuln.PkgName,
|
||||
FixedIn: tvuln.FixedVersion,
|
||||
Path: s.Path,
|
||||
},
|
||||
}
|
||||
vinfo.LibraryFixedIns = []LibraryFixedIn{
|
||||
{
|
||||
Key: s.GetLibraryKey(),
|
||||
Name: tvuln.PkgName,
|
||||
FixedIn: tvuln.FixedVersion,
|
||||
Path: s.LockfilePath,
|
||||
},
|
||||
}
|
||||
return vinfo, nil
|
||||
}
|
||||
|
||||
func getCveContents(cveID string, vul trivyDBTypes.Vulnerability) (contents map[CveContentType]CveContent) {
|
||||
contents = map[CveContentType]CveContent{}
|
||||
func getCveContents(cveID string, vul trivyDBTypes.Vulnerability) (contents map[CveContentType][]CveContent) {
|
||||
contents = map[CveContentType][]CveContent{}
|
||||
refs := []Reference{}
|
||||
for _, refURL := range vul.References {
|
||||
refs = append(refs, Reference{Source: "trivy", Link: refURL})
|
||||
}
|
||||
|
||||
content := CveContent{
|
||||
Type: Trivy,
|
||||
CveID: cveID,
|
||||
Title: vul.Title,
|
||||
Summary: vul.Description,
|
||||
Cvss3Severity: string(vul.Severity),
|
||||
References: refs,
|
||||
contents[Trivy] = []CveContent{
|
||||
{
|
||||
Type: Trivy,
|
||||
CveID: cveID,
|
||||
Title: vul.Title,
|
||||
Summary: vul.Description,
|
||||
Cvss3Severity: string(vul.Severity),
|
||||
References: refs,
|
||||
},
|
||||
}
|
||||
contents[Trivy] = content
|
||||
return contents
|
||||
}
|
||||
|
||||
// LibraryMap is filename and library type
|
||||
var LibraryMap = map[string]string{
|
||||
"package-lock.json": "node",
|
||||
"yarn.lock": "node",
|
||||
"Gemfile.lock": "ruby",
|
||||
"Cargo.lock": "rust",
|
||||
"composer.lock": "php",
|
||||
"Pipfile.lock": "python",
|
||||
"poetry.lock": "python",
|
||||
"package-lock.json": "node",
|
||||
"yarn.lock": "node",
|
||||
"Gemfile.lock": "ruby",
|
||||
"Cargo.lock": "rust",
|
||||
"composer.lock": "php",
|
||||
"Pipfile.lock": "python",
|
||||
"poetry.lock": "python",
|
||||
"packages.lock.json": ".net",
|
||||
"go.sum": "gomod",
|
||||
}
|
||||
|
||||
// GetLibraryKey returns target library key
|
||||
func (s LibraryScanner) GetLibraryKey() string {
|
||||
fileName := filepath.Base(s.Path)
|
||||
fileName := filepath.Base(s.LockfilePath)
|
||||
switch s.Type {
|
||||
case "jar", "war", "ear":
|
||||
return "java"
|
||||
}
|
||||
return LibraryMap[fileName]
|
||||
}
|
||||
|
||||
|
||||
@@ -3,8 +3,6 @@ package models
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
)
|
||||
|
||||
func TestLibraryScanners_Find(t *testing.T) {
|
||||
@@ -16,14 +14,14 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
name string
|
||||
lss LibraryScanners
|
||||
args args
|
||||
want map[string]types.Library
|
||||
want map[string]Library
|
||||
}{
|
||||
{
|
||||
name: "single file",
|
||||
lss: LibraryScanners{
|
||||
{
|
||||
Path: "/pathA",
|
||||
Libs: []types.Library{
|
||||
LockfilePath: "/pathA",
|
||||
Libs: []Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
@@ -32,7 +30,7 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
},
|
||||
},
|
||||
args: args{"/pathA", "libA"},
|
||||
want: map[string]types.Library{
|
||||
want: map[string]Library{
|
||||
"/pathA": {
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
@@ -43,8 +41,8 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
name: "multi file",
|
||||
lss: LibraryScanners{
|
||||
{
|
||||
Path: "/pathA",
|
||||
Libs: []types.Library{
|
||||
LockfilePath: "/pathA",
|
||||
Libs: []Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
@@ -52,8 +50,8 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "/pathB",
|
||||
Libs: []types.Library{
|
||||
LockfilePath: "/pathB",
|
||||
Libs: []Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.5",
|
||||
@@ -62,7 +60,7 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
},
|
||||
},
|
||||
args: args{"/pathA", "libA"},
|
||||
want: map[string]types.Library{
|
||||
want: map[string]Library{
|
||||
"/pathA": {
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
@@ -73,8 +71,8 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
name: "miss",
|
||||
lss: LibraryScanners{
|
||||
{
|
||||
Path: "/pathA",
|
||||
Libs: []types.Library{
|
||||
LockfilePath: "/pathA",
|
||||
Libs: []Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
@@ -83,7 +81,7 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
},
|
||||
},
|
||||
args: args{"/pathA", "libB"},
|
||||
want: map[string]types.Library{},
|
||||
want: map[string]Library{},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
|
||||
@@ -382,7 +382,7 @@ func Test_IsRaspbianPackage(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func Test_parseListenPorts(t *testing.T) {
|
||||
func Test_NewPortStat(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
args string
|
||||
@@ -423,7 +423,7 @@ func Test_parseListenPorts(t *testing.T) {
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error occurred: %s", err)
|
||||
} else if !reflect.DeepEqual(*listenPort, tt.expect) {
|
||||
t.Errorf("base.parseListenPorts() = %v, want %v", *listenPort, tt.expect)
|
||||
t.Errorf("base.NewPortStat() = %v, want %v", *listenPort, tt.expect)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -4,13 +4,14 @@ import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/cwe"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
)
|
||||
|
||||
// ScanResults is a slide of ScanResult
|
||||
@@ -18,31 +19,31 @@ type ScanResults []ScanResult
|
||||
|
||||
// ScanResult has the result of scanned CVE information.
|
||||
type ScanResult struct {
|
||||
JSONVersion int `json:"jsonVersion"`
|
||||
Lang string `json:"lang"`
|
||||
ServerUUID string `json:"serverUUID"`
|
||||
ServerName string `json:"serverName"` // TOML Section key
|
||||
Family string `json:"family"`
|
||||
Release string `json:"release"`
|
||||
Container Container `json:"container"`
|
||||
Platform Platform `json:"platform"`
|
||||
IPv4Addrs []string `json:"ipv4Addrs,omitempty"` // only global unicast address (https://golang.org/pkg/net/#IP.IsGlobalUnicast)
|
||||
IPv6Addrs []string `json:"ipv6Addrs,omitempty"` // only global unicast address (https://golang.org/pkg/net/#IP.IsGlobalUnicast)
|
||||
IPSIdentifiers map[config.IPS]string `json:"ipsIdentifiers,omitempty"`
|
||||
ScannedAt time.Time `json:"scannedAt"`
|
||||
ScanMode string `json:"scanMode"`
|
||||
ScannedVersion string `json:"scannedVersion"`
|
||||
ScannedRevision string `json:"scannedRevision"`
|
||||
ScannedBy string `json:"scannedBy"`
|
||||
ScannedVia string `json:"scannedVia"`
|
||||
ScannedIPv4Addrs []string `json:"scannedIpv4Addrs,omitempty"`
|
||||
ScannedIPv6Addrs []string `json:"scannedIpv6Addrs,omitempty"`
|
||||
ReportedAt time.Time `json:"reportedAt"`
|
||||
ReportedVersion string `json:"reportedVersion"`
|
||||
ReportedRevision string `json:"reportedRevision"`
|
||||
ReportedBy string `json:"reportedBy"`
|
||||
Errors []string `json:"errors"`
|
||||
Warnings []string `json:"warnings"`
|
||||
JSONVersion int `json:"jsonVersion"`
|
||||
Lang string `json:"lang"`
|
||||
ServerUUID string `json:"serverUUID"`
|
||||
ServerName string `json:"serverName"` // TOML Section key
|
||||
Family string `json:"family"`
|
||||
Release string `json:"release"`
|
||||
Container Container `json:"container"`
|
||||
Platform Platform `json:"platform"`
|
||||
IPv4Addrs []string `json:"ipv4Addrs,omitempty"` // only global unicast address (https://golang.org/pkg/net/#IP.IsGlobalUnicast)
|
||||
IPv6Addrs []string `json:"ipv6Addrs,omitempty"` // only global unicast address (https://golang.org/pkg/net/#IP.IsGlobalUnicast)
|
||||
IPSIdentifiers map[string]string `json:"ipsIdentifiers,omitempty"`
|
||||
ScannedAt time.Time `json:"scannedAt"`
|
||||
ScanMode string `json:"scanMode"`
|
||||
ScannedVersion string `json:"scannedVersion"`
|
||||
ScannedRevision string `json:"scannedRevision"`
|
||||
ScannedBy string `json:"scannedBy"`
|
||||
ScannedVia string `json:"scannedVia"`
|
||||
ScannedIPv4Addrs []string `json:"scannedIpv4Addrs,omitempty"`
|
||||
ScannedIPv6Addrs []string `json:"scannedIpv6Addrs,omitempty"`
|
||||
ReportedAt time.Time `json:"reportedAt"`
|
||||
ReportedVersion string `json:"reportedVersion"`
|
||||
ReportedRevision string `json:"reportedRevision"`
|
||||
ReportedBy string `json:"reportedBy"`
|
||||
Errors []string `json:"errors"`
|
||||
Warnings []string `json:"warnings"`
|
||||
|
||||
ScannedCves VulnInfos `json:"scannedCves"`
|
||||
RunningKernel Kernel `json:"runningKernel"`
|
||||
@@ -59,13 +60,386 @@ type ScanResult struct {
|
||||
} `json:"config"`
|
||||
}
|
||||
|
||||
// Container has Container information
|
||||
type Container struct {
|
||||
ContainerID string `json:"containerID"`
|
||||
Name string `json:"name"`
|
||||
Image string `json:"image"`
|
||||
Type string `json:"type"`
|
||||
UUID string `json:"uuid"`
|
||||
}
|
||||
|
||||
// Platform has platform information
|
||||
type Platform struct {
|
||||
Name string `json:"name"` // aws or azure or gcp or other...
|
||||
InstanceID string `json:"instanceID"`
|
||||
}
|
||||
|
||||
// Kernel has the Release, version and whether need restart
|
||||
type Kernel struct {
|
||||
Release string `json:"release"`
|
||||
Version string `json:"version"`
|
||||
RebootRequired bool `json:"rebootRequired"`
|
||||
}
|
||||
|
||||
// FilterInactiveWordPressLibs is filter function.
|
||||
func (r *ScanResult) FilterInactiveWordPressLibs(detectInactive bool) {
|
||||
if detectInactive {
|
||||
return
|
||||
}
|
||||
|
||||
filtered := r.ScannedCves.Find(func(v VulnInfo) bool {
|
||||
if len(v.WpPackageFixStats) == 0 {
|
||||
return true
|
||||
}
|
||||
// Ignore if all libs in this vulnInfo inactive
|
||||
for _, wp := range v.WpPackageFixStats {
|
||||
if p, ok := r.WordPressPackages.Find(wp.Name); ok {
|
||||
if p.Status != Inactive {
|
||||
return true
|
||||
}
|
||||
} else {
|
||||
logging.Log.Warnf("Failed to find the WordPress pkg: %+s", wp.Name)
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
r.ScannedCves = filtered
|
||||
}
|
||||
|
||||
// ReportFileName returns the filename on localhost without extension
|
||||
func (r ScanResult) ReportFileName() (name string) {
|
||||
if r.Container.ContainerID == "" {
|
||||
return r.ServerName
|
||||
}
|
||||
return fmt.Sprintf("%s@%s", r.Container.Name, r.ServerName)
|
||||
}
|
||||
|
||||
// ReportKeyName returns the name of key on S3, Azure-Blob without extension
|
||||
func (r ScanResult) ReportKeyName() (name string) {
|
||||
timestr := r.ScannedAt.Format(time.RFC3339)
|
||||
if r.Container.ContainerID == "" {
|
||||
return fmt.Sprintf("%s/%s", timestr, r.ServerName)
|
||||
}
|
||||
return fmt.Sprintf("%s/%s@%s", timestr, r.Container.Name, r.ServerName)
|
||||
}
|
||||
|
||||
// ServerInfo returns server name one line
|
||||
func (r ScanResult) ServerInfo() string {
|
||||
if r.Container.ContainerID == "" {
|
||||
return fmt.Sprintf("%s (%s%s)",
|
||||
r.FormatServerName(), r.Family, r.Release)
|
||||
}
|
||||
return fmt.Sprintf(
|
||||
"%s (%s%s) on %s",
|
||||
r.FormatServerName(),
|
||||
r.Family,
|
||||
r.Release,
|
||||
r.ServerName,
|
||||
)
|
||||
}
|
||||
|
||||
// ServerInfoTui returns server information for TUI sidebar
|
||||
func (r ScanResult) ServerInfoTui() string {
|
||||
if r.Container.ContainerID == "" {
|
||||
line := fmt.Sprintf("%s (%s%s)",
|
||||
r.ServerName, r.Family, r.Release)
|
||||
if len(r.Warnings) != 0 {
|
||||
line = "[Warn] " + line
|
||||
}
|
||||
if r.RunningKernel.RebootRequired {
|
||||
return "[Reboot] " + line
|
||||
}
|
||||
return line
|
||||
}
|
||||
|
||||
fmtstr := "|-- %s (%s%s)"
|
||||
if r.RunningKernel.RebootRequired {
|
||||
fmtstr = "|-- [Reboot] %s (%s%s)"
|
||||
}
|
||||
return fmt.Sprintf(fmtstr, r.Container.Name, r.Family, r.Release)
|
||||
}
|
||||
|
||||
// FormatServerName returns server and container name
|
||||
func (r ScanResult) FormatServerName() (name string) {
|
||||
if r.Container.ContainerID == "" {
|
||||
name = r.ServerName
|
||||
} else {
|
||||
name = fmt.Sprintf("%s@%s",
|
||||
r.Container.Name, r.ServerName)
|
||||
}
|
||||
if r.RunningKernel.RebootRequired {
|
||||
name = "[Reboot Required] " + name
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// FormatTextReportHeader returns header of text report
|
||||
func (r ScanResult) FormatTextReportHeader() string {
|
||||
var buf bytes.Buffer
|
||||
for i := 0; i < len(r.ServerInfo()); i++ {
|
||||
buf.WriteString("=")
|
||||
}
|
||||
|
||||
pkgs := r.FormatUpdatablePkgsSummary()
|
||||
if 0 < len(r.WordPressPackages) {
|
||||
pkgs = fmt.Sprintf("%s, %d WordPress pkgs", pkgs, len(r.WordPressPackages))
|
||||
}
|
||||
if 0 < len(r.LibraryScanners) {
|
||||
pkgs = fmt.Sprintf("%s, %d libs", pkgs, r.LibraryScanners.Total())
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s\n%s\n%s\n%s, %s, %s, %s\n%s\n",
|
||||
r.ServerInfo(),
|
||||
buf.String(),
|
||||
r.ScannedCves.FormatCveSummary(),
|
||||
r.ScannedCves.FormatFixedStatus(r.Packages),
|
||||
r.FormatExploitCveSummary(),
|
||||
r.FormatMetasploitCveSummary(),
|
||||
r.FormatAlertSummary(),
|
||||
pkgs)
|
||||
}
|
||||
|
||||
// FormatUpdatablePkgsSummary returns a summary of updatable packages
|
||||
func (r ScanResult) FormatUpdatablePkgsSummary() string {
|
||||
mode := r.Config.Scan.Servers[r.ServerName].Mode
|
||||
if !r.isDisplayUpdatableNum(mode) {
|
||||
return fmt.Sprintf("%d installed", len(r.Packages))
|
||||
}
|
||||
|
||||
nUpdatable := 0
|
||||
for _, p := range r.Packages {
|
||||
if p.NewVersion == "" {
|
||||
continue
|
||||
}
|
||||
if p.Version != p.NewVersion || p.Release != p.NewRelease {
|
||||
nUpdatable++
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%d installed, %d updatable",
|
||||
len(r.Packages),
|
||||
nUpdatable)
|
||||
}
|
||||
|
||||
// FormatExploitCveSummary returns a summary of exploit cve
|
||||
func (r ScanResult) FormatExploitCveSummary() string {
|
||||
nExploitCve := 0
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if 0 < len(vuln.Exploits) {
|
||||
nExploitCve++
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%d poc", nExploitCve)
|
||||
}
|
||||
|
||||
// FormatMetasploitCveSummary returns a summary of exploit cve
|
||||
func (r ScanResult) FormatMetasploitCveSummary() string {
|
||||
nMetasploitCve := 0
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if 0 < len(vuln.Metasploits) {
|
||||
nMetasploitCve++
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%d exploits", nMetasploitCve)
|
||||
}
|
||||
|
||||
// FormatAlertSummary returns a summary of CERT alerts
|
||||
func (r ScanResult) FormatAlertSummary() string {
|
||||
cisaCnt := 0
|
||||
uscertCnt := 0
|
||||
jpcertCnt := 0
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if len(vuln.AlertDict.CISA) > 0 {
|
||||
cisaCnt += len(vuln.AlertDict.CISA)
|
||||
}
|
||||
if len(vuln.AlertDict.USCERT) > 0 {
|
||||
uscertCnt += len(vuln.AlertDict.USCERT)
|
||||
}
|
||||
if len(vuln.AlertDict.JPCERT) > 0 {
|
||||
jpcertCnt += len(vuln.AlertDict.JPCERT)
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("cisa: %d, uscert: %d, jpcert: %d alerts", cisaCnt, uscertCnt, jpcertCnt)
|
||||
}
|
||||
|
||||
func (r ScanResult) isDisplayUpdatableNum(mode config.ScanMode) bool {
|
||||
if r.Family == constant.FreeBSD {
|
||||
return false
|
||||
}
|
||||
|
||||
if mode.IsOffline() {
|
||||
return false
|
||||
}
|
||||
if mode.IsFastRoot() || mode.IsDeep() {
|
||||
return true
|
||||
}
|
||||
if mode.IsFast() {
|
||||
switch r.Family {
|
||||
case constant.RedHat,
|
||||
constant.Oracle,
|
||||
constant.Debian,
|
||||
constant.Ubuntu,
|
||||
constant.Raspbian:
|
||||
return false
|
||||
default:
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsContainer returns whether this ServerInfo is about container
|
||||
func (r ScanResult) IsContainer() bool {
|
||||
return 0 < len(r.Container.ContainerID)
|
||||
}
|
||||
|
||||
// RemoveRaspbianPackFromResult is for Raspberry Pi and removes the Raspberry Pi dedicated package from ScanResult.
|
||||
func (r ScanResult) RemoveRaspbianPackFromResult() *ScanResult {
|
||||
if r.Family != constant.Raspbian {
|
||||
return &r
|
||||
}
|
||||
|
||||
packs := make(Packages)
|
||||
for _, pack := range r.Packages {
|
||||
if !IsRaspbianPackage(pack.Name, pack.Version) {
|
||||
packs[pack.Name] = pack
|
||||
}
|
||||
}
|
||||
srcPacks := make(SrcPackages)
|
||||
for _, pack := range r.SrcPackages {
|
||||
if !IsRaspbianPackage(pack.Name, pack.Version) {
|
||||
srcPacks[pack.Name] = pack
|
||||
}
|
||||
}
|
||||
|
||||
r.Packages = packs
|
||||
r.SrcPackages = srcPacks
|
||||
|
||||
return &r
|
||||
}
|
||||
|
||||
// ClearFields clears a given fields of ScanResult
|
||||
func (r ScanResult) ClearFields(targetTagNames []string) ScanResult {
|
||||
if len(targetTagNames) == 0 {
|
||||
return r
|
||||
}
|
||||
target := map[string]bool{}
|
||||
for _, n := range targetTagNames {
|
||||
target[strings.ToLower(n)] = true
|
||||
}
|
||||
t := reflect.ValueOf(r).Type()
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
f := t.Field(i)
|
||||
jsonValue := strings.Split(f.Tag.Get("json"), ",")[0]
|
||||
if ok := target[strings.ToLower(jsonValue)]; ok {
|
||||
vv := reflect.New(f.Type).Elem().Interface()
|
||||
reflect.ValueOf(&r).Elem().FieldByName(f.Name).Set(reflect.ValueOf(vv))
|
||||
}
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// CheckEOL checks the EndOfLife of the OS
|
||||
func (r *ScanResult) CheckEOL() {
|
||||
switch r.Family {
|
||||
case constant.ServerTypePseudo, constant.Raspbian:
|
||||
return
|
||||
}
|
||||
|
||||
eol, found := config.GetEOL(r.Family, r.Release)
|
||||
if !found {
|
||||
r.Warnings = append(r.Warnings,
|
||||
fmt.Sprintf("Failed to check EOL. Register the issue to https://github.com/future-architect/vuls/issues with the information in `Family: %s Release: %s`",
|
||||
r.Family, r.Release))
|
||||
return
|
||||
}
|
||||
|
||||
now := time.Now()
|
||||
if eol.IsStandardSupportEnded(now) {
|
||||
r.Warnings = append(r.Warnings, "Standard OS support is EOL(End-of-Life). Purchase extended support if available or Upgrading your OS is strongly recommended.")
|
||||
if eol.ExtendedSupportUntil.IsZero() {
|
||||
return
|
||||
}
|
||||
if !eol.IsExtendedSuppportEnded(now) {
|
||||
r.Warnings = append(r.Warnings,
|
||||
fmt.Sprintf("Extended support available until %s. Check the vendor site.",
|
||||
eol.ExtendedSupportUntil.Format("2006-01-02")))
|
||||
} else {
|
||||
r.Warnings = append(r.Warnings,
|
||||
"Extended support is also EOL. There are many Vulnerabilities that are not detected, Upgrading your OS strongly recommended.")
|
||||
}
|
||||
} else if !eol.StandardSupportUntil.IsZero() &&
|
||||
now.AddDate(0, 3, 0).After(eol.StandardSupportUntil) {
|
||||
r.Warnings = append(r.Warnings,
|
||||
fmt.Sprintf("Standard OS support will be end in 3 months. EOL date: %s",
|
||||
eol.StandardSupportUntil.Format("2006-01-02")))
|
||||
}
|
||||
}
|
||||
|
||||
// SortForJSONOutput sort list elements in the ScanResult to diff in integration-test
|
||||
func (r *ScanResult) SortForJSONOutput() {
|
||||
for k, v := range r.Packages {
|
||||
sort.Slice(v.AffectedProcs, func(i, j int) bool {
|
||||
return v.AffectedProcs[i].PID < v.AffectedProcs[j].PID
|
||||
})
|
||||
sort.Slice(v.NeedRestartProcs, func(i, j int) bool {
|
||||
return v.NeedRestartProcs[i].PID < v.NeedRestartProcs[j].PID
|
||||
})
|
||||
r.Packages[k] = v
|
||||
}
|
||||
for i, v := range r.LibraryScanners {
|
||||
sort.Slice(v.Libs, func(i, j int) bool {
|
||||
switch strings.Compare(v.Libs[i].Name, v.Libs[j].Name) {
|
||||
case -1:
|
||||
return true
|
||||
case 1:
|
||||
return false
|
||||
}
|
||||
return v.Libs[i].Version < v.Libs[j].Version
|
||||
|
||||
})
|
||||
r.LibraryScanners[i] = v
|
||||
}
|
||||
|
||||
for k, v := range r.ScannedCves {
|
||||
sort.Slice(v.AffectedPackages, func(i, j int) bool {
|
||||
return v.AffectedPackages[i].Name < v.AffectedPackages[j].Name
|
||||
})
|
||||
sort.Slice(v.DistroAdvisories, func(i, j int) bool {
|
||||
return v.DistroAdvisories[i].AdvisoryID < v.DistroAdvisories[j].AdvisoryID
|
||||
})
|
||||
sort.Slice(v.Exploits, func(i, j int) bool {
|
||||
return v.Exploits[i].URL < v.Exploits[j].URL
|
||||
})
|
||||
sort.Slice(v.Metasploits, func(i, j int) bool {
|
||||
return v.Metasploits[i].Name < v.Metasploits[j].Name
|
||||
})
|
||||
sort.Slice(v.Mitigations, func(i, j int) bool {
|
||||
return v.Mitigations[i].URL < v.Mitigations[j].URL
|
||||
})
|
||||
|
||||
v.CveContents.Sort()
|
||||
|
||||
sort.Slice(v.AlertDict.USCERT, func(i, j int) bool {
|
||||
return v.AlertDict.USCERT[i].Title < v.AlertDict.USCERT[j].Title
|
||||
})
|
||||
sort.Slice(v.AlertDict.JPCERT, func(i, j int) bool {
|
||||
return v.AlertDict.JPCERT[i].Title < v.AlertDict.JPCERT[j].Title
|
||||
})
|
||||
sort.Slice(v.AlertDict.CISA, func(i, j int) bool {
|
||||
return v.AlertDict.CISA[i].Title < v.AlertDict.CISA[j].Title
|
||||
})
|
||||
r.ScannedCves[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
// CweDict is a dictionary for CWE
|
||||
type CweDict map[string]CweDictEntry
|
||||
|
||||
// Get the name, url, top10URL for the specified cweID, lang
|
||||
func (c CweDict) Get(cweID, lang string) (name, url, top10Rank, top10URL, cweTop25Rank, cweTop25URL, sansTop25Rank, sansTop25URL string) {
|
||||
cweNum := strings.TrimPrefix(cweID, "CWE-")
|
||||
switch config.Conf.Lang {
|
||||
switch lang {
|
||||
case "ja":
|
||||
if dict, ok := c[cweNum]; ok && dict.OwaspTopTen2017 != "" {
|
||||
top10Rank = dict.OwaspTopTen2017
|
||||
@@ -117,414 +491,3 @@ type CweDictEntry struct {
|
||||
CweTopTwentyfive2019 string `json:"cweTopTwentyfive2019"`
|
||||
SansTopTwentyfive string `json:"sansTopTwentyfive"`
|
||||
}
|
||||
|
||||
// Kernel has the Release, version and whether need restart
|
||||
type Kernel struct {
|
||||
Release string `json:"release"`
|
||||
Version string `json:"version"`
|
||||
RebootRequired bool `json:"rebootRequired"`
|
||||
}
|
||||
|
||||
// FilterByCvssOver is filter function.
|
||||
func (r ScanResult) FilterByCvssOver(over float64) ScanResult {
|
||||
filtered := r.ScannedCves.Find(func(v VulnInfo) bool {
|
||||
if over <= v.MaxCvssScore().Value.Score {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
r.ScannedCves = filtered
|
||||
return r
|
||||
}
|
||||
|
||||
// FilterIgnoreCves is filter function.
|
||||
func (r ScanResult) FilterIgnoreCves() ScanResult {
|
||||
ignoreCves := []string{}
|
||||
if len(r.Container.Name) == 0 {
|
||||
//TODO pass by args
|
||||
ignoreCves = config.Conf.Servers[r.ServerName].IgnoreCves
|
||||
} else {
|
||||
//TODO pass by args
|
||||
if s, ok := config.Conf.Servers[r.ServerName]; ok {
|
||||
if con, ok := s.Containers[r.Container.Name]; ok {
|
||||
ignoreCves = con.IgnoreCves
|
||||
} else {
|
||||
return r
|
||||
}
|
||||
} else {
|
||||
util.Log.Errorf("%s is not found in config.toml",
|
||||
r.ServerName)
|
||||
return r
|
||||
}
|
||||
}
|
||||
|
||||
filtered := r.ScannedCves.Find(func(v VulnInfo) bool {
|
||||
for _, c := range ignoreCves {
|
||||
if v.CveID == c {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
r.ScannedCves = filtered
|
||||
return r
|
||||
}
|
||||
|
||||
// FilterUnfixed is filter function.
|
||||
func (r ScanResult) FilterUnfixed(ignoreUnfixed bool) ScanResult {
|
||||
if !ignoreUnfixed {
|
||||
return r
|
||||
}
|
||||
filtered := r.ScannedCves.Find(func(v VulnInfo) bool {
|
||||
// Report cves detected by CPE because Vuls can't know 'fixed' or 'unfixed'
|
||||
if len(v.CpeURIs) != 0 {
|
||||
return true
|
||||
}
|
||||
NotFixedAll := true
|
||||
for _, p := range v.AffectedPackages {
|
||||
NotFixedAll = NotFixedAll && p.NotFixedYet
|
||||
}
|
||||
return !NotFixedAll
|
||||
})
|
||||
r.ScannedCves = filtered
|
||||
return r
|
||||
}
|
||||
|
||||
// FilterIgnorePkgs is filter function.
|
||||
func (r ScanResult) FilterIgnorePkgs() ScanResult {
|
||||
var ignorePkgsRegexps []string
|
||||
if len(r.Container.Name) == 0 {
|
||||
//TODO pass by args
|
||||
ignorePkgsRegexps = config.Conf.Servers[r.ServerName].IgnorePkgsRegexp
|
||||
} else {
|
||||
if s, ok := config.Conf.Servers[r.ServerName]; ok {
|
||||
if con, ok := s.Containers[r.Container.Name]; ok {
|
||||
ignorePkgsRegexps = con.IgnorePkgsRegexp
|
||||
} else {
|
||||
return r
|
||||
}
|
||||
} else {
|
||||
util.Log.Errorf("%s is not found in config.toml",
|
||||
r.ServerName)
|
||||
return r
|
||||
}
|
||||
}
|
||||
|
||||
regexps := []*regexp.Regexp{}
|
||||
for _, pkgRegexp := range ignorePkgsRegexps {
|
||||
re, err := regexp.Compile(pkgRegexp)
|
||||
if err != nil {
|
||||
util.Log.Errorf("Failed to parse %s. err: %+v", pkgRegexp, err)
|
||||
continue
|
||||
} else {
|
||||
regexps = append(regexps, re)
|
||||
}
|
||||
}
|
||||
if len(regexps) == 0 {
|
||||
return r
|
||||
}
|
||||
|
||||
filtered := r.ScannedCves.Find(func(v VulnInfo) bool {
|
||||
if len(v.AffectedPackages) == 0 {
|
||||
return true
|
||||
}
|
||||
for _, p := range v.AffectedPackages {
|
||||
match := false
|
||||
for _, re := range regexps {
|
||||
if re.MatchString(p.Name) {
|
||||
match = true
|
||||
}
|
||||
}
|
||||
if !match {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
|
||||
r.ScannedCves = filtered
|
||||
return r
|
||||
}
|
||||
|
||||
// FilterInactiveWordPressLibs is filter function.
|
||||
func (r ScanResult) FilterInactiveWordPressLibs(detectInactive bool) ScanResult {
|
||||
if detectInactive {
|
||||
return r
|
||||
}
|
||||
|
||||
filtered := r.ScannedCves.Find(func(v VulnInfo) bool {
|
||||
if len(v.WpPackageFixStats) == 0 {
|
||||
return true
|
||||
}
|
||||
// Ignore if all libs in this vulnInfo inactive
|
||||
for _, wp := range v.WpPackageFixStats {
|
||||
if p, ok := r.WordPressPackages.Find(wp.Name); ok {
|
||||
if p.Status != Inactive {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
r.ScannedCves = filtered
|
||||
return r
|
||||
}
|
||||
|
||||
// ReportFileName returns the filename on localhost without extension
|
||||
func (r ScanResult) ReportFileName() (name string) {
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
return fmt.Sprintf("%s", r.ServerName)
|
||||
}
|
||||
return fmt.Sprintf("%s@%s", r.Container.Name, r.ServerName)
|
||||
}
|
||||
|
||||
// ReportKeyName returns the name of key on S3, Azure-Blob without extension
|
||||
func (r ScanResult) ReportKeyName() (name string) {
|
||||
timestr := r.ScannedAt.Format(time.RFC3339)
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
return fmt.Sprintf("%s/%s", timestr, r.ServerName)
|
||||
}
|
||||
return fmt.Sprintf("%s/%s@%s", timestr, r.Container.Name, r.ServerName)
|
||||
}
|
||||
|
||||
// ServerInfo returns server name one line
|
||||
func (r ScanResult) ServerInfo() string {
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
return fmt.Sprintf("%s (%s%s)",
|
||||
r.FormatServerName(), r.Family, r.Release)
|
||||
}
|
||||
return fmt.Sprintf(
|
||||
"%s (%s%s) on %s",
|
||||
r.FormatServerName(),
|
||||
r.Family,
|
||||
r.Release,
|
||||
r.ServerName,
|
||||
)
|
||||
}
|
||||
|
||||
// ServerInfoTui returns server information for TUI sidebar
|
||||
func (r ScanResult) ServerInfoTui() string {
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
line := fmt.Sprintf("%s (%s%s)",
|
||||
r.ServerName, r.Family, r.Release)
|
||||
if len(r.Warnings) != 0 {
|
||||
line = "[Warn] " + line
|
||||
}
|
||||
if r.RunningKernel.RebootRequired {
|
||||
return "[Reboot] " + line
|
||||
}
|
||||
return line
|
||||
}
|
||||
|
||||
fmtstr := "|-- %s (%s%s)"
|
||||
if r.RunningKernel.RebootRequired {
|
||||
fmtstr = "|-- [Reboot] %s (%s%s)"
|
||||
}
|
||||
return fmt.Sprintf(fmtstr, r.Container.Name, r.Family, r.Release)
|
||||
}
|
||||
|
||||
// FormatServerName returns server and container name
|
||||
func (r ScanResult) FormatServerName() (name string) {
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
name = r.ServerName
|
||||
} else {
|
||||
name = fmt.Sprintf("%s@%s",
|
||||
r.Container.Name, r.ServerName)
|
||||
}
|
||||
if r.RunningKernel.RebootRequired {
|
||||
name = "[Reboot Required] " + name
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// FormatTextReportHeader returns header of text report
|
||||
func (r ScanResult) FormatTextReportHeader() string {
|
||||
var buf bytes.Buffer
|
||||
for i := 0; i < len(r.ServerInfo()); i++ {
|
||||
buf.WriteString("=")
|
||||
}
|
||||
|
||||
pkgs := r.FormatUpdatablePacksSummary()
|
||||
if 0 < len(r.WordPressPackages) {
|
||||
pkgs = fmt.Sprintf("%s, %d WordPress pkgs", pkgs, len(r.WordPressPackages))
|
||||
}
|
||||
if 0 < len(r.LibraryScanners) {
|
||||
pkgs = fmt.Sprintf("%s, %d libs", pkgs, r.LibraryScanners.Total())
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s\n%s\n%s, %s, %s, %s, %s\n%s\n",
|
||||
r.ServerInfo(),
|
||||
buf.String(),
|
||||
r.ScannedCves.FormatCveSummary(),
|
||||
r.ScannedCves.FormatFixedStatus(r.Packages),
|
||||
r.FormatExploitCveSummary(),
|
||||
r.FormatMetasploitCveSummary(),
|
||||
r.FormatAlertSummary(),
|
||||
pkgs)
|
||||
}
|
||||
|
||||
// FormatUpdatablePacksSummary returns a summary of updatable packages
|
||||
func (r ScanResult) FormatUpdatablePacksSummary() string {
|
||||
if !r.isDisplayUpdatableNum() {
|
||||
return fmt.Sprintf("%d installed", len(r.Packages))
|
||||
}
|
||||
|
||||
nUpdatable := 0
|
||||
for _, p := range r.Packages {
|
||||
if p.NewVersion == "" {
|
||||
continue
|
||||
}
|
||||
if p.Version != p.NewVersion || p.Release != p.NewRelease {
|
||||
nUpdatable++
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%d installed, %d updatable",
|
||||
len(r.Packages),
|
||||
nUpdatable)
|
||||
}
|
||||
|
||||
// FormatExploitCveSummary returns a summary of exploit cve
|
||||
func (r ScanResult) FormatExploitCveSummary() string {
|
||||
nExploitCve := 0
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if 0 < len(vuln.Exploits) {
|
||||
nExploitCve++
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%d poc", nExploitCve)
|
||||
}
|
||||
|
||||
// FormatMetasploitCveSummary returns a summary of exploit cve
|
||||
func (r ScanResult) FormatMetasploitCveSummary() string {
|
||||
nMetasploitCve := 0
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if 0 < len(vuln.Metasploits) {
|
||||
nMetasploitCve++
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%d exploits", nMetasploitCve)
|
||||
}
|
||||
|
||||
// FormatAlertSummary returns a summary of CERT alerts
|
||||
func (r ScanResult) FormatAlertSummary() string {
|
||||
jaCnt := 0
|
||||
enCnt := 0
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if len(vuln.AlertDict.En) > 0 {
|
||||
enCnt += len(vuln.AlertDict.En)
|
||||
}
|
||||
if len(vuln.AlertDict.Ja) > 0 {
|
||||
jaCnt += len(vuln.AlertDict.Ja)
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("en: %d, ja: %d alerts", enCnt, jaCnt)
|
||||
}
|
||||
|
||||
func (r ScanResult) isDisplayUpdatableNum() bool {
|
||||
if r.Family == config.FreeBSD {
|
||||
return false
|
||||
}
|
||||
|
||||
var mode config.ScanMode
|
||||
//TODO pass by args
|
||||
s, _ := config.Conf.Servers[r.ServerName]
|
||||
mode = s.Mode
|
||||
|
||||
if mode.IsOffline() {
|
||||
return false
|
||||
}
|
||||
if mode.IsFastRoot() || mode.IsDeep() {
|
||||
return true
|
||||
}
|
||||
if mode.IsFast() {
|
||||
switch r.Family {
|
||||
case config.RedHat,
|
||||
config.Oracle,
|
||||
config.Debian,
|
||||
config.Ubuntu,
|
||||
config.Raspbian:
|
||||
return false
|
||||
default:
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsContainer returns whether this ServerInfo is about container
|
||||
func (r ScanResult) IsContainer() bool {
|
||||
return 0 < len(r.Container.ContainerID)
|
||||
}
|
||||
|
||||
// IsDeepScanMode checks if the scan mode is deep scan mode.
|
||||
func (r ScanResult) IsDeepScanMode() bool {
|
||||
for _, s := range r.Config.Scan.Servers {
|
||||
if ok := s.Mode.IsDeep(); ok {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Container has Container information
|
||||
type Container struct {
|
||||
ContainerID string `json:"containerID"`
|
||||
Name string `json:"name"`
|
||||
Image string `json:"image"`
|
||||
Type string `json:"type"`
|
||||
UUID string `json:"uuid"`
|
||||
}
|
||||
|
||||
// Platform has platform information
|
||||
type Platform struct {
|
||||
Name string `json:"name"` // aws or azure or gcp or other...
|
||||
InstanceID string `json:"instanceID"`
|
||||
}
|
||||
|
||||
// RemoveRaspbianPackFromResult is for Raspberry Pi and removes the Raspberry Pi dedicated package from ScanResult.
|
||||
func (r ScanResult) RemoveRaspbianPackFromResult() ScanResult {
|
||||
if r.Family != config.Raspbian {
|
||||
return r
|
||||
}
|
||||
|
||||
result := r
|
||||
packs := make(Packages)
|
||||
for _, pack := range r.Packages {
|
||||
if !IsRaspbianPackage(pack.Name, pack.Version) {
|
||||
packs[pack.Name] = pack
|
||||
}
|
||||
}
|
||||
srcPacks := make(SrcPackages)
|
||||
for _, pack := range r.SrcPackages {
|
||||
if !IsRaspbianPackage(pack.Name, pack.Version) {
|
||||
srcPacks[pack.Name] = pack
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
result.Packages = packs
|
||||
result.SrcPackages = srcPacks
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// ClearFields clears a given fields of ScanResult
|
||||
func (r ScanResult) ClearFields(targetTagNames []string) ScanResult {
|
||||
if len(targetTagNames) == 0 {
|
||||
return r
|
||||
}
|
||||
target := map[string]bool{}
|
||||
for _, n := range targetTagNames {
|
||||
target[strings.ToLower(n)] = true
|
||||
}
|
||||
t := reflect.ValueOf(r).Type()
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
f := t.Field(i)
|
||||
jsonValue := strings.Split(f.Tag.Get("json"), ",")[0]
|
||||
if ok := target[strings.ToLower(jsonValue)]; ok {
|
||||
vv := reflect.New(f.Type).Elem().Interface()
|
||||
reflect.ValueOf(&r).Elem().FieldByName(f.Name).Set(reflect.ValueOf(vv))
|
||||
}
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
(File diff suppressed because it is too large)

models/utils.go (195 lines changed)
@@ -1,3 +1,4 @@
+//go:build !scanner
// +build !scanner

package models
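The added //go:build line is the Go 1.17+ build-constraint syntax; the legacy // +build comment is kept so older toolchains apply the same constraint. A minimal sketch of a file guarded this way:

//go:build !scanner
// +build !scanner

// Compiled only when the "scanner" build tag is NOT set,
// e.g. a plain `go build` without `-tags scanner`.
package models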
@@ -5,116 +6,120 @@ package models
|
||||
import (
|
||||
"strings"
|
||||
|
||||
cvedict "github.com/kotakanbe/go-cve-dictionary/models"
|
||||
cvedict "github.com/vulsio/go-cve-dictionary/models"
|
||||
)
|
||||
|
||||
// ConvertJvnToModel convert JVN to CveContent
|
||||
func ConvertJvnToModel(cveID string, jvn *cvedict.Jvn) *CveContent {
|
||||
if jvn == nil {
|
||||
return nil
|
||||
}
|
||||
// var cpes = []Cpe{}
|
||||
// for _, c := range jvn.Cpes {
|
||||
// cpes = append(cpes, Cpe{
|
||||
// FormattedString: c.FormattedString,
|
||||
// URI: c.URI,
|
||||
// })
|
||||
// }
|
||||
func ConvertJvnToModel(cveID string, jvns []cvedict.Jvn) []CveContent {
|
||||
cves := []CveContent{}
|
||||
for _, jvn := range jvns {
|
||||
// cpes := []Cpe{}
|
||||
// for _, c := range jvn.Cpes {
|
||||
// cpes = append(cpes, Cpe{
|
||||
// FormattedString: c.FormattedString,
|
||||
// URI: c.URI,
|
||||
// })
|
||||
// }
|
||||
|
||||
refs := []Reference{}
|
||||
for _, r := range jvn.References {
|
||||
refs = append(refs, Reference{
|
||||
Link: r.Link,
|
||||
Source: r.Source,
|
||||
})
|
||||
}
|
||||
refs := []Reference{}
|
||||
for _, r := range jvn.References {
|
||||
refs = append(refs, Reference{
|
||||
Link: r.Link,
|
||||
Source: r.Source,
|
||||
})
|
||||
}
|
||||
|
||||
return &CveContent{
|
||||
Type: Jvn,
|
||||
CveID: cveID,
|
||||
Title: jvn.Title,
|
||||
Summary: jvn.Summary,
|
||||
Cvss2Score: jvn.Cvss2.BaseScore,
|
||||
Cvss2Vector: jvn.Cvss2.VectorString,
|
||||
Cvss2Severity: jvn.Cvss2.Severity,
|
||||
Cvss3Score: jvn.Cvss3.BaseScore,
|
||||
Cvss3Vector: jvn.Cvss3.VectorString,
|
||||
Cvss3Severity: jvn.Cvss3.BaseSeverity,
|
||||
SourceLink: jvn.JvnLink,
|
||||
// Cpes: cpes,
|
||||
References: refs,
|
||||
Published: jvn.PublishedDate,
|
||||
LastModified: jvn.LastModifiedDate,
|
||||
cve := CveContent{
|
||||
Type: Jvn,
|
||||
CveID: cveID,
|
||||
Title: jvn.Title,
|
||||
Summary: jvn.Summary,
|
||||
Cvss2Score: jvn.Cvss2.BaseScore,
|
||||
Cvss2Vector: jvn.Cvss2.VectorString,
|
||||
Cvss2Severity: jvn.Cvss2.Severity,
|
||||
Cvss3Score: jvn.Cvss3.BaseScore,
|
||||
Cvss3Vector: jvn.Cvss3.VectorString,
|
||||
Cvss3Severity: jvn.Cvss3.BaseSeverity,
|
||||
SourceLink: jvn.JvnLink,
|
||||
// Cpes: cpes,
|
||||
References: refs,
|
||||
Published: jvn.PublishedDate,
|
||||
LastModified: jvn.LastModifiedDate,
|
||||
}
|
||||
cves = append(cves, cve)
|
||||
}
|
||||
return cves
|
||||
}
|
||||
|
||||
// ConvertNvdToModel convert NVD to CveContent
func ConvertNvdToModel(cveID string, nvds []cvedict.Nvd) ([]CveContent, []Exploit, []Mitigation) {
	cves := []CveContent{}
	refs := []Reference{}
	exploits := []Exploit{}
	mitigations := []Mitigation{}
	for _, nvd := range nvds {
		// cpes := []Cpe{}
		// for _, c := range nvd.Cpes {
		// 	cpes = append(cpes, Cpe{
		// 		FormattedString: c.FormattedString,
		// 		URI:             c.URI,
		// 	})
		// }

		for _, r := range nvd.References {
			var tags []string
			if 0 < len(r.Tags) {
				tags = strings.Split(r.Tags, ",")
			}
			refs = append(refs, Reference{
				Link:   r.Link,
				Source: r.Source,
				Tags:   tags,
			})
			if strings.Contains(r.Tags, "Exploit") {
				exploits = append(exploits, Exploit{
					//TODO Add const to here
					// https://github.com/vulsio/go-exploitdb/blob/master/models/exploit.go#L13-L18
					ExploitType: "nvd",
					URL:         r.Link,
				})
			}
			if strings.Contains(r.Tags, "Mitigation") {
				mitigations = append(mitigations, Mitigation{
					CveContentType: Nvd,
					URL:            r.Link,
				})
			}
		}

		cweIDs := []string{}
		for _, cid := range nvd.Cwes {
			cweIDs = append(cweIDs, cid.CweID)
		}

		desc := []string{}
		for _, d := range nvd.Descriptions {
			desc = append(desc, d.Value)
		}

		cve := CveContent{
			Type:          Nvd,
			CveID:         cveID,
			Summary:       strings.Join(desc, "\n"),
			Cvss2Score:    nvd.Cvss2.BaseScore,
			Cvss2Vector:   nvd.Cvss2.VectorString,
			Cvss2Severity: nvd.Cvss2.Severity,
			Cvss3Score:    nvd.Cvss3.BaseScore,
			Cvss3Vector:   nvd.Cvss3.VectorString,
			Cvss3Severity: nvd.Cvss3.BaseSeverity,
			SourceLink:    "https://nvd.nist.gov/vuln/detail/" + cveID,
			// Cpes:       cpes,
			CweIDs:       cweIDs,
			References:   refs,
			Published:    nvd.PublishedDate,
			LastModified: nvd.LastModifiedDate,
		}
		cves = append(cves, cve)
	}

	return cves, exploits, mitigations
}
@@ -3,11 +3,12 @@ package models
import (
	"bytes"
	"fmt"
	"regexp"
	"sort"
	"strings"
	"time"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/logging"
	exploitmodels "github.com/vulsio/go-exploitdb/models"
)
@@ -26,15 +27,109 @@ func (v VulnInfos) Find(f func(VulnInfo) bool) VulnInfos {
	return filtered
}

// FilterByCvssOver return scored vulnerabilities
func (v VulnInfos) FilterByCvssOver(over float64) (_ VulnInfos, nFiltered int) {
	return v.Find(func(v VulnInfo) bool {
		if over <= v.MaxCvssScore().Value.Score {
			return true
		}
		nFiltered++
		return false
	}), nFiltered
}

// FilterByConfidenceOver scored vulnerabilities
func (v VulnInfos) FilterByConfidenceOver(over int) (_ VulnInfos, nFiltered int) {
	return v.Find(func(v VulnInfo) bool {
		for _, c := range v.Confidences {
			if over <= c.Score {
				return true
			}
		}
		nFiltered++
		return false
	}), nFiltered
}

// FilterIgnoreCves filter function.
func (v VulnInfos) FilterIgnoreCves(ignoreCveIDs []string) (_ VulnInfos, nFiltered int) {
	return v.Find(func(v VulnInfo) bool {
		for _, c := range ignoreCveIDs {
			if v.CveID == c {
				nFiltered++
				return false
			}
		}
		return true
	}), nFiltered
}

// FilterUnfixed filter unfixed CVE-IDs
func (v VulnInfos) FilterUnfixed(ignoreUnfixed bool) (_ VulnInfos, nFiltered int) {
	if !ignoreUnfixed {
		return v, 0
	}
	return v.Find(func(v VulnInfo) bool {
		// Report cves detected by CPE because Vuls can't know 'fixed' or 'unfixed'
		if len(v.CpeURIs) != 0 {
			return true
		}
		NotFixedAll := true
		for _, p := range v.AffectedPackages {
			NotFixedAll = NotFixedAll && p.NotFixedYet
		}
		if NotFixedAll {
			nFiltered++
		}
		return !NotFixedAll
	}), nFiltered
}

// FilterIgnorePkgs is filter function.
func (v VulnInfos) FilterIgnorePkgs(ignorePkgsRegexps []string) (_ VulnInfos, nFiltered int) {
	regexps := []*regexp.Regexp{}
	for _, pkgRegexp := range ignorePkgsRegexps {
		re, err := regexp.Compile(pkgRegexp)
		if err != nil {
			logging.Log.Warnf("Failed to parse %s. err: %+v", pkgRegexp, err)
			continue
		}
		regexps = append(regexps, re)
	}
	if len(regexps) == 0 {
		return v, 0
	}

	return v.Find(func(v VulnInfo) bool {
		if len(v.AffectedPackages) == 0 {
			return true
		}
		for _, p := range v.AffectedPackages {
			match := false
			for _, re := range regexps {
				if re.MatchString(p.Name) {
					match = true
				}
			}
			if !match {
				return true
			}
		}
		nFiltered++
		return false
	}), nFiltered
}

// FindScoredVulns return scored vulnerabilities
func (v VulnInfos) FindScoredVulns() (_ VulnInfos, nFiltered int) {
	return v.Find(func(vv VulnInfo) bool {
		if 0 < vv.MaxCvss2Score().Value.Score ||
			0 < vv.MaxCvss3Score().Value.Score {
			return true
		}
		nFiltered++
		return false
	}), nFiltered
}

// ToSortedSlice returns slice of VulnInfos that is sorted by Score, CVE-ID
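A minimal sketch, not part of the diff, of how these refactored filters might be chained from reporting code now that each one also returns the number of filtered entries; the vinfos variable and the threshold values are assumptions for illustration:

	// Hypothetical caller: apply the filters in sequence and log how many
	// VulnInfo entries each step removed.
	vinfos := result.ScannedCves // assumed to be models.VulnInfos
	vinfos, nCvss := vinfos.FilterByCvssOver(7.0)
	vinfos, nConf := vinfos.FilterByConfidenceOver(80)
	vinfos, nIgnored := vinfos.FilterIgnoreCves([]string{"CVE-2021-0001"})
	logging.Log.Infof("filtered: cvss=%d confidence=%d ignored=%d", nCvss, nConf, nIgnored)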
@@ -80,14 +175,15 @@ func (v VulnInfos) CountGroupBySeverity() map[string]int {
// FormatCveSummary summarize the number of CVEs group by CVSSv2 Severity
func (v VulnInfos) FormatCveSummary() string {
	m := v.CountGroupBySeverity()

	line := fmt.Sprintf("Total: %d (Critical:%d High:%d Medium:%d Low:%d ?:%d)",
		m["Critical"]+m["High"]+m["Medium"]+m["Low"]+m["Unknown"],
		m["Critical"], m["High"], m["Medium"], m["Low"], m["Unknown"])

	nPlus, nMinus := v.CountDiff()
	if 0 < nPlus || 0 < nMinus {
		line = fmt.Sprintf("%s +%d -%d", line, nPlus, nMinus)
	}
	return line
}
@@ -105,6 +201,18 @@ func (v VulnInfos) FormatFixedStatus(packs Packages) string {
	return fmt.Sprintf("%d/%d Fixed", fixed, total)
}

// CountDiff counts the number of added/removed CVE-ID
func (v VulnInfos) CountDiff() (nPlus int, nMinus int) {
	for _, vInfo := range v {
		if vInfo.DiffStatus == DiffPlus {
			nPlus++
		} else if vInfo.DiffStatus == DiffMinus {
			nMinus++
		}
	}
	return
}

// PackageFixStatuses is a list of PackageStatus
type PackageFixStatuses []PackageFixStatus

@@ -133,7 +241,6 @@ func (ps PackageFixStatuses) Sort() {
	sort.Slice(ps, func(i, j int) bool {
		return ps[i].Name < ps[j].Name
	})
}

// PackageFixStatus has name and other status about the package
@@ -159,8 +266,8 @@ type VulnInfo struct {
	GitHubSecurityAlerts GitHubSecurityAlerts `json:"gitHubSecurityAlerts,omitempty"`
	WpPackageFixStats    WpPackageFixStats    `json:"wpPackageFixStats,omitempty"`
	LibraryFixedIns      LibraryFixedIns      `json:"libraryFixedIns,omitempty"`

	VulnType   string     `json:"vulnType,omitempty"`
	DiffStatus DiffStatus `json:"diffStatus,omitempty"`
}

// Alert has CERT alert information
@@ -236,33 +343,68 @@ func (g WpPackages) Add(pkg WpPackage) WpPackages {
	return append(g, pkg)
}

// DiffStatus keeps a comparison with the previous detection results for this CVE
type DiffStatus string

const (
	// DiffPlus is newly detected CVE
	DiffPlus = DiffStatus("+")

	// DiffMinus is resolved CVE
	DiffMinus = DiffStatus("-")
)

// CveIDDiffFormat format CVE-ID for diff mode
func (v VulnInfo) CveIDDiffFormat() string {
	if v.DiffStatus != "" {
		return fmt.Sprintf("%s %s", v.DiffStatus, v.CveID)
	}
	return v.CveID
}

// Titles returns title (TUI)
func (v VulnInfo) Titles(lang, myFamily string) (values []CveContentStr) {
	if lang == "ja" {
		if conts, found := v.CveContents[Jvn]; found {
			for _, cont := range conts {
				if cont.Title != "" {
					values = append(values, CveContentStr{Jvn, cont.Title})
				}
			}
		}
	}

	// RedHat API has one line title.
	if conts, found := v.CveContents[RedHatAPI]; found {
		for _, cont := range conts {
			if cont.Title != "" {
				values = append(values, CveContentStr{RedHatAPI, cont.Title})
			}
		}
	}

	// GitHub security alerts has a title.
	if conts, found := v.CveContents[GitHub]; found {
		for _, cont := range conts {
			if cont.Title != "" {
				values = append(values, CveContentStr{GitHub, cont.Title})
			}
		}
	}

	order := CveContentTypes{Trivy, Nvd, NewCveContentType(myFamily)}
	order = append(order, AllCveContetTypes.Except(append(order, Jvn)...)...)
	for _, ctype := range order {
		if conts, found := v.CveContents[ctype]; found {
			for _, cont := range conts {
				if cont.Summary != "" {
					summary := strings.Replace(cont.Summary, "\n", " ", -1)
					values = append(values, CveContentStr{
						Type:  ctype,
						Value: summary,
					})
				}
			}
		}
	}
@@ -285,23 +427,31 @@ func (v VulnInfo) Titles(lang, myFamily string) (values []CveContentStr) {
// Summaries returns summaries
func (v VulnInfo) Summaries(lang, myFamily string) (values []CveContentStr) {
	if lang == "ja" {
		if conts, found := v.CveContents[Jvn]; found {
			for _, cont := range conts {
				if cont.Summary != "" {
					summary := cont.Title
					summary += "\n" + strings.Replace(
						strings.Replace(cont.Summary, "\n", " ", -1), "\r", " ", -1)
					values = append(values, CveContentStr{Jvn, summary})
				}
			}
		}
	}

	order := CveContentTypes{Trivy, NewCveContentType(myFamily), Nvd, GitHub}
	order = append(order, AllCveContetTypes.Except(append(order, Jvn)...)...)
	for _, ctype := range order {
		if conts, found := v.CveContents[ctype]; found {
			for _, cont := range conts {
				if cont.Summary != "" {
					summary := strings.Replace(cont.Summary, "\n", " ", -1)
					values = append(values, CveContentStr{
						Type:  ctype,
						Value: summary,
					})
				}
			}
		}
	}

@@ -312,11 +462,15 @@ func (v VulnInfo) Summaries(lang, myFamily string) (values []CveContentStr) {
		})
	}

	if conts, ok := v.CveContents[WpScan]; ok {
		for _, cont := range conts {
			if cont.Title != "" {
				values = append(values, CveContentStr{
					Type:  WpScan,
					Value: cont.Title,
				})
			}
		}
	}

	if len(values) == 0 {
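The pattern repeated throughout these hunks is that CveContents now holds a slice of CveContent per source instead of a single value. A small, hypothetical helper illustrating the new shape (the helper name and its caller are assumptions, not part of the diff):

	// Hypothetical helper: collect the non-empty summaries for one content type
	// under the map-of-slices layout of CveContents.
	func summariesOf(contents map[CveContentType][]CveContent, ctype CveContentType) []string {
		var out []string
		for _, cont := range contents[ctype] {
			if cont.Summary != "" {
				out = append(out, cont.Summary)
			}
		}
		return out
	}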
@@ -333,20 +487,22 @@ func (v VulnInfo) Summaries(lang, myFamily string) (values []CveContentStr) {
func (v VulnInfo) Cvss2Scores() (values []CveContentCvss) {
	order := []CveContentType{RedHatAPI, RedHat, Nvd, Jvn}
	for _, ctype := range order {
		if conts, found := v.CveContents[ctype]; found {
			for _, cont := range conts {
				if cont.Cvss2Score == 0 && cont.Cvss2Severity == "" {
					continue
				}
				// https://nvd.nist.gov/vuln-metrics/cvss
				values = append(values, CveContentCvss{
					Type: ctype,
					Value: Cvss{
						Type:     CVSS2,
						Score:    cont.Cvss2Score,
						Vector:   cont.Cvss2Vector,
						Severity: strings.ToUpper(cont.Cvss2Severity),
					},
				})
			}
		}
	}
	return
@@ -356,34 +512,40 @@ func (v VulnInfo) Cvss2Scores() (values []CveContentCvss) {
func (v VulnInfo) Cvss3Scores() (values []CveContentCvss) {
	order := []CveContentType{RedHatAPI, RedHat, Nvd, Jvn}
	for _, ctype := range order {
		if conts, found := v.CveContents[ctype]; found {
			for _, cont := range conts {
				if cont.Cvss3Score == 0 && cont.Cvss3Severity == "" {
					continue
				}
				// https://nvd.nist.gov/vuln-metrics/cvss
				values = append(values, CveContentCvss{
					Type: ctype,
					Value: Cvss{
						Type:     CVSS3,
						Score:    cont.Cvss3Score,
						Vector:   cont.Cvss3Vector,
						Severity: strings.ToUpper(cont.Cvss3Severity),
					},
				})
			}
		}
	}

	for _, ctype := range []CveContentType{Debian, DebianSecurityTracker, Ubuntu, Amazon, Trivy, GitHub, WpScan} {
		if conts, found := v.CveContents[ctype]; found {
			for _, cont := range conts {
				if cont.Cvss3Severity != "" {
					values = append(values, CveContentCvss{
						Type: ctype,
						Value: Cvss{
							Type:                 CVSS3,
							Score:                severityToCvssScoreRoughly(cont.Cvss3Severity),
							CalculatedBySeverity: true,
							Severity:             strings.ToUpper(cont.Cvss3Severity),
						},
					})
				}
			}
		}
	}
@@ -445,24 +607,28 @@ func (v VulnInfo) MaxCvss2Score() CveContentCvss {

// AttackVector returns attack vector string
func (v VulnInfo) AttackVector() string {
	for _, conts := range v.CveContents {
		for _, cont := range conts {
			if strings.HasPrefix(cont.Cvss2Vector, "AV:N") ||
				strings.Contains(cont.Cvss3Vector, "AV:N") {
				return "AV:N"
			} else if strings.HasPrefix(cont.Cvss2Vector, "AV:A") ||
				strings.Contains(cont.Cvss3Vector, "AV:A") {
				return "AV:A"
			} else if strings.HasPrefix(cont.Cvss2Vector, "AV:L") ||
				strings.Contains(cont.Cvss3Vector, "AV:L") {
				return "AV:L"
			} else if strings.Contains(cont.Cvss3Vector, "AV:P") {
				// no AV:P in CVSS v2
				return "AV:P"
			}
		}
	}
	if conts, found := v.CveContents[DebianSecurityTracker]; found {
		for _, cont := range conts {
			if attackRange, found := cont.Optional["attack range"]; found {
				return attackRange
			}
		}
	}
	return ""
@@ -647,20 +813,26 @@ type Mitigation struct {
	URL string `json:"url,omitempty"`
}

// AlertDict has target cve JPCERT, USCERT and CISA alert data
type AlertDict struct {
	CISA   []Alert `json:"cisa"`
	JPCERT []Alert `json:"jpcert"`
	USCERT []Alert `json:"uscert"`
}

// IsEmpty checks if the content of AlertDict is empty
func (a AlertDict) IsEmpty() bool {
	return len(a.CISA) == 0 && len(a.JPCERT) == 0 && len(a.USCERT) == 0
}

// FormatSource returns which source has this alert
func (a AlertDict) FormatSource() string {
	var s []string
	if len(a.CISA) != 0 {
		s = append(s, "CISA")
	}
	if len(a.USCERT) != 0 || len(a.JPCERT) != 0 {
		s = append(s, "CERT")
	}
	return strings.Join(s, "/")
}
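A small, hypothetical usage sketch of the reworked AlertDict; the empty Alert literal is a placeholder since Alert's fields are not shown in this hunk:

	// Hypothetical caller: only the CISA slice is populated here.
	dict := AlertDict{CISA: []Alert{{}}}
	if !dict.IsEmpty() {
		fmt.Println(dict.FormatSource()) // prints "CISA"
	}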
@@ -704,53 +876,56 @@ func (c Confidence) String() string {
type DetectionMethod string

const (
	// CpeNameMatchStr is a String representation of CpeNameMatch
	CpeNameMatchStr = "CpeNameMatch"

	// NvdExactVersionMatchStr :
	NvdExactVersionMatchStr = "NvdExactVersionMatch"

	// YumUpdateSecurityMatchStr is a String representation of YumUpdateSecurityMatch
	YumUpdateSecurityMatchStr = "YumUpdateSecurityMatch"

	// NvdRoughVersionMatchStr :
	NvdRoughVersionMatchStr = "NvdRoughVersionMatch"

	// NvdVendorProductMatchStr :
	NvdVendorProductMatchStr = "NvdVendorProductMatch"

	// JvnVendorProductMatchStr :
	JvnVendorProductMatchStr = "JvnVendorProductMatch"

	// PkgAuditMatchStr :
	PkgAuditMatchStr = "PkgAuditMatch"

	// OvalMatchStr :
	OvalMatchStr = "OvalMatch"

	// RedHatAPIStr is :
	RedHatAPIStr = "RedHatAPIMatch"

	// DebianSecurityTrackerMatchStr :
	DebianSecurityTrackerMatchStr = "DebianSecurityTrackerMatch"

	// UbuntuAPIMatchStr :
	UbuntuAPIMatchStr = "UbuntuAPIMatch"

	// TrivyMatchStr :
	TrivyMatchStr = "TrivyMatch"

	// ChangelogExactMatchStr :
	ChangelogExactMatchStr = "ChangelogExactMatch"

	// ChangelogRoughMatchStr :
	ChangelogRoughMatchStr = "ChangelogRoughMatch"

	// GitHubMatchStr :
	GitHubMatchStr = "GitHubMatch"

	// WpScanMatchStr :
	WpScanMatchStr = "WpScanMatch"

	// FailedToGetChangelog :
	FailedToGetChangelog = "FailedToGetChangelog"

	// FailedToFindVersionInChangelog :
	FailedToFindVersionInChangelog = "FailedToFindVersionInChangelog"
)

var (
	// CpeNameMatch is a ranking how confident the CVE-ID was detected correctly
	CpeNameMatch = Confidence{100, CpeNameMatchStr, 1}

	// YumUpdateSecurityMatch is a ranking how confident the CVE-ID was detected correctly
	YumUpdateSecurityMatch = Confidence{100, YumUpdateSecurityMatchStr, 2}

	// PkgAuditMatch is a ranking how confident the CVE-ID was detected correctly
	PkgAuditMatch = Confidence{100, PkgAuditMatchStr, 2}

@@ -763,18 +938,33 @@ var (
	// DebianSecurityTrackerMatch ranking how confident the CVE-ID was detected correctly
	DebianSecurityTrackerMatch = Confidence{100, DebianSecurityTrackerMatchStr, 0}

	// UbuntuAPIMatch ranking how confident the CVE-ID was detected correctly
	UbuntuAPIMatch = Confidence{100, UbuntuAPIMatchStr, 0}

	// TrivyMatch ranking how confident the CVE-ID was detected correctly
	TrivyMatch = Confidence{100, TrivyMatchStr, 0}

	// ChangelogExactMatch is a ranking how confident the CVE-ID was detected correctly
	ChangelogExactMatch = Confidence{95, ChangelogExactMatchStr, 3}

	// ChangelogRoughMatch is a ranking how confident the CVE-ID was detected correctly
	ChangelogRoughMatch = Confidence{50, ChangelogRoughMatchStr, 4}

	// GitHubMatch is a ranking how confident the CVE-ID was detected correctly
	GitHubMatch = Confidence{100, GitHubMatchStr, 2}

	// WpScanMatch is a ranking how confident the CVE-ID was detected correctly
	WpScanMatch = Confidence{100, WpScanMatchStr, 0}

	// NvdExactVersionMatch is a ranking how confident the CVE-ID was detected correctly
	NvdExactVersionMatch = Confidence{100, NvdExactVersionMatchStr, 1}

	// NvdRoughVersionMatch is a ranking how confident the CVE-ID was detected correctly
	NvdRoughVersionMatch = Confidence{80, NvdRoughVersionMatchStr, 1}

	// NvdVendorProductMatch is a ranking how confident the CVE-ID was detected correctly
	NvdVendorProductMatch = Confidence{10, NvdVendorProductMatchStr, 9}

	// JvnVendorProductMatch is a ranking how confident the CVE-ID was detected correctly
	JvnVendorProductMatch = Confidence{10, JvnVendorProductMatchStr, 10}
)
File diff suppressed because it is too large.

@@ -1 +0,0 @@
package msf

51  msf/msf.go
@@ -1,51 +0,0 @@
// +build !scanner

package msf

import (
	"github.com/future-architect/vuls/models"
	"github.com/takuzoo3868/go-msfdb/db"
	metasploitmodels "github.com/takuzoo3868/go-msfdb/models"
)

// FillWithMetasploit fills metasploit module information that has in module
func FillWithMetasploit(driver db.DB, r *models.ScanResult) (nMetasploitCve int, err error) {
	if driver == nil {
		return 0, nil
	}
	for cveID, vuln := range r.ScannedCves {
		if cveID == "" {
			continue
		}
		ms := driver.GetModuleByCveID(cveID)
		if len(ms) == 0 {
			continue
		}
		modules := ConvertToModels(ms)
		vuln.Metasploits = modules
		r.ScannedCves[cveID] = vuln
		nMetasploitCve++
	}

	return nMetasploitCve, nil
}

// ConvertToModels converts gost model to vuls model
func ConvertToModels(ms []*metasploitmodels.Metasploit) (modules []models.Metasploit) {
	for _, m := range ms {
		var links []string
		if 0 < len(m.References) {
			for _, u := range m.References {
				links = append(links, u.Link)
			}
		}
		module := models.Metasploit{
			Name:        m.Name,
			Title:       m.Title,
			Description: m.Description,
			URLs:        links,
		}
		modules = append(modules, module)
	}
	return modules
}
@@ -1,12 +1,13 @@
//go:build !scanner
// +build !scanner

package oval

import (
	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/constant"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
)

// Alpine is the struct of Alpine Linux
@@ -15,22 +16,33 @@ type Alpine struct {
}

// NewAlpine creates OVAL client for SUSE
func NewAlpine(cnf config.VulnDictInterface) Alpine {
	return Alpine{
		Base{
			family: constant.Alpine,
			Cnf:    cnf,
		},
	}
}

// FillWithOval returns scan result after updating CVE info by OVAL
func (o Alpine) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
	var relatedDefs ovalResult
	if o.Cnf.IsFetchViaHTTP() {
		if relatedDefs, err = getDefsByPackNameViaHTTP(r, o.Cnf.GetURL()); err != nil {
			return 0, err
		}
	} else {
		driver, err := newOvalDB(o.Cnf)
		if err != nil {
			return 0, err
		}
		defer func() {
			if err := driver.CloseDB(); err != nil {
				logging.Log.Errorf("Failed to close DB. err: %+v", err)
			}
		}()

		if relatedDefs, err = getDefsByPackNameFromOvalDB(driver, r); err != nil {
			return 0, err
		}
@@ -42,18 +54,18 @@ func (o Alpine) FillWithOval(driver db.DB, r *models.ScanResult) (nCVEs int, err
	return len(relatedDefs.entries), nil
}

func (o Alpine) update(r *models.ScanResult, defpacks defPacks) {
	cveID := defpacks.def.Advisory.Cves[0].CveID
	vinfo, ok := r.ScannedCves[cveID]
	if !ok {
		logging.Log.Debugf("%s is newly detected by OVAL", cveID)
		vinfo = models.VulnInfo{
			CveID:       cveID,
			Confidences: []models.Confidence{models.OvalMatch},
		}
	}

	vinfo.AffectedPackages = defpacks.toPackStatuses()
	vinfo.AffectedPackages.Sort()
	r.ScannedCves[cveID] = vinfo
}

272  oval/debian.go
@@ -1,3 +1,4 @@
//go:build !scanner
// +build !scanner

package oval
@@ -7,10 +8,11 @@ import (
	"strings"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/constant"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
	"github.com/future-architect/vuls/util"
	ovalmodels "github.com/vulsio/goval-dictionary/models"
)

// DebianBase is the base struct of Debian and Ubuntu
@@ -18,73 +20,75 @@ type DebianBase struct {
	Base
}

func (o DebianBase) update(r *models.ScanResult, defpacks defPacks) {
	for _, cve := range defpacks.def.Advisory.Cves {
		ovalContent := o.convertToModel(cve.CveID, &defpacks.def)
		if ovalContent == nil {
			continue
		}
		vinfo, ok := r.ScannedCves[cve.CveID]
		if !ok {
			logging.Log.Debugf("%s is newly detected by OVAL", cve.CveID)
			vinfo = models.VulnInfo{
				CveID:       cve.CveID,
				Confidences: []models.Confidence{models.OvalMatch},
				CveContents: models.NewCveContents(*ovalContent),
			}
		} else {
			cveContents := vinfo.CveContents
			if _, ok := vinfo.CveContents[ovalContent.Type]; ok {
				logging.Log.Debugf("%s OVAL will be overwritten", cve.CveID)
			} else {
				logging.Log.Debugf("%s is also detected by OVAL", cve.CveID)
				cveContents = models.CveContents{}
			}
			vinfo.Confidences.AppendIfMissing(models.OvalMatch)
			cveContents[ovalContent.Type] = []models.CveContent{*ovalContent}
			vinfo.CveContents = cveContents
		}

		// uniq(vinfo.AffectedPackages[].Name + defPacks.binpkgFixstat(map[string(=package name)]fixStat{}))
		collectBinpkgFixstat := defPacks{
			binpkgFixstat: map[string]fixStat{},
		}
		for packName, fixStatus := range defpacks.binpkgFixstat {
			collectBinpkgFixstat.binpkgFixstat[packName] = fixStatus
		}

		for _, pack := range vinfo.AffectedPackages {
			collectBinpkgFixstat.binpkgFixstat[pack.Name] = fixStat{
				notFixedYet: pack.NotFixedYet,
				fixedIn:     pack.FixedIn,
				isSrcPack:   false,
			}
		}

		// Update package status of source packages.
		// In the case of Debian based Linux, sometimes source package name is defined as affected package in OVAL.
		// To display binary package name showed in apt-get, need to convert source name to binary name.
		for binName := range defpacks.binpkgFixstat {
			if srcPack, ok := r.SrcPackages.FindByBinName(binName); ok {
				for _, p := range defpacks.def.AffectedPacks {
					if p.Name == srcPack.Name {
						collectBinpkgFixstat.binpkgFixstat[binName] = fixStat{
							notFixedYet: p.NotFixedYet,
							fixedIn:     p.Version,
							isSrcPack:   true,
							srcPackName: srcPack.Name,
						}
					}
				}
			}
		}

		vinfo.AffectedPackages = collectBinpkgFixstat.toPackStatuses()
		vinfo.AffectedPackages.Sort()
		r.ScannedCves[cve.CveID] = vinfo
	}
}

func (o DebianBase) convertToModel(cveID string, def *ovalmodels.Definition) *models.CveContent {
	refs := make([]models.Reference, 0, len(def.References))
	for _, r := range def.References {
		refs = append(refs, models.Reference{
			Link: r.RefURL,
@@ -93,14 +97,23 @@ func (o DebianBase) convertToModel(def *ovalmodels.Definition) *models.CveConten
		})
	}

	for _, cve := range def.Advisory.Cves {
		if cve.CveID != cveID {
			continue
		}

		return &models.CveContent{
			Type:          models.NewCveContentType(o.family),
			CveID:         cve.CveID,
			Title:         def.Title,
			Summary:       def.Description,
			Cvss2Severity: def.Advisory.Severity,
			Cvss3Severity: def.Advisory.Severity,
			References:    refs,
		}
	}

	return nil
}

// Debian is the interface for Debian OVAL
@@ -109,18 +122,19 @@ type Debian struct {
}

// NewDebian creates OVAL client for Debian
func NewDebian(cnf config.VulnDictInterface) Debian {
	return Debian{
		DebianBase{
			Base{
				family: constant.Debian,
				Cnf:    cnf,
			},
		},
	}
}

// FillWithOval returns scan result after updating CVE info by OVAL
func (o Debian) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {

	//Debian's uname gives both of kernel release(uname -r), version(kernel-image version)
	linuxImage := "linux-image-" + r.RunningKernel.Release
@@ -139,29 +153,23 @@ func (o Debian) FillWithOval(driver db.DB, r *models.ScanResult) (nCVEs int, err
	}

	var relatedDefs ovalResult
	if o.Cnf.IsFetchViaHTTP() {
		if relatedDefs, err = getDefsByPackNameViaHTTP(r, o.Cnf.GetURL()); err != nil {
			return 0, err
		}
	} else {
		driver, err := newOvalDB(o.Cnf)
		if err != nil {
			return 0, err
		}
		defer func() {
			if err := driver.CloseDB(); err != nil {
				logging.Log.Errorf("Failed to close DB. err: %+v", err)
			}
		}()

		if relatedDefs, err = getDefsByPackNameFromOvalDB(driver, r); err != nil {
			return 0, err
		}
	}

@@ -185,9 +193,11 @@ func (o Debian) FillWithOval(driver db.DB, r *models.ScanResult) (nCVEs int, err
	}

	for _, vuln := range r.ScannedCves {
		if conts, ok := vuln.CveContents[models.Debian]; ok {
			for i, cont := range conts {
				cont.SourceLink = "https://security-tracker.debian.org/tracker/" + cont.CveID
				vuln.CveContents[models.Debian][i] = cont
			}
		}
	}
	return len(relatedDefs.entries), nil
@@ -199,18 +209,19 @@ type Ubuntu struct {
}

// NewUbuntu creates OVAL client for Debian
func NewUbuntu(cnf config.VulnDictInterface) Ubuntu {
	return Ubuntu{
		DebianBase{
			Base{
				family: constant.Ubuntu,
				Cnf:    cnf,
			},
		},
	}
}

// FillWithOval returns scan result after updating CVE info by OVAL
func (o Ubuntu) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
	switch util.Major(r.Release) {
	case "14":
		kernelNamesInOval := []string{
@@ -226,7 +237,7 @@ func (o Ubuntu) FillWithOval(driver db.DB, r *models.ScanResult) (nCVEs int, err
			"linux-signed-lts-xenial",
			"linux",
		}
		return o.fillWithOval(r, kernelNamesInOval)
	case "16":
		kernelNamesInOval := []string{
			"linux-aws",
@@ -261,7 +272,7 @@ func (o Ubuntu) FillWithOval(driver db.DB, r *models.ScanResult) (nCVEs int, err
			"linux-snapdragon",
			"linux",
		}
		return o.fillWithOval(r, kernelNamesInOval)
	case "18":
		kernelNamesInOval := []string{
			"linux-aws",
@@ -316,7 +327,7 @@ func (o Ubuntu) FillWithOval(driver db.DB, r *models.ScanResult) (nCVEs int, err
			"linux-snapdragon",
			"linux",
		}
		return o.fillWithOval(r, kernelNamesInOval)
	case "20":
		kernelNamesInOval := []string{
			"linux-aws",
@@ -344,12 +355,53 @@ func (o Ubuntu) FillWithOval(driver db.DB, r *models.ScanResult) (nCVEs int, err
			"linux-signed-oracle",
			"linux",
		}
		return o.fillWithOval(r, kernelNamesInOval)
	case "21":
		kernelNamesInOval := []string{
			"linux-aws",
			"linux-base-sgx",
			"linux-base",
			"linux-cloud-tools-common",
			"linux-cloud-tools-generic",
			"linux-cloud-tools-lowlatency",
			"linux-cloud-tools-virtual",
			"linux-gcp",
			"linux-generic",
			"linux-gke",
			"linux-headers-aws",
			"linux-headers-gcp",
			"linux-headers-gke",
			"linux-headers-oracle",
			"linux-image-aws",
			"linux-image-extra-virtual",
			"linux-image-gcp",
			"linux-image-generic",
			"linux-image-gke",
			"linux-image-lowlatency",
			"linux-image-oracle",
			"linux-image-virtual",
			"linux-lowlatency",
			"linux-modules-extra-aws",
			"linux-modules-extra-gcp",
			"linux-modules-extra-gke",
			"linux-oracle",
			"linux-tools-aws",
			"linux-tools-common",
			"linux-tools-gcp",
			"linux-tools-generic",
			"linux-tools-gke",
			"linux-tools-host",
			"linux-tools-lowlatency",
			"linux-tools-oracle",
			"linux-tools-virtual",
			"linux-virtual",
		}
		return o.fillWithOval(r, kernelNamesInOval)
	}
	return 0, fmt.Errorf("Ubuntu %s is not support for now", r.Release)
}

func (o Ubuntu) fillWithOval(r *models.ScanResult, kernelNamesInOval []string) (nCVEs int, err error) {
	linuxImage := "linux-image-" + r.RunningKernel.Release
	runningKernelVersion := ""
	kernelPkgInOVAL := ""
@@ -361,7 +413,7 @@ func (o Ubuntu) fillWithOval(driver db.DB, r *models.ScanResult, kernelNamesInOv
	if v, ok := r.Packages[linuxImage]; ok {
		runningKernelVersion = v.Version
	} else {
		logging.Log.Warnf("Unable to detect vulns of running kernel because the version of the running kernel is unknown. server: %s",
			r.ServerName)
	}

@@ -400,7 +452,7 @@ func (o Ubuntu) fillWithOval(driver db.DB, r *models.ScanResult, kernelNamesInOv
	}

	if kernelPkgInOVAL == "" {
		logging.Log.Warnf("The OVAL name of the running kernel image %+v is not found. So vulns of `linux` wll be detected. server: %s",
			r.RunningKernel, r.ServerName)
		kernelPkgInOVAL = "linux"
		isOVALKernelPkgAdded = true
@@ -415,11 +467,21 @@ func (o Ubuntu) fillWithOval(driver db.DB, r *models.ScanResult, kernelNamesInOv
	}

	var relatedDefs ovalResult
	if o.Cnf.IsFetchViaHTTP() {
		if relatedDefs, err = getDefsByPackNameViaHTTP(r, o.Cnf.GetURL()); err != nil {
			return 0, err
		}
	} else {
		driver, err := newOvalDB(o.Cnf)
		if err != nil {
			return 0, err
		}
		defer func() {
			if err := driver.CloseDB(); err != nil {
				logging.Log.Errorf("Failed to close DB. err: %+v", err)
			}
		}()

		if relatedDefs, err = getDefsByPackNameFromOvalDB(driver, r); err != nil {
			return 0, err
		}
@@ -450,9 +512,11 @@ func (o Ubuntu) fillWithOval(driver db.DB, r *models.ScanResult, kernelNamesInOv
	}

	for _, vuln := range r.ScannedCves {
		if conts, ok := vuln.CveContents[models.Ubuntu]; ok {
			for i, cont := range conts {
				cont.SourceLink = "http://people.ubuntu.com/~ubuntu-security/cve/" + cont.CveID
				vuln.CveContents[models.Ubuntu][i] = cont
			}
		}
	}
	return len(relatedDefs.entries), nil
@@ -1,3 +1,4 @@
//go:build !scanner
// +build !scanner

package oval
@@ -6,10 +7,8 @@ import (
	"reflect"
	"testing"

	"github.com/future-architect/vuls/models"
	ovalmodels "github.com/vulsio/goval-dictionary/models"
)

func TestPackNamesOfUpdateDebian(t *testing.T) {
@@ -31,8 +30,8 @@ func TestPackNamesOfUpdateDebian(t *testing.T) {
			},
			defPacks: defPacks{
				def: ovalmodels.Definition{
					Advisory: ovalmodels.Advisory{
						Cves: []ovalmodels.Cve{{CveID: "CVE-2000-1000"}},
					},
				},
				binpkgFixstat: map[string]fixStat{
@@ -54,15 +53,68 @@ func TestPackNamesOfUpdateDebian(t *testing.T) {
				},
			},
		},
		{
			in: models.ScanResult{
				ScannedCves: models.VulnInfos{
					"CVE-2000-1000": models.VulnInfo{
						AffectedPackages: models.PackageFixStatuses{
							{Name: "packA"},
						},
					},
					"CVE-2000-1001": models.VulnInfo{
						AffectedPackages: models.PackageFixStatuses{
							{Name: "packC"},
						},
					},
				},
			},
			defPacks: defPacks{
				def: ovalmodels.Definition{
					Advisory: ovalmodels.Advisory{
						Cves: []ovalmodels.Cve{
							{
								CveID: "CVE-2000-1000",
							},
							{
								CveID: "CVE-2000-1001",
							},
						},
					},
				},
				binpkgFixstat: map[string]fixStat{
					"packB": {
						notFixedYet: false,
					},
				},
			},
			out: models.ScanResult{
				ScannedCves: models.VulnInfos{
					"CVE-2000-1000": models.VulnInfo{
						AffectedPackages: models.PackageFixStatuses{
							{Name: "packA"},
							{Name: "packB", NotFixedYet: false},
						},
					},
					"CVE-2000-1001": models.VulnInfo{
						AffectedPackages: models.PackageFixStatuses{
							{Name: "packB", NotFixedYet: false},
							{Name: "packC"},
						},
					},
				},
			},
		},
	}

	for i, tt := range tests {
		Debian{}.update(&tt.in, tt.defPacks)
		for cveid := range tt.out.ScannedCves {
			e := tt.out.ScannedCves[cveid].AffectedPackages
			a := tt.in.ScannedCves[cveid].AffectedPackages
			if !reflect.DeepEqual(a, e) {
				t.Errorf("[%d] expected: %v\n actual: %v\n", i, e, a)
			}
		}
	}
}
@@ -1 +0,0 @@
|
||||
package oval
|
||||
95
oval/oval.go
95
oval/oval.go
@@ -1,3 +1,4 @@
//go:build !scanner
// +build !scanner

package oval
@@ -6,60 +7,92 @@ import (
	"encoding/json"
	"time"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
	"github.com/future-architect/vuls/util"
	"github.com/parnurzeal/gorequest"
	"github.com/vulsio/goval-dictionary/db"
	"golang.org/x/xerrors"
)

// Client is the interface of OVAL client.
type Client interface {
	FillWithOval(*models.ScanResult) (int, error)
	CheckIfOvalFetched(string, string) (bool, error)
	CheckIfOvalFresh(string, string) (bool, error)
}

// Base is a base struct
type Base struct {
	family string
	Cnf    config.VulnDictInterface
}

// CheckIfOvalFetched checks if oval entries are in DB by family, release.
func (b Base) CheckIfOvalFetched(osFamily, release string) (fetched bool, err error) {
	ovalFamily, err := GetFamilyInOval(osFamily)
	if err != nil {
		return false, err
	}
	if !b.Cnf.IsFetchViaHTTP() {
		driver, err := newOvalDB(b.Cnf)
		if err != nil {
			return false, err
		}
		defer func() {
			if err := driver.CloseDB(); err != nil {
				logging.Log.Errorf("Failed to close DB. err: %+v", err)
			}
		}()

		count, err := driver.CountDefs(ovalFamily, release)
		if err != nil {
			return false, xerrors.Errorf("Failed to count OVAL defs: %s, %s, %w", ovalFamily, release, err)
		}
		logging.Log.Infof("OVAL %s %s found. defs: %d", osFamily, release, count)
		return 0 < count, nil
	}

	url, _ := util.URLPathJoin(config.Conf.OvalDict.URL, "count", ovalFamily, release)
	resp, body, errs := gorequest.New().Timeout(10 * time.Second).Get(url).End()
	if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
		return false, xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
	}
	count := 0
	if err := json.Unmarshal([]byte(body), &count); err != nil {
		return false, xerrors.Errorf("Failed to Unmarshal. body: %s, err: %w", body, err)
	}
	logging.Log.Infof("OVAL %s %s is fresh. defs: %d", osFamily, release, count)
	return 0 < count, nil
}

// CheckIfOvalFresh checks if oval entries are fresh enough
func (b Base) CheckIfOvalFresh(osFamily, release string) (ok bool, err error) {
	ovalFamily, err := GetFamilyInOval(osFamily)
	if err != nil {
		return false, err
	}
	var lastModified time.Time
	if !b.Cnf.IsFetchViaHTTP() {
		driver, err := newOvalDB(b.Cnf)
		if err != nil {
			return false, err
		}
		defer func() {
			if err := driver.CloseDB(); err != nil {
				logging.Log.Errorf("Failed to close DB. err: %+v", err)
			}
		}()
		lastModified, err = driver.GetLastModified(ovalFamily, release)
		if err != nil {
			return false, xerrors.Errorf("Failed to GetLastModified: %w", err)
		}
	} else {
		url, _ := util.URLPathJoin(config.Conf.OvalDict.URL, "lastmodified", ovalFamily, release)
		resp, body, errs := gorequest.New().Timeout(10 * time.Second).Get(url).End()
		if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
			return false, xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
		}

		if err := json.Unmarshal([]byte(body), &lastModified); err != nil {
@@ -70,10 +103,32 @@ func (b Base) CheckIfOvalFresh(driver db.DB, osFamily, release string) (ok bool,
	since := time.Now()
	since = since.AddDate(0, 0, -3)
	if lastModified.Before(since) {
		logging.Log.Warnf("OVAL for %s %s is old, last modified is %s. It's recommended to update OVAL to improve scanning accuracy. How to update OVAL database, see https://github.com/vulsio/goval-dictionary#usage",
			osFamily, release, lastModified)
		return false, nil
	}
	logging.Log.Infof("OVAL %s %s is fresh. lastModified: %s", osFamily, release, lastModified.Format(time.RFC3339))
	return true, nil
}

// NewOvalDB returns oval db client
func newOvalDB(cnf config.VulnDictInterface) (driver db.DB, err error) {
	if cnf.IsFetchViaHTTP() {
		return nil, nil
	}

	path := cnf.GetURL()
	if cnf.GetType() == "sqlite3" {
		path = cnf.GetSQLite3Path()
	}

	driver, locked, err := db.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), db.Option{})
	if err != nil {
		if locked {
			err = xerrors.Errorf("SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
		}
		err = xerrors.Errorf("Failed to new OVAL DB. err: %w", err)
		return nil, err
	}
	return driver, nil
}
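A hedged sketch of how the refactored OVAL clients might be driven now that no db.DB handle is passed in (the client opens its own driver via newOvalDB); the family/release values and the cnf variable satisfying config.VulnDictInterface are assumptions for illustration:

	// Hypothetical pre-scan check using the new signatures.
	client := NewAlpine(cnf) // cnf is assumed to satisfy config.VulnDictInterface
	if ok, err := client.CheckIfOvalFetched("alpine", "3.15"); err != nil || !ok {
		logging.Log.Warnf("OVAL not fetched for alpine 3.15. err: %+v", err)
	}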
190  oval/redhat.go
@@ -1,3 +1,4 @@
//go:build !scanner
// +build !scanner

package oval

@@ -8,30 +9,41 @@ import (
    "strings"

    "github.com/future-architect/vuls/config"
    "github.com/future-architect/vuls/constant"
    "github.com/future-architect/vuls/logging"
    "github.com/future-architect/vuls/models"
    "github.com/future-architect/vuls/util"
    "github.com/kotakanbe/goval-dictionary/db"
    ovalmodels "github.com/kotakanbe/goval-dictionary/models"
    ovalmodels "github.com/vulsio/goval-dictionary/models"
)

// RedHatBase is the base struct for RedHat and CentOS
// RedHatBase is the base struct for RedHat, CentOS, Alma and Rocky
type RedHatBase struct {
    Base
}

// FillWithOval returns scan result after updating CVE info by OVAL
func (o RedHatBase) FillWithOval(driver db.DB, r *models.ScanResult) (nCVEs int, err error) {
func (o RedHatBase) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
    var relatedDefs ovalResult
    if config.Conf.OvalDict.IsFetchViaHTTP() {
        if relatedDefs, err = getDefsByPackNameViaHTTP(r); err != nil {
    if o.Cnf.IsFetchViaHTTP() {
        if relatedDefs, err = getDefsByPackNameViaHTTP(r, o.Cnf.GetURL()); err != nil {
            return 0, err
        }
    } else {
        driver, err := newOvalDB(o.Cnf)
        if err != nil {
            return 0, err
        }
        defer func() {
            if err := driver.CloseDB(); err != nil {
                logging.Log.Errorf("Failed to close DB. err: %+v", err)
            }
        }()

        if relatedDefs, err = getDefsByPackNameFromOvalDB(driver, r); err != nil {
            return 0, err
        }
    }

    relatedDefs.Sort()
    for _, defPacks := range relatedDefs.entries {
        nCVEs += o.update(r, defPacks)
    }
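This hunk changes the calling convention: FillWithOval no longer takes a db.DB, and the client opens and closes the OVAL DB itself from the config it was constructed with (the config-taking constructors appear later in this file's diff). A minimal sketch of the new call pattern, assuming some cnf satisfying config.VulnDictInterface and a scan result r; the package and helper names here are illustrative, not code from the diff.

package detector // illustrative package name

import (
    "github.com/future-architect/vuls/config"
    "github.com/future-architect/vuls/logging"
    "github.com/future-architect/vuls/models"
    "github.com/future-architect/vuls/oval"
)

// fillRedHat shows the post-refactor calling convention: the OVAL client is
// built from the vuln-dict config and manages its own DB driver internally.
func fillRedHat(cnf config.VulnDictInterface, r *models.ScanResult) error {
    client := oval.NewRedhat(cnf)        // config is carried as o.Cnf
    nCVEs, err := client.FillWithOval(r) // no db.DB argument any more
    if err != nil {
        return err
    }
    logging.Log.Infof("%d CVEs are detected with OVAL", nCVEs)
    return nil
}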
@@ -39,14 +51,33 @@ func (o RedHatBase) FillWithOval(driver db.DB, r *models.ScanResult) (nCVEs int,
    for _, vuln := range r.ScannedCves {
        switch models.NewCveContentType(o.family) {
        case models.RedHat:
            if cont, ok := vuln.CveContents[models.RedHat]; ok {
                cont.SourceLink = "https://access.redhat.com/security/cve/" + cont.CveID
                vuln.CveContents[models.RedHat] = cont
            if conts, ok := vuln.CveContents[models.RedHat]; ok {
                for i, cont := range conts {
                    cont.SourceLink = "https://access.redhat.com/security/cve/" + cont.CveID
                    vuln.CveContents[models.RedHat][i] = cont
                }
            }
        case models.Oracle:
            if cont, ok := vuln.CveContents[models.Oracle]; ok {
                cont.SourceLink = fmt.Sprintf("https://linux.oracle.com/cve/%s.html", cont.CveID)
                vuln.CveContents[models.Oracle] = cont
            if conts, ok := vuln.CveContents[models.Oracle]; ok {
                for i, cont := range conts {
                    cont.SourceLink = fmt.Sprintf("https://linux.oracle.com/cve/%s.html", cont.CveID)
                    vuln.CveContents[models.Oracle][i] = cont
                }
            }
        case models.Amazon:
            for _, d := range vuln.DistroAdvisories {
                if conts, ok := vuln.CveContents[models.Amazon]; ok {
                    for i, cont := range conts {
                        if strings.HasPrefix(d.AdvisoryID, "ALAS2022-") {
                            cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/AL2022/%s.html", strings.ReplaceAll(d.AdvisoryID, "ALAS2022", "ALAS"))
                        } else if strings.HasPrefix(d.AdvisoryID, "ALAS2-") {
                            cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/AL2/%s.html", strings.ReplaceAll(d.AdvisoryID, "ALAS2", "ALAS"))
                        } else if strings.HasPrefix(d.AdvisoryID, "ALAS-") {
                            cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/%s.html", d.AdvisoryID)
                        }
                        vuln.CveContents[models.Amazon][i] = cont
                    }
                }
            }
        }
    }
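The CveContents map now holds a slice of CveContent per source type, so the SourceLink rewrite iterates the slice and writes each entry back by index. The Amazon branch additionally derives the advisory URL from the ALAS ID prefix. A self-contained sketch of that prefix-to-URL mapping, using only the fmt/strings behaviour shown above; the helper name alasSourceLink and the sample advisory ID are made up.

package main

import (
    "fmt"
    "strings"
)

// alasSourceLink mirrors the Amazon branch above: ALAS2022-*, ALAS2-* and
// ALAS-* advisories map to different ALAS URL roots.
func alasSourceLink(advisoryID string) string {
    switch {
    case strings.HasPrefix(advisoryID, "ALAS2022-"):
        return fmt.Sprintf("https://alas.aws.amazon.com/AL2022/%s.html", strings.ReplaceAll(advisoryID, "ALAS2022", "ALAS"))
    case strings.HasPrefix(advisoryID, "ALAS2-"):
        return fmt.Sprintf("https://alas.aws.amazon.com/AL2/%s.html", strings.ReplaceAll(advisoryID, "ALAS2", "ALAS"))
    case strings.HasPrefix(advisoryID, "ALAS-"):
        return fmt.Sprintf("https://alas.aws.amazon.com/%s.html", advisoryID)
    default:
        return ""
    }
}

func main() {
    // Hypothetical ID: prints https://alas.aws.amazon.com/AL2/ALAS-2022-1234.html
    fmt.Println(alasSourceLink("ALAS2-2022-1234"))
}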
@@ -81,60 +112,71 @@ var kernelRelatedPackNames = map[string]bool{
    "kernel-tools":            true,
    "kernel-tools-libs":       true,
    "kernel-tools-libs-devel": true,
    "kernel-uek":              true,
    "perf":                    true,
    "python-perf":             true,
}

func (o RedHatBase) update(r *models.ScanResult, defPacks defPacks) (nCVEs int) {
    ctype := models.NewCveContentType(o.family)
    for _, cve := range defPacks.def.Advisory.Cves {
        ovalContent := *o.convertToModel(cve.CveID, &defPacks.def)
func (o RedHatBase) update(r *models.ScanResult, defpacks defPacks) (nCVEs int) {
    for _, cve := range defpacks.def.Advisory.Cves {
        ovalContent := o.convertToModel(cve.CveID, &defpacks.def)
        if ovalContent == nil {
            continue
        }
        vinfo, ok := r.ScannedCves[cve.CveID]
        if !ok {
            util.Log.Debugf("%s is newly detected by OVAL", cve.CveID)
            logging.Log.Debugf("%s is newly detected by OVAL: DefID: %s", cve.CveID, defpacks.def.DefinitionID)
            vinfo = models.VulnInfo{
                CveID:       cve.CveID,
                Confidences: models.Confidences{models.OvalMatch},
                CveContents: models.NewCveContents(ovalContent),
                CveContents: models.NewCveContents(*ovalContent),
            }
            nCVEs++
        } else {
            cveContents := vinfo.CveContents
            if v, ok := vinfo.CveContents[ctype]; ok {
                if v.LastModified.After(ovalContent.LastModified) {
                    util.Log.Debugf("%s, OvalID: %d ignored: ",
                        cve.CveID, defPacks.def.ID)
                } else {
                    util.Log.Debugf("%s OVAL will be overwritten", cve.CveID)
            if v, ok := vinfo.CveContents[ovalContent.Type]; ok {
                for _, vv := range v {
                    if vv.LastModified.After(ovalContent.LastModified) {
                        logging.Log.Debugf("%s ignored. DefID: %s ", cve.CveID, defpacks.def.DefinitionID)
                    } else {
                        logging.Log.Debugf("%s OVAL will be overwritten. DefID: %s", cve.CveID, defpacks.def.DefinitionID)
                    }
                }
            } else {
                util.Log.Debugf("%s also detected by OVAL", cve.CveID)
                logging.Log.Debugf("%s also detected by OVAL. DefID: %s", cve.CveID, defpacks.def.DefinitionID)
                cveContents = models.CveContents{}
            }

            vinfo.Confidences.AppendIfMissing(models.OvalMatch)
            cveContents[ctype] = ovalContent
            cveContents[ovalContent.Type] = []models.CveContent{*ovalContent}
            vinfo.CveContents = cveContents
        }

        vinfo.DistroAdvisories.AppendIfMissing(
            o.convertToDistroAdvisory(&defPacks.def))
            o.convertToDistroAdvisory(&defpacks.def))

        // uniq(vinfo.AffectedPackages[].Name + defPacks.binpkgFixstat(map[string(=package name)]fixStat{}))
        collectBinpkgFixstat := defPacks{
            binpkgFixstat: map[string]fixStat{},
        }
        for packName, fixStatus := range defpacks.binpkgFixstat {
            collectBinpkgFixstat.binpkgFixstat[packName] = fixStatus
        }

        // uniq(vinfo.PackNames + defPacks.actuallyAffectedPackNames)
        for _, pack := range vinfo.AffectedPackages {
            if stat, ok := defPacks.binpkgFixstat[pack.Name]; !ok {
                defPacks.binpkgFixstat[pack.Name] = fixStat{
            if stat, ok := collectBinpkgFixstat.binpkgFixstat[pack.Name]; !ok {
                collectBinpkgFixstat.binpkgFixstat[pack.Name] = fixStat{
                    notFixedYet: pack.NotFixedYet,
                    fixedIn:     pack.FixedIn,
                }
            } else if stat.notFixedYet {
                defPacks.binpkgFixstat[pack.Name] = fixStat{
                collectBinpkgFixstat.binpkgFixstat[pack.Name] = fixStat{
                    notFixedYet: true,
                    fixedIn:     pack.FixedIn,
                }
            }
        }
        vinfo.AffectedPackages = defPacks.toPackStatuses()
        vinfo.AffectedPackages = collectBinpkgFixstat.toPackStatuses()
        vinfo.AffectedPackages.Sort()
        r.ScannedCves[cve.CveID] = vinfo
    }
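After merging CVE contents, the update loop rebuilds the per-package fix status: entries reported by OVAL for this definition are copied into collectBinpkgFixstat, then the packages already attached to the VulnInfo are folded in, keeping an existing notFixedYet flag sticky while adopting the package's FixedIn version. A reduced sketch of that folding rule with stand-in types; fixStat here mirrors only the two fields used above, and the package names and versions are hypothetical.

package main

import "fmt"

// fixStat mirrors only the two fields the merge above relies on.
type fixStat struct {
    notFixedYet bool
    fixedIn     string
}

// mergeFixStat is an illustrative reduction of the loop above: packages not
// reported by OVAL are added as-is, while an entry already collected from
// OVAL that is flagged notFixedYet keeps that flag but adopts the package's
// fixedIn version.
func mergeFixStat(collected map[string]fixStat, name string, notFixedYet bool, fixedIn string) {
    if stat, ok := collected[name]; !ok {
        collected[name] = fixStat{notFixedYet: notFixedYet, fixedIn: fixedIn}
    } else if stat.notFixedYet {
        collected[name] = fixStat{notFixedYet: true, fixedIn: fixedIn}
    }
}

func main() {
    collected := map[string]fixStat{
        "kernel": {notFixedYet: true}, // reported by OVAL as not fixed yet
    }
    mergeFixStat(collected, "kernel", false, "4.18.0-348.el8")   // hypothetical fixed version
    mergeFixStat(collected, "openssl", false, "1.1.1k-5.el8")    // not reported by OVAL: added as-is
    fmt.Printf("%+v\n", collected)
    // map[kernel:{notFixedYet:true fixedIn:4.18.0-348.el8} openssl:{notFixedYet:false fixedIn:1.1.1k-5.el8}]
}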
@@ -143,9 +185,12 @@ func (o RedHatBase) update(r *models.ScanResult, defPacks defPacks) (nCVEs int)

func (o RedHatBase) convertToDistroAdvisory(def *ovalmodels.Definition) *models.DistroAdvisory {
    advisoryID := def.Title
    if (o.family == config.RedHat || o.family == config.CentOS) && len(advisoryID) > 0 {
        ss := strings.Fields(def.Title)
        advisoryID = strings.TrimSuffix(ss[0], ":")
    switch o.family {
    case constant.RedHat, constant.CentOS, constant.Alma, constant.Rocky, constant.Oracle:
        if def.Title != "" {
            ss := strings.Fields(def.Title)
            advisoryID = strings.TrimSuffix(ss[0], ":")
        }
    }
    return &models.DistroAdvisory{
        AdvisoryID: advisoryID,
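convertToDistroAdvisory derives the advisory ID for the RHEL family from the OVAL definition title: the first whitespace-separated field, with a trailing colon trimmed. A small self-contained illustration of that extraction; the sample title is invented, in the style of Red Hat OVAL definitions.

package main

import (
    "fmt"
    "strings"
)

// advisoryIDFromTitle mirrors the extraction above: first field of the OVAL
// definition title, with a trailing ":" trimmed.
func advisoryIDFromTitle(title string) string {
    if title == "" {
        return title
    }
    ss := strings.Fields(title)
    return strings.TrimSuffix(ss[0], ":")
}

func main() {
    title := "RHSA-2021:4151: krb5 security update (Moderate)" // hypothetical title
    fmt.Println(advisoryIDFromTitle(title))                    // RHSA-2021:4151
}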
@@ -157,18 +202,19 @@ func (o RedHatBase) convertToDistroAdvisory(def *ovalmodels.Definition) *models.
}

func (o RedHatBase) convertToModel(cveID string, def *ovalmodels.Definition) *models.CveContent {
    refs := make([]models.Reference, 0, len(def.References))
    for _, r := range def.References {
        refs = append(refs, models.Reference{
            Link:   r.RefURL,
            Source: r.Source,
            RefID:  r.RefID,
        })
    }

    for _, cve := range def.Advisory.Cves {
        if cve.CveID != cveID {
            continue
        }
        var refs []models.Reference
        for _, r := range def.References {
            refs = append(refs, models.Reference{
                Link:   r.RefURL,
                Source: r.Source,
                RefID:  r.RefID,
            })
        }

        score2, vec2 := o.parseCvss2(cve.Cvss2)
        score3, vec3 := o.parseCvss3(cve.Cvss3)
@@ -246,11 +292,12 @@ type RedHat struct {
}

// NewRedhat creates OVAL client for Redhat
func NewRedhat() RedHat {
func NewRedhat(cnf config.VulnDictInterface) RedHat {
    return RedHat{
        RedHatBase{
            Base{
                family: config.RedHat,
                family: constant.RedHat,
                Cnf:    cnf,
            },
        },
    }
@@ -262,11 +309,12 @@ type CentOS struct {
}

// NewCentOS creates OVAL client for CentOS
func NewCentOS() CentOS {
func NewCentOS(cnf config.VulnDictInterface) CentOS {
    return CentOS{
        RedHatBase{
            Base{
                family: config.CentOS,
                family: constant.CentOS,
                Cnf:    cnf,
            },
        },
    }
@@ -278,11 +326,12 @@ type Oracle struct {
}

// NewOracle creates OVAL client for Oracle
func NewOracle() Oracle {
func NewOracle(cnf config.VulnDictInterface) Oracle {
    return Oracle{
        RedHatBase{
            Base{
                family: config.Oracle,
                family: constant.Oracle,
                Cnf:    cnf,
            },
        },
    }
@@ -295,11 +344,48 @@ type Amazon struct {
}

// NewAmazon creates OVAL client for Amazon Linux
func NewAmazon() Amazon {
func NewAmazon(cnf config.VulnDictInterface) Amazon {
    return Amazon{
        RedHatBase{
            Base{
                family: config.Amazon,
                family: constant.Amazon,
                Cnf:    cnf,
            },
        },
    }
}

// Alma is the interface for RedhatBase OVAL
type Alma struct {
    // Base
    RedHatBase
}

// NewAlma creates OVAL client for Alma Linux
func NewAlma(cnf config.VulnDictInterface) Alma {
    return Alma{
        RedHatBase{
            Base{
                family: constant.Alma,
                Cnf:    cnf,
            },
        },
    }
}

// Rocky is the interface for RedhatBase OVAL
type Rocky struct {
    // Base
    RedHatBase
}

// NewRocky creates OVAL client for Rocky Linux
func NewRocky(cnf config.VulnDictInterface) Rocky {
    return Rocky{
        RedHatBase{
            Base{
                family: constant.Rocky,
                Cnf:    cnf,
            },
        },
    }
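Alma and Rocky Linux reuse the RedHatBase logic; only the family constant and the injected vuln-dict config differ per constructor. A hedged sketch of dispatching to the constructors added in this diff by scanned OS family; the ovalClient interface, the helper, and the package name are illustrative and not taken from the diff.

package detector // illustrative package name

import (
    "github.com/future-architect/vuls/config"
    "github.com/future-architect/vuls/constant"
    "github.com/future-architect/vuls/models"
    "github.com/future-architect/vuls/oval"
)

// ovalClient captures just the method these clients share after the refactor.
// It is defined here for illustration only.
type ovalClient interface {
    FillWithOval(r *models.ScanResult) (int, error)
}

// newRedHatFamilyClient is an illustrative helper that picks one of the
// config-taking constructors from this diff based on the scanned OS family.
func newRedHatFamilyClient(family string, cnf config.VulnDictInterface) ovalClient {
    switch family {
    case constant.RedHat:
        return oval.NewRedhat(cnf)
    case constant.CentOS:
        return oval.NewCentOS(cnf)
    case constant.Oracle:
        return oval.NewOracle(cnf)
    case constant.Amazon:
        return oval.NewAmazon(cnf)
    case constant.Alma:
        return oval.NewAlma(cnf)
    case constant.Rocky:
        return oval.NewRocky(cnf)
    default:
        return nil
    }
}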
Some files were not shown because too many files have changed in this diff.